code stringlengths 51 2.34k | docstring stringlengths 11 171 |
|---|---|
def convert_notebooks(self):
    """Convert the selected IPython notebooks to Python scripts in the editor."""
    selection = self.get_selected_filenames()
    # A single selection may come back as a bare filename; normalise to a list.
    filenames = selection if isinstance(selection, (tuple, list)) else [selection]
    for filename in filenames:
        self.convert_notebook(filename)
def _recv_flow(self, method_frame):
    """Handle a Channel.Flow frame from the broker.

    Records the broker-requested active state, replies with a
    Channel.FlowOk (class 20, method 21) echoing our active flag, and
    fires the optional flow-control callback.
    """
    # Broker signals whether publishing should be active (True) or paused.
    self.channel._active = method_frame.args.read_bit()
    # Acknowledge with FlowOk carrying our current active state.
    args = Writer()
    args.write_bit(self.channel.active)
    self.send_frame(MethodFrame(self.channel_id, 20, 21, args))
    if self._flow_control_cb is not None:
        self._flow_control_cb()
def elog(exc, func, args=None, kwargs=None, str=str, pretty=True, name=''):
from .str import safe_str
args = args if args else ()
kwargs = kwargs if kwargs else {}
name = '{}.{}'.format(get_mod(func), name) if name else full_funcname(func)
if pretty:
invocation = ', '.join([safe_str(arg) for... | For logging exception-raising function invocations during randomized unit tests. |
def submit(self):
self._newflg = False
ret = list()
for buf in self._buffer.values():
buf = copy.deepcopy(buf)
if self._fdpext:
buf['fpout'] = f"{self._fproot}/{buf['label']}.{self._fdpext}"
else:
del buf['fpout']
bu... | Submit traced TCP flows. |
def _transition_loop(self):
while self._transitions:
start = time.time()
for transition in self._transitions:
transition.step()
if transition.finished:
self._transitions.remove(transition)
time_delta = time.time() - start
... | Execute all queued transitions step by step. |
def tas2mach(Vtas, H):
    """Convert true airspeed Vtas to Mach number at altitude H.

    Mach = Vtas / a, where a is the local speed of sound at H.
    """
    speed_of_sound = vsound(H)
    return Vtas / speed_of_sound
def add_intspin(self, setting):
tab = self.panel(setting.tab)
default = setting.value
(minv, maxv) = setting.range
ctrl = wx.SpinCtrl(tab, -1,
initial = default,
min = minv,
max = maxv)
self._add_inp... | add a spin control |
def visit_Set(self, pattern):
if len(pattern.elts) > MAX_UNORDERED_LENGTH:
raise DamnTooLongPattern("Pattern for Set is too long")
return (isinstance(self.node, Set) and
any(self.check_list(self.node.elts, pattern_elts)
for pattern_elts in permutations(pat... | Set have unordered values. |
def draw_image(self, metric, limit=5):
rows = 1
cols = limit
self.ax.axis("off")
gs = matplotlib.gridspec.GridSpecFromSubplotSpec(
rows, cols, subplot_spec=self.gs)
for i, image in enumerate(metric.data[-cols:]):
ax = self.figure.add_subplot(gs[0, i])
... | Display a series of images at different time steps. |
def connect(self):
logger.info("Connecting to RabbitMQ on {broker_url}...".format(
broker_url=self.broker_url))
super(RabbitMQSubscriber, self).connect()
q = Queue(exchange=self.exchange, exclusive=True, durable=False)
self.queue = q(self.connection.default_channel)
s... | Connects to RabbitMQ and starts listening |
def dev_version():
    """Return an md5 hexdigest over all .py files in the module.

    Files are hashed in sorted-name order so the digest is stable.
    Returns '' when no .py files are found.
    """
    py_files = sorted(list_files(suffix=".py"))
    if not py_files:
        return ''
    digest = hashlib.md5()
    for path in py_files:
        with open(path, 'rb') as fobj:
            digest.update(fobj.read())
    return digest.hexdigest()
def main():
    """Print event information whenever the mouse is used (loops forever)."""
    while True:
        for event in get_mouse():
            print(event.ev_type, event.code, event.state)
def parse_value(proto):
if proto.HasField('floatValue'):
return proto.floatValue
elif proto.HasField('doubleValue'):
return proto.doubleValue
elif proto.HasField('sint32Value'):
return proto.sint32Value
elif proto.HasField('uint32Value'):
return proto.uint32Value
elif... | Convers a Protobuf `Value` from the API into a python native value |
def blockgen(bytes, block_size=16):
    """Yield consecutive block_size-sized chunks of *bytes* for pprp.

    The final chunk may be shorter than block_size. (Parameter name
    `bytes` shadows the builtin but is kept for interface compatibility.)
    """
    total = len(bytes)
    offset = 0
    while offset < total:
        chunk = bytes[offset:offset + block_size]
        yield chunk
        if len(chunk) < block_size:
            break
        offset += block_size
def query_mxrecords(self):
import dns.resolver
logging.info('Resolving DNS query...')
answers = dns.resolver.query(self.domain, 'MX')
addresses = [answer.exchange.to_text() for answer in answers]
logging.info(
'{} records found:\n{}'.format(
len(addres... | Looks up for the MX DNS records of the recipient SMTP server |
def dump_queue(queue):
    """Empty all pending items in *queue* and return them as a list.

    Drains with get_nowait() until queue.Empty is raised, so it never
    blocks waiting for new items.
    """
    result = []
    try:
        while True:
            item = queue.get_nowait()
            result.append(item)
    # Bug fix: the original read `except: Empty` — a bare except whose body
    # merely evaluated the name Empty, silently swallowing *every* exception
    # (including KeyboardInterrupt). Catch only the intended queue.Empty.
    except Empty:
        pass
    return result
def search(geo_coords, mode=2, verbose=True):
if not isinstance(geo_coords, tuple) and not isinstance(geo_coords, list):
raise TypeError('Expecting a tuple or a tuple/list of tuples')
elif not isinstance(geo_coords[0], tuple):
geo_coords = [geo_coords]
_rg = RGeocoder(mode=mode, verbose=verb... | Function to query for a list of coordinates |
def _api_get(self):
    """GET this object from the server and populate it with the response.

    Uses the class-level ``api_endpoint`` as the request target; the JSON
    response is fed to ``_populate`` to refresh this instance's fields.
    """
    json = self._client.get(type(self).api_endpoint, model=self)
    self._populate(json)
def _load_text_csv_file(self, filename, separator=',', **kwargs):
rdd_input = self.sc.textFile(filename)
def load_csv_record(line):
input_stream = StringIO.StringIO(line)
reader = csv.reader(input_stream, delimiter=',')
payload = reader.next()
key = payloa... | Return a pair RDD where key is taken from first column, remaining columns are named after their column id as string |
def finish(self):
    """Record the current stack process as finished.

    Reports 100% progress, then appends a runtime record (elapsed time
    plus this process's params) under the current stack key in ``self.data``.
    """
    self.report(fraction=1.0)
    key = self.stack_key
    if key is not None:
        if self.data.get(key) is None:
            self.data[key] = []
        # Fall back to "now" (zero runtime) if no start time was recorded.
        start_time = self.current_times.get(key) or time()
        self.data[key].append(Dict(runtime=time()-start_time, **self.params))
def saved_xids(self):
if self._saved_xids is None:
self._saved_xids = []
if self.debug:
fpfn = os.path.join(self.tcex.args.tc_temp_path, 'xids-saved')
if os.path.isfile(fpfn) and os.access(fpfn, os.R_OK):
with open(fpfn) as fh:
... | Return previously saved xids. |
def try_run_setup(**kwargs):
try:
run_setup(**kwargs)
except Exception as e:
print(str(e))
if "xgboost" in str(e).lower():
kwargs["test_xgboost"] = False
print("Couldn't install XGBoost for testing!")
try_run_setup(**kwargs)
elif "lightgbm" in ... | Fails gracefully when various install steps don't work. |
def encompasses(self, span):
    """Return True if *span* fits inside this one.

    When given a list of spans, return the sub-list of spans that fit.
    """
    if not isinstance(span, list):
        return self._encompasses(span)
    return [candidate for candidate in span if self._encompasses(candidate)]
def dump(self):
org = min(self.memory_bytes.keys())
OUTPUT = []
align = []
for i in range(org, max(self.memory_bytes.keys()) + 1):
if gl.has_errors:
return org, OUTPUT
try:
try:
a = [x for x in self.orgs[i] if is... | Returns a tuple containing code ORG, and a list of OUTPUT |
def _generic_model(self, z3_model):
    """Convert a Z3 model to a name -> primitive-value dict."""
    model = { }
    for m_f in z3_model:
        # Declaration name of this model constant.
        n = _z3_decl_name_str(m_f.ctx.ctx, m_f.ast).decode()
        # Evaluate the constant in the model, then lower to a Python primitive.
        m = m_f()
        me = z3_model.eval(m)
        model[n] = self._abstract_to_primitive(me.ctx.ctx, me.ast)
    return model
def _url(endpoint: str, sandbox: bool=False) -> str:
    """Build a full URL from the API's base URLs.

    Selects the sandbox base when ``sandbox`` is exactly True, otherwise
    the production base, and appends ``endpoint``.
    """
    base = BASE_URL_SANDBOX if sandbox is True else BASE_URL
    return "{0}{1}".format(base, endpoint)
def decodeCommandLine(self, cmdline):
    """Turn a byte string from the command line into a unicode string.

    Decodes using stdin's encoding when available, falling back to the
    interpreter default. (Python 2 API: relies on the ``unicode`` builtin.)
    """
    codec = getattr(sys.stdin, 'encoding', None) or sys.getdefaultencoding()
    return unicode(cmdline, codec)
def update_button_status(self):
    """Enable the Ok button only when the 'displaced' field has content."""
    has_text = len(self.displaced.currentField()) > 0
    ok_button = self.button_box.button(QtWidgets.QDialogButtonBox.Ok)
    ok_button.setEnabled(has_text)
def remove_record(self, common_name):
bundle = self.get_files(common_name)
num_signees = len(Counter(bundle.record['signees']))
if bundle.is_ca() and num_signees > 0:
raise CertificateAuthorityInUseError(
"Authority {name} has signed {x} certificates"
... | Delete the record associated with this common name |
def generate(env):
"Add RPCGEN Builders and construction variables for an Environment."
client = Builder(action=rpcgen_client, suffix='_clnt.c', src_suffix='.x')
header = Builder(action=rpcgen_header, suffix='.h', src_suffix='.x')
service = Builder(action=rpcgen_service, suffix='_svc.c', src_s... | Add RPCGEN Builders and construction variables for an Environment. |
def inverse(self):
    """Return the inverse of this transformation as a new SymmOp."""
    return SymmOp(np.linalg.inv(self.affine_matrix))
def getSkeletalReferenceTransforms(self, action, eTransformSpace, eReferencePose, unTransformArrayCount):
fn = self.function_table.getSkeletalReferenceTransforms
pTransformArray = VRBoneTransform_t()
result = fn(action, eTransformSpace, eReferencePose, byref(pTransformArray), unTransformArrayCou... | Fills the given buffer with the transforms for a specific static skeletal reference pose |
def execute(self):
parser = LaxOptionParser(
usage="%prog subcommand [options] [args]",
option_list=BaseCommand.option_list
)
options, args = parser.parse_args(self.argv)
options = handle_default_options(options)
try:
subcommand = self.argv[1]
... | Run the command with the command line arguments |
def noise_uniform(self, lower_bound, upper_bound):
    """Create a uniform noise variable on [lower_bound, upper_bound].

    Allocates a fresh symbolic 'nu_<i>' variable, registers it in the
    scope, and returns it scaled into the requested interval.
    """
    assert upper_bound > lower_bound
    index = len(self.scope['nu'])
    nu = self.sym.sym('nu_{:d}'.format(index))
    self.scope['nu'].append(nu)
    return lower_bound + nu * (upper_bound - lower_bound)
def ensure_chambers():
france = Country.objects.get(name="France")
for key in ('AN', 'SEN'):
variant = FranceDataVariants[key]
Chamber.objects.get_or_create(name=variant['chamber'],
abbreviation=variant['abbreviation'],
... | Ensures chambers are created |
def _sim_fill(r1, r2, imsize):
bbsize = (
(max(r1["max_x"], r2["max_x"]) - min(r1["min_x"], r2["min_x"]))
* (max(r1["max_y"], r2["max_y"]) - min(r1["min_y"], r2["min_y"]))
)
return 1.0 - (bbsize - r1["size"] - r2["size"]) / imsize | calculate the fill similarity over the image |
def to_dataframe(self):
variant_properties = [
"contig",
"start",
"ref",
"alt",
"is_snv",
"is_transversion",
"is_transition"
]
def row_from_effect(effect):
row = OrderedDict()
row['variant... | Build a dataframe from the effect collection |
def fetch_pool(repo_url, branch='master', reuse_existing=False):
repo_name = get_repo_name(repo_url)
lib_dir = get_lib_dir()
pool_dir = get_pool_dir(repo_name)
print('... fetching %s ' % repo_name)
if os.path.exists(pool_dir):
if not reuse_existing:
raise Exception('ERROR: reposi... | Fetch a git repository from ``repo_url`` and returns a ``FeaturePool`` object. |
def maybe_reverse_features(self, feature_map):
if not self._was_reversed:
return
inputs = feature_map.pop("inputs", None)
targets = feature_map.pop("targets", None)
inputs_seg = feature_map.pop("inputs_segmentation", None)
targets_seg = feature_map.pop("targets_segmentation", None)
inputs_... | Reverse features between inputs and targets if the problem is '_rev'. |
async def jsk_source(self, ctx: commands.Context, *, command_name: str):
command = self.bot.get_command(command_name)
if not command:
return await ctx.send(f"Couldn't find command `{command_name}`.")
try:
source_lines, _ = inspect.getsourcelines(command.callback)
... | Displays the source code for a command. |
def cli(inargs=None):
parser = argparse.ArgumentParser()
parser.add_argument(
'--version', '-V',
action='version',
version='%%(prog)s %s' % __version__
)
parser.add_argument(
'filename',
metavar="filename",
help="Input STEM file"
)
parser.add_argum... | Commandline interface for receiving stem files |
def _emit_warning(cls, message):
sys.stderr.write('WARNING: {message}\n'.format(message=message))
sys.stderr.flush() | Print an warning message to STDERR. |
def _setup_logger(self, level, log_file):
if logger.handlers:
return
level = getattr(logging, level.upper())
logger.setLevel(level)
formatter = logging.Formatter(
'[%(levelname)s] %(asctime)s - %(module)s.%(funcName)s() - %(message)s')
handler = logging.St... | Setup log level and log file if set |
def receive_pong(self, pong: Pong):
message_id = ('ping', pong.nonce, pong.sender)
async_result = self.messageids_to_asyncresults.get(message_id)
if async_result is not None:
self.log_healthcheck.debug(
'Pong received',
sender=pex(pong.sender),
... | Handles a Pong message. |
def _clean_record(self, record):
for k, v in dict(record).items():
if isinstance(v, dict):
v = self._clean_record(v)
if v is None:
record.pop(k)
return record | Remove all fields with `None` values |
def truncate_money(money: Money) -> Money:
    """Truncate the amount to the currency's decimal precision.

    Returns a new Money in the same currency.
    """
    return Money(truncate_to(money.amount, money.currency), money.currency)
def main(
gpus:Param("The GPUs to use for distributed training", str)='all',
script:Param("Script to run", str, opt=False)='',
args:Param("Args to pass to script", nargs='...', opt=False)=''
):
"PyTorch distributed training launch helper that spawns multiple distributed processes"
current_env = os.e... | PyTorch distributed training launch helper that spawns multiple distributed processes |
def dims_intersect(self):
    """Dimensions of the arrays in this list that are used in all arrays.

    Each array contributes its own ``dims_intersect`` if present, else its
    ``dims``; the result is the set intersection across all of them.
    """
    dim_sets = [set(getattr(arr, 'dims_intersect', arr.dims)) for arr in self]
    return set.intersection(*dim_sets)
def from_geojson(cls, filename):
    """Load a vector from a geojson file.

    Raises TypeError when the parsed JSON has no 'type' key (i.e. is not
    valid geojson). The resulting geometry is assumed to be in WGS84.
    """
    with open(filename) as handle:
        geometry = json.load(handle)
    if 'type' not in geometry:
        raise TypeError("%s is not a valid geojson." % (filename,))
    return cls(to_shape(geometry), WGS84_CRS)
def _update_privacy(self, table_name, privacy):
    """Update the privacy setting of the named dataset and persist it."""
    ds_manager = DatasetManager(self.auth_client)
    dataset = ds_manager.get(table_name)
    dataset.privacy = privacy
    dataset.save()
def lnprob(self,theta):
global niter
params,priors,loglike = self.params,self.priors,self.loglike
_lnprior = self.lnprior(theta)
if np.isfinite(_lnprior):
_lnlike = self.lnlike(theta)
else:
_lnprior = -np.inf
_lnlike = -np.inf
_lnprob =... | Logarithm of the probability |
def original(modname):
original_name = '__original_module_' + modname
if original_name in sys.modules:
return sys.modules.get(original_name)
saver = SysModulesSaver((modname,))
sys.modules.pop(modname, None)
try:
real_mod = __import__(modname, {}, {}, modname.split('.')[:-1])
... | This returns an unpatched version of a module. |
def parseArgs():
parser = argparse.ArgumentParser()
parser.add_argument("name", help="the file you want to split")
parser.add_argument("out1", help="the name of the first file you want to output")
parser.add_argument("out2", help="the name of the second file you want to output")
return parser.parse_... | Parses arguments passed in via the command line |
def coordinate(self, center=None, radius=0.001):
if center is None:
return Decimal(str(self.generator.random.randint(-180000000, 180000000) / 1000000.0)).quantize(
Decimal(".000001"),
)
else:
center = float(center)
radius = float(radius)
... | Optionally center the coord and pick a point within radius. |
def run():
print("Environment", os.environ)
try:
os.environ["SELENIUM"]
except KeyError:
print("Please set the environment variable SELENIUM to Selenium URL")
sys.exit(1)
driver = WhatsAPIDriver(client='remote', command_executor=os.environ["SELENIUM"])
print("Waiting for QR")... | Locks the main thread while the subscription in running |
def _flushBuffer(self, request):
    """Flush any pending data from the buffer to the request, then clear it."""
    # Sanity check: we only ever flush to the session's own request.
    assert request is self.requestSession.request
    self.requestSession.writeData(self.buffer)
    self.buffer = []
def disassociate_health_monitor(self, pool, health_monitor):
    """Disassociate the given load balancer health monitor from the pool.

    Issues a DELETE against the pool/health-monitor association path.
    """
    substitutions = {'pool': pool, 'health_monitor': health_monitor}
    path = self.disassociate_pool_health_monitors_path % substitutions
    return self.delete(path)
def _remove_uri_scheme_from_textbuffer(self, scheme):
length = len(scheme)
while length:
if length < len(self._textbuffer[-1]):
self._textbuffer[-1] = self._textbuffer[-1][:-length]
break
length -= len(self._textbuffer[-1])
self._textbu... | Remove the URI scheme of a new external link from the textbuffer. |
def classview_for(self, action='view'):
    """Return the classview containing the view handler for *action*.

    Looks up the registration for the current Flask app and instantiates
    the classview bound to this object.
    """
    app = current_app._get_current_object()
    return self.view_for_endpoints[app][action][0](self)
def process_config_dict(self, key, d, level):
    """Render the CONFIG block: one quoted "CONFIG KEY" / value line per entry.

    ``level`` controls indentation depth; ``key`` is not referenced in the
    body (presumably kept for a uniform processor signature — verify).
    Returns the formatted lines as a list.
    """
    lines = []
    for k, v in d.items():
        # Keys are upper-cased and quoted; values quoted as-is.
        k = "CONFIG {}".format(self.quoter.add_quotes(k.upper()))
        v = self.quoter.add_quotes(v)
        lines.append(self.__format_line(self.whitespace(level, 1), k, v))
    return lines
def _get_conn(socket=DEFAULT_SOCKET_URL):
    """Get a connection to the haproxy admin socket.

    Validates that the path exists and is actually a UNIX socket before
    connecting (assertion failures carry the offending path).
    """
    assert os.path.exists(socket), '{0} does not exist.'.format(socket)
    mode = os.stat(socket).st_mode
    assert stat.S_ISSOCK(mode), '{0} is not a socket.'.format(socket)
    return haproxy.conn.HaPConn(socket)
def get(self, project_name, updatetime=None, md5sum=None):
if time.time() - self.last_check_projects > self.CHECK_PROJECTS_INTERVAL:
self._check_projects()
if self._need_update(project_name, updatetime, md5sum):
self._update_project(project_name)
return self.projects.get(... | get project data object, return None if not exists |
def check_success(self, device_id, sent_cmd1, sent_cmd2):
device_id = device_id.upper()
self.logger.info('check_success: for device %s cmd1 %s cmd2 %s',
device_id, sent_cmd1, sent_cmd2)
sleep(2)
status = self.get_buffer_status(device_id)
check_id = status... | Check if last command succeeded by checking buffer |
async def resolve(self, client):
if self.resolved:
return
if not self._resolve_lock:
self._resolve_lock = asyncio.Lock(loop=client.loop)
async with self._resolve_lock:
if not self.resolved:
await self._resolve(client)
self.resol... | Helper method to allow event builders to be resolved before usage |
def load_plugins(self, args=None):
for item in os.listdir(plugins_path):
if (item.startswith(self.header) and
item.endswith(".py") and
item != (self.header + "plugin.py")):
self._load_plugin(os.path.basename(item),
... | Load all plugins in the 'plugins' folder. |
def mangle(data_point):
temp_dict = {}
temp_dict.update(data_point)
temp_dict['utc_datetime'] = \
datetime.datetime.utcfromtimestamp(temp_dict['time'])
if 'solar' in data_point:
temp_dict['GHI (W/m^2)'] = data_point['solar']['ghi']
temp_dict['DNI (W/m^2)'] = data_point['solar']['... | mangle data into expected format. |
def _add_message_info_multiple(self, msg_info):
if msg_info.key in self._msg_info_multiple_dict:
if msg_info.is_continued:
self._msg_info_multiple_dict[msg_info.key][-1].append(msg_info.value)
else:
self._msg_info_multiple_dict[msg_info.key].append([msg_in... | add a message info multiple to self._msg_info_multiple_dict |
def print_continuum(self):
numpoints = len(self.runtime._keys)
if numpoints:
print('Numpoints in continuum: {}'.format(numpoints))
else:
print('Continuum empty')
for p in self.get_points():
point, node = p
print('{} ({})'.format(node, point... | Prints a ketama compatible continuum report. |
def _closing_bracket_index(self, text, bpair=('(', ')')):
level = 1
for i, char in enumerate(text[1:]):
if char == bpair[0]:
level += 1
elif char == bpair[1]:
level -= 1
if level == 0:
return i + 1 | Return the index of the closing bracket that matches the opening bracket at the start of the text. |
def receive(self, timeout=None):
log.debug('Receiving')
if not self._socket:
log.warn('No connection')
return
try:
if timeout:
rv = self._socket.poll(timeout)
if not rv:
log.info('Connection timeouted')
... | Receive data through websocket |
def _nics_equal(nic1, nic2):
def _filter_nic(nic):
return {
'type': nic.attrib['type'],
'source': nic.find('source').attrib[nic.attrib['type']] if nic.find('source') is not None else None,
'mac': nic.find('mac').attrib['address'].lower() if nic.find('mac') is not None els... | Test if two interface elements should be considered like the same device |
def _configure(configuration_details):
path = Path(configuration_details.path).expanduser()
with path.open('a') as shell_config:
shell_config.write(u'\n')
shell_config.write(configuration_details.content)
shell_config.write(u'\n') | Adds alias to shell config. |
def _check_forest(self, sensors):
if self in sensors:
raise ValueError('Circular dependency in sensors: %s is its own'
'parent.' % (self.name,))
sensors.add(self)
for parent in self._parents:
parent._check_forest(sensors) | Validate that this sensor doesn't end up referencing itself. |
def CheckEmail(self, email, checkTypo=False):
    """Check a single email address for correctness.

    Valid means exactly one '@' and a domain present in ``self.valid``.
    ``checkTypo`` is accepted for interface compatibility but unused here.
    """
    parts = email.split('@')
    if len(parts) != 2:
        return False
    return parts[1] in self.valid
def send(self, diffTo, diffFrom):
    """Do a btrfs send of the diff between the two snapshots."""
    diff = self.toObj.diff(diffTo, diffFrom)
    self._open(self.butterStore.send(diff))
def save_method(elements, module_path):
for elem, signature in elements.items():
if isinstance(signature, dict):
save_method(signature, module_path + (elem,))
elif isinstance(signature, Class):
save_method(signature.fields, module_path + (elem,))
elif signature.ismeth... | Recursively save methods with module name and signature. |
def patch(self, patch):
    """Merge a ParsedNodePatch's metadata into this node, then re-validate."""
    patched_fields = {
        'patch_path': patch.original_file_path,
        'description': patch.description,
        'columns': patch.columns,
        'docrefs': patch.docrefs,
    }
    self._contents.update(patched_fields)
    self.validate()
def add_to_linestring(position_data, kml_linestring):
    """Add a point to the KML linestring.

    NOTE: mutates *position_data* in place — the altitude (index 2) is
    offset by the global ``args.aoff`` before the coordinate is added.
    """
    global kml  # NOTE(review): declared but never used in this body — verify.
    position_data[2] += float(args.aoff)
    kml_linestring.coords.addcoordinates([position_data])
def onKeyReleaseInCanvas(self, event):
char_map = { 'w':'move 0', 'a':'strafe 0', 's':'move 0', 'd':'strafe 0', ' ':'jump 0' }
keysym_map = { 'Left':'turn 0', 'Right':'turn 0', 'Up':'pitch 0', 'Down':'pitch 0', 'Shift_L':'crouch 0', 'Shift_R':'crouch 0',
'1':'hotbar.1 0', '2':'ho... | Called when a key is released when the command entry box has focus. |
def query_walkers():
    """Return query walker instances from the app's COLLECTIONS_QUERY_WALKERS.

    Config entries may be either classes or dotted import strings; strings
    are imported first, then every entry is instantiated.
    """
    instances = []
    for walker in current_app.config['COLLECTIONS_QUERY_WALKERS']:
        if isinstance(walker, six.string_types):
            walker = import_string(walker)
        instances.append(walker())
    return instances
def start(self):
logger.info("starting process")
process = os.fork()
time.sleep(0.01)
if process != 0:
logger.debug('starting child watcher')
self.loop.reset()
self.child_pid = process
self.watcher = pyev.Child(self.child_pid, False, self.l... | Start the process, essentially forks and calls target function. |
def callable_name(callable_obj):
try:
if (isinstance(callable_obj, type)
and issubclass(callable_obj, param.ParameterizedFunction)):
return callable_obj.__name__
elif (isinstance(callable_obj, param.Parameterized)
and 'operation' in callable_obj.params()):
... | Attempt to return a meaningful name identifying a callable or generator |
def get(self):
    """Write the filesystem details (as a dict) to the response."""
    details = self.fs.get_filesystem_details()
    self.write(details.to_dict())
def DeleteOldRuns(self, job, cutoff_timestamp=None, token=None):
if cutoff_timestamp is None:
raise ValueError("cutoff_timestamp can't be None")
child_flows = list(job.ListChildren(age=cutoff_timestamp))
with queue_manager.QueueManager(token=token) as queuemanager:
queuemanager.MultiDestroyFlowS... | Deletes flows initiated by the job that are older than specified. |
def to_dict(self):
result = {"mapreduce_spec": self.mapreduce_spec.to_json_str(),
"shard_id": self.shard_id,
"slice_id": str(self.slice_id),
"input_reader_state": self.input_reader.to_json_str(),
"initial_input_reader_state":
self.initial_input_r... | Convert state to dictionary to save in task payload. |
def label(self):
    """Timestamped label for this snapshot: 'desc (T.TTTs)' or bare 'T.TTTs'."""
    stamp = "%.3fs" % self.timestamp
    if self.desc:
        return "%s (%s)" % (self.desc, stamp)
    return stamp
def library_directories(self):
    """Directories containing static libraries built by this IOTile.

    Returns the output folder when any 'library' products exist,
    otherwise an empty list.
    """
    if not self.find_products('library'):
        return []
    return [os.path.join(self.output_folder)]
def randsample(vec, nr_samples, with_replacement = False):
    """Draw nr_samples random samples from vec.

    Without replacement: a random permutation prefix (so samples are
    unique). With replacement: independent random index draws.
    """
    if with_replacement:
        indices = np.random.randint(0, len(vec), nr_samples)
        return np.asarray(vec)[indices]
    return np.random.permutation(vec)[0:nr_samples]
def _enqueue_init_updates(self):
assert self.state.bgp_state == const.BGP_FSM_ESTABLISHED
if self.is_mbgp_cap_valid(RF_RTC_UC):
self._peer_manager.comm_all_rt_nlris(self)
self._schedule_sending_init_updates()
else:
tm = self._core_service.table_manager
... | Enqueues current routes to be shared with this peer. |
def _handle_tag_pillar_refresh(self, tag, data):
    """Handle a pillar_refresh event by triggering a pillar refresh.

    ``tag`` is unused here; ``data`` may carry 'force_refresh' and
    'notify' flags (both default to False). Yields the refresh result.
    """
    yield self.pillar_refresh(
        force_refresh=data.get('force_refresh', False),
        notify=data.get('notify', False)
    )
def disconnect(self):
if self.connected and self.channel:
logging.debug("Disconnecting KNX/IP tunnel...")
frame = KNXIPFrame(KNXIPFrame.DISCONNECT_REQUEST)
frame.body = self.hpai_body()
if self.seq < 0xff:
self.seq += 1
else:
... | Disconnect an open tunnel connection |
def _timedatectl():
    """Run ``timedatectl`` and return the full cmd.run_all result dict.

    Raises CommandExecutionError (with stderr) when the command exits
    non-zero.
    """
    ret = __salt__['cmd.run_all'](['timedatectl'], python_shell=False)
    if ret['retcode'] != 0:
        msg = 'timedatectl failed: {0}'.format(ret['stderr'])
        raise CommandExecutionError(msg)
    return ret
def active(self, include=None):
    """Return all active views, optionally side-loading *include*."""
    url = self._build_url(self.endpoint.active(include=include))
    return self._get(url)
def close_pingbacks(self, request, queryset):
    """Admin action: close (disable) pingbacks for the selected entries."""
    queryset.update(pingback_enabled=False)
    self.message_user(
        request, _('Pingbacks are now closed for selected entries.'))
def print_vm_info(vm):
summary = vm.summary
print('Name : ', summary.config.name)
print('Path : ', summary.config.vmPathName)
print('Guest : ', summary.config.guestFullName)
annotation = summary.config.annotation
if annotation is not None and annotation != '':
print('Annotation : ', an... | Print information for a particular virtual machine |
def credibleregions(self, probs):
    """Calculate the credible regions.

    For each probability p, finds (via Brent root-finding on [0, 1]) the
    pdf level l such that the total mass strictly above l equals p.
    """
    def excess_mass(level, target):
        return self.pdf[self.pdf > level].sum() - target
    return [brentq(excess_mass, 0.0, 1.0, args=(p,)) for p in probs]
def from_coordinates(cls, coordinates):
    """Create a `Primitive` from a list of coordinates.

    Each coordinate becomes a PseudoMonomer holding a single 'CA'
    PseudoAtom; all monomers are relabelled before the primitive is
    returned.
    """
    prim = cls()
    for coord in coordinates:
        pm = PseudoMonomer(ampal_parent=prim)
        pa = PseudoAtom(coord, ampal_parent=pm)
        pm.atoms = OrderedDict([('CA', pa)])
        prim.append(pm)
    prim.relabel_all()
    return prim
def _tm(self, theta, phi, psi, dx, dy, dz):
    """Compute the minimisation target, not normalised.

    Applies the rotation/translation matrix to coord2, takes squared
    distances to coord1 (summed over axis 0 — assumes components are
    stacked row-wise, one column per point; TODO confirm), and scores
    them with a TM-score-like kernel, negated so smaller is better.
    """
    matrix = self.get_matrix(theta, phi, psi, dx, dy, dz)
    coord = matrix.dot(self.coord2)
    dist = coord - self.coord1
    d_i2 = (dist * dist).sum(axis=0)
    tm = -(1 / (1 + (d_i2 / self.d02)))
    return tm
def mutating_join(*args, **kwargs):
left = args[0]
right = args[1]
if 'by' in kwargs:
left_cols, right_cols = get_join_cols(kwargs['by'])
else:
left_cols, right_cols = None, None
if 'suffixes' in kwargs:
dsuffixes = kwargs['suffixes']
else:
dsuffixes = ('_x', '_y')
if left._grouped_on:
... | generic function for mutating dplyr-style joins |
def looks_like_xml(text):
if xml_decl_re.match(text):
return True
key = hash(text)
try:
return _looks_like_xml_cache[key]
except KeyError:
m = doctype_lookup_re.match(text)
if m is not None:
return True
rv = tag_re.search(text[:1000]) is not None
... | Check if a doctype exists or if we have some tags. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.