| code (string, lengths 51–2.34k) | sequence (string, lengths 1.16k–13.1k) | docstring (string, lengths 11–171) |
|---|---|---|
def addPSF(self, psf, date=None, info='', light_spectrum='visible'):
self._registerLight(light_spectrum)
date = _toDate(date)
f = self.coeffs['psf']
if light_spectrum not in f:
f[light_spectrum] = []
f[light_spectrum].insert(_insertDateIndex(date, f[light_spectr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'addPSF'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children': []... | add a new point spread function |
def send_mail(self, subject, to, template, **template_ctx):
if not self.mail:
from warnings import warn
warn('Attempting to send mail without the mail bundle installed! '
'Please install it, or fix your configuration.')
return
self.mail.send(subject, ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'send_mail'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children': ... | Utility method to send mail with the `mail` template context. |
def readCache(self, filename):
with open(filename, 'rb') as f:
self.modules = pickle.load(f) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'readCache'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Load the graph from a cache file. |
def check_runner(self):
if os.getcwd() not in sys.path:
sys.path.append(os.getcwd())
if self.runner is None:
self.runner = Runner(self.comp, exit=self.exit_runner, store=self.mypy) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_runner'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Make sure there is a runner. |
def _find_usage_gateways(self):
gws = self.conn.describe_internet_gateways()
self.limits['Internet gateways']._add_current_usage(
len(gws['InternetGateways']),
aws_type='AWS::EC2::InternetGateway',
) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_find_usage_gateways'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | find usage for Internet Gateways |
def _buildFileUrl(self, xml_req):
return '%(protocol)s://%(host)s:%(port)s%(xml_req)s'%{
'protocol': self._protocol,
'host': self._host,
'port': self._port,
'xml_req': xml_req,
} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_buildFileUrl'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Builds url for fetching the files from FM. |
def remove_excluded(self):
sources = list(self.sources.values())
for src in sources:
if src.excluded:
del self.sources[src.name]
src.imports = [m for m in src.imports if not self._exclude(m)]
src.imported_by = [m for m in src.imported_by if not self._e... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_excluded'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Remove all sources marked as excluded. |
def scale_to_vol(self, vol):
f = np.exp((np.log(vol) - np.log(self.vol)) / self.n)
self.expand *= f
self.cov *= f**2
self.am *= f**-2
self.axlens *= f
self.axes *= f
self.vol = vol | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'scale_to_vol'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Scale ellipsoid to a target volume. |
def _init_equiv(self):
gocolored_all = set(self.go2color)
go2obj_usr = self.gosubdag.go2obj
go2color_add = {}
for gocolored_cur, color in self.go2color.items():
if gocolored_cur in go2obj_usr:
goobj = go2obj_usr[gocolored_cur]
goids_equiv = goo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_equiv'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Add equivalent GO IDs to go2color, if necessary. |
def seed_aws_data(ctx, data):
swag = create_swag_from_ctx(ctx)
for k, v in json.loads(data.read()).items():
for account in v['accounts']:
data = {
'description': 'This is an AWS owned account used for {}'.format(k),
'id': account['account_id'],
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'seed_aws_data'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Seeds SWAG from a list of known AWS accounts. |
def with_ctx(func=None):
if not func:
return functools.partial(with_ctx)
@functools.wraps(func)
def func_with_context(_obj, *args, **kwargs):
if 'ctx' not in kwargs or kwargs['ctx'] is None:
with _obj.ctx() as new_ctx:
kwargs['ctx'] = new_ctx
retur... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'with_ctx'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6']}; {'... | Auto create a new context if not available |
def truncate(value: Decimal, n_digits: int) -> Decimal:
return Decimal(math.trunc(value * (10 ** n_digits))) / (10 ** n_digits) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'truncate'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5',... | Truncates a value to a number of decimal places |
def _machinectl(cmd,
output_loglevel='debug',
ignore_retcode=False,
use_vt=False):
prefix = 'machinectl --no-legend --no-pager'
return __salt__['cmd.run_all']('{0} {1}'.format(prefix, cmd),
output_loglevel=output_loglevel,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_machinectl'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11']}; {'id': '4', 'type': 'identifier', 'children': []... | Helper function to run machinectl |
def register(linter):
linter.register_checker(ResourceLeakageChecker(linter))
linter.register_checker(BlacklistedImportsChecker(linter))
linter.register_checker(MovedTestCaseClassChecker(linter))
linter.register_checker(BlacklistedLoaderModulesUsageChecker(linter))
linter.register_checker(Blackliste... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'linter'}... | Required method to auto register this checker |
def check_event_coverage(patterns, event_list):
proportions = []
for pattern_list in patterns:
proportion = 0
for pattern in pattern_list:
for node in pattern.nodes():
if node in event_list:
proportion += 1.0 / len(pattern_list)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_event_coverage'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Calculate the ratio of patterns that were extracted. |
def utcnow(cls):
obj = datetime.datetime.utcnow()
obj = cls(obj, tzinfo=pytz.utc)
return obj | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'utcnow'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; {'i... | Return a new datetime representing UTC day and time. |
def _doc_method(klass, func):
argspec = inspect.getfullargspec(func)
if argspec.args and argspec.args[0] == 'self':
del argspec.args[0]
args = inspect.formatargspec(*argspec)
header = "{klass}.{name}{args}".format(klass=klass.__name__,
name=_name(func... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_doc_method'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Generate the docstring of a method. |
def check_dependee_build(self, depender, dependee, dependee_id):
shutit_global.shutit_global_object.yield_to_draw()
cfg = self.cfg
if not (cfg[dependee.module_id]['shutit.core.module.build'] or
self.is_to_be_built_or_is_installed(dependee)):
return 'depender module id:\n\n[' + depender.module_id + ']... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_dependee_build'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7']}; {'id': '4', 'type': 'identifier', 'childr... | Checks whether a depended on module is configured to be built. |
def name(self) -> str:
if self.direction == DIRECTION_IN:
return self.raw.get('Input.Name', '')
return self.raw.get('Output.Name', '') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'name'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Return name relevant to direction. |
def pythonise(id, encoding='ascii'):
replace = {'-': '_', ':': '_', '/': '_'}
func = lambda id, pair: id.replace(pair[0], pair[1])
id = reduce(func, replace.iteritems(), id)
id = '_%s' % id if id[0] in string.digits else id
return id.encode(encoding) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pythonise'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'id... | Return a Python-friendly id |
def load_stream(cls, st):
y = yaml.load(st)
return [ Automaton(k, v) for k, v in y.iteritems() ] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load_stream'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Load Automatons from a stream |
def tags(cls, filename, namespace=None):
return cls._raster_opener(filename).tags(ns=namespace) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'tags'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cl... | Extract tags from file. |
def log_decl_method(func):
from functools import wraps
@wraps(func)
def with_logging(*args, **kwargs):
self = args[0]
decl = args[2]
log(DEBUG, u" {}: {} {}".format(
self.state['current_step'], decl.name,
serialize(decl.value).strip()).encode('utf-8'))
    ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_decl_method'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'f... | Decorate do_declaration methods for debug logging. |
def _save_pickle(self, filename):
with open(filename, 'wb') as file_handle:
pickle.dump(self._sensors, file_handle, pickle.HIGHEST_PROTOCOL)
file_handle.flush()
os.fsync(file_handle.fileno()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_save_pickle'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Save sensors to pickle file. |
def run(self, cmd):
print datetime.datetime.now()
output = subprocess.Popen(cmd, shell=True)
output = output.communicate()[0]
print output | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Runs the appropriate command |
def unsign(self, token):
if self.max_age is None:
data = self.signer.unsign(token)
else:
data = self.signer.unsign(token, max_age=self.max_age)
return signing.b64_decode(data.encode()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'unsign'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Extract the data from a signed ``token``. |
def _legacy_api_registration_check(self):
logger.debug('Checking registration status...')
machine_id = generate_machine_id()
try:
url = self.api_url + '/v1/systems/' + machine_id
net_logger.info("GET %s", url)
res = self.session.get(url, timeout=self.config.ht... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_legacy_api_registration_check'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': ... | Check registration status through API |
def request(self, action, data={}, headers={}, method='GET'):
headers = {
"Authorization": "Bearer " + self.token,
"Content-Type": "application/json",
"X-Version": "1",
"Accept": "application/json"
}
return Transport.request(self, action, data, hea... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '15']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'children': [... | Append the REST headers to every request |
def delete(self, key):
self._get_table()
self.table.delete_item(key=key)
log.debug("Deleted item at key '%s'" % (key)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | If this key exists, delete it |
def _detect_buffer_encoding(self, f):
encoding = None
with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as m:
encoding = self._analyze_file(m)
return encoding | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_detect_buffer_encoding'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Guess by checking BOM, and checking `_special_encode_check`, and using memory map. |
def build_media(self):
logger.debug("Building media directory")
if self.verbosity > 1:
self.stdout.write("Building media directory")
if os.path.exists(self.media_root) and settings.MEDIA_URL:
target_dir = path.join(self.fs_name, self.build_dir, settings.MEDIA_URL.lstrip('... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_media'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | Build the media files. |
def copy_experiment(experiment):
try:
publisher.publish_experiment_job_log(
log_lines='Copying outputs from experiment `{}` into experiment `{}`'.format(
experiment.original_experiment.unique_name, experiment.unique_name
),
experiment_uuid=experiment.uuid.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copy_experiment'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'e... | If experiment is a restart, we should resume from last check point |
def loads(cls, data):
rep = cbor.loads(data)
if not isinstance(rep, Sequence):
raise SerializationError('expected a CBOR list')
if len(rep) != 2:
raise SerializationError('expected a CBOR list of 2 items')
metadata = rep[0]
if 'v' not in metadata:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'loads'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'};... | Create a feature collection from a CBOR byte string. |
def _thumbnail_local(self, original_filename, thumb_filename,
thumb_size, thumb_url, crop=None, bg=None,
quality=85):
self._get_path(thumb_filename)
thumb_url_full = url_for('static', filename=thumb_url)
if os.path.exists(thumb_filename):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_thumbnail_local'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '12', '15']}; {'id': '4', 'type': 'i... | Finds or creates a thumbnail for the specified image on the local filesystem. |
def create_tool(self, task):
gp_tool = dict(taskName=task.name,
taskDisplayName=task.display_name,
taskDescription=task.description,
canRunInBackground=True,
taskUri=task.uri)
gp_tool['execute'] = self._execute_t... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'create_tool'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Creates a new GPTool for the toolbox. |
def before(f, chain=False):
def decorator(g):
@wraps(g)
def h(*args, **kargs):
if chain:
return g(f(*args, **kargs))
else:
f(*args, **kargs)
return g(*args, **kargs)
return h
return decorator | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'before'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'f'}; ... | Runs f before the decorated function. |
def finish_hanging(self):
if self.groups.starting_signature:
if self.groups.starting_group:
self.add_tokens_for_group(with_pass=True)
elif self.groups.starting_single:
self.add_tokens_for_single(ignore=True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'finish_hanging'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Add tokens for hanging signature if any |
def coerce(value):
if isinstance(value, StringCell):
return value
elif isinstance(value, (str, unicode)):
return StringCell(value)
else:
raise CoercionFailure("Cannot coerce %s to StringCell" % (value)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'coerce'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'value'}; {... | Turns value into a string |
def build_specfile_filesection(spec, files):
str = '%files\n'
if 'X_RPM_DEFATTR' not in spec:
spec['X_RPM_DEFATTR'] = '(-,root,root)'
str = str + '%%defattr %s\n' % spec['X_RPM_DEFATTR']
supported_tags = {
'PACKAGING_CONFIG' : '%%config %s',
'PACKAGING_CONFIG_NOREPLACE... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build_specfile_filesection'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children':... | builds the %file section of the specfile |
def wrap_io_os_err(e):
msg = ''
if e.strerror:
msg = e.strerror
if e.message:
msg = ' '.join([e.message, msg])
if e.filename:
msg = ': '.join([msg, e.filename])
return msg | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'wrap_io_os_err'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'e'... | Formats IO and OS error messages for wrapping in FSQExceptions |
def orcid_uri_to_orcid(value):
"Strip the uri schema from the start of ORCID URL strings"
if value is None:
return value
replace_values = ['http://orcid.org/', 'https://orcid.org/']
for replace_value in replace_values:
value = value.replace(replace_value, '')
return value | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'orcid_uri_to_orcid'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Strip the uri schema from the start of ORCID URL strings |
def _make_write_func(file_obj):
if file_obj is None:
return ffi.NULL
@ffi.callback("cairo_write_func_t", error=constants.STATUS_WRITE_ERROR)
def write_func(_closure, data, length):
file_obj.write(ffi.buffer(data, length))
return constants.STATUS_SUCCESS
return write_func | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_make_write_func'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Return a CFFI callback that writes to a file-like object. |
def rename_annotations(self, sentence):
annotations = []
for token in sentence:
data = {CLAUSE_IDX: token[CLAUSE_IDX]}
if CLAUSE_ANNOT in token:
if 'KINDEL_PIIR' in token[CLAUSE_ANNOT]:
data[CLAUSE_ANNOTATION] = CLAUSE_BOUNDARY
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'rename_annotations'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Function that renames and restructures clause information. |
def deleteAnnot(self, fannot):
CheckParent(self)
val = _fitz.Page_deleteAnnot(self, fannot)
if val:
val.thisown = True
val.parent = weakref.proxy(self)
val.parent._annot_refs[id(val)] = val
fannot._erase()
return val | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'deleteAnnot'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Delete annot if PDF and return next one |
def fit_transform_poof(self, X, y=None, outpath=None, **kwargs):
self.fit_transform(X, y, **kwargs)
self.poof(outpath, **kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'fit_transform_poof'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12']}; {'id': '4', 'type': 'identifier', 'c... | Fit the model and transforms and then call poof. |
def keep_sources(self, keep):
if self.unmixing_ is None or self.mixing_ is None:
raise RuntimeError("No sources available (run do_mvarica first)")
n_sources = self.mixing_.shape[0]
self.remove_sources(np.setdiff1d(np.arange(n_sources), np.array(keep)))
return self | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'keep_sources'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Keep only the specified sources in the decomposition. |
def reset_headers(self):
rows = self.rowCount()
cols = self.columnCount()
for r in range(rows):
self.setVerticalHeaderItem(r, QTableWidgetItem(str(r)))
for c in range(cols):
self.setHorizontalHeaderItem(c, QTableWidgetItem(str(c)))
self.setColum... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reset_headers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Update the column and row numbering in the headers. |
def always_fail(cls, request) -> [
(200, 'Ok', String),
(406, 'Not Acceptable', Void)]:
task_id = uuid4().hex.upper()[:5]
log.info('Starting always FAILING task {}'.format(task_id))
for i in range(randint(0, MAX_LOOP_DURATION)):
yield
Respond(406)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6', '16']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'always_fail'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Perform an always failing task. |
def nchunks_initialized(self):
prog = re.compile(r'\.'.join([r'\d+'] * min(1, self.ndim)))
return sum(1 for k in listdir(self.chunk_store, self._path) if prog.match(k)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'nchunks_initialized'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | The number of chunks that have been initialized with some data. |
def blackbox(blackbox):
if tuple(sorted(blackbox.output_indices)) != blackbox.output_indices:
raise ValueError('Output indices {} must be ordered'.format(
blackbox.output_indices))
partition(blackbox.partition)
for part in blackbox.partition:
if not set(part) & set(blackbox.outpu... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'blackbox'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'blackbox... | Validate a macro blackboxing. |
def format_csv(self, delim=',', qu='"'):
res = qu + self.name + qu + delim
if self.data:
for d in self.data:
res += qu + str(d) + qu + delim
return res + '\n' | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'format_csv'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Prepares the data in CSV format |
def apply(self, reboot=False):
self.root.use_virtual_addresses = True
self.root.manage.manage = True
self.root.mode = 'new'
self.root.init_boot = reboot
self.client.set_profile(self.root.get_json()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'apply'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Apply the configuration to iRMC. |
def _secrets_table_name(environment=None, stage=None):
if environment is None:
environment = os.environ.get("HUMILIS_ENVIRONMENT")
if stage is None:
stage = os.environ.get("HUMILIS_STAGE")
if environment:
if stage:
return "{environment}-{stage}-secrets".format(**locals())... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_secrets_table_name'}; {'id': '3', 'type': 'parameters', 'children': ['4', '7']}; {'id': '4', 'type': 'default_parameter', 'children'... | Name of the secrets table associated to a humilis deployment. |
def focusOutEvent(self, ev):
Kittens.widgets.ClickableTreeWidget.focusOutEvent(self, ev)
wid = QApplication.focusWidget()
while wid:
if wid is self:
return
wid = wid.parent()
self._startOrStopEditing() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'focusOutEvent'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Redefine focusOut events to stop editing |
def run (self):
try:
self.run_checked()
except KeyboardInterrupt:
thread.interrupt_main()
except Exception:
self.internal_error() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Handle keyboard interrupt and other errors. |
def forward(self, x: torch.Tensor, sublayer: Callable[[torch.Tensor], torch.Tensor]) -> torch.Tensor:
return x + self.dropout(sublayer(self.norm(x))) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '26', '30']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'forward'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '11']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Apply residual connection to any sublayer with the same size. |
def cleanup(self, sched, coro):
try:
sched.sigwait[self.name].remove((self, coro))
except ValueError:
pass
return True | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cleanup'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Remove this coro from the waiting for signal queue. |
def pre_flight_headers(self, request, methods):
methods = ', '.join(m.value for m in methods)
headers = {
'Allow': methods,
'Cache-Control': 'no-cache, no-store'
}
allow_origin = self.allow_origin(request)
if allow_origin:
headers = dict_filter... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pre_flight_headers'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': []... | Generate pre-flight headers. |
def valid_max_age(number):
"Validate a cookie Max-Age"
if isinstance(number, basestring):
try:
number = long(number)
except (ValueError, TypeError):
return False
if number >= 0 and number % 1 == 0:
return True
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'valid_max_age'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'num... | Validate a cookie Max-Age |
def reboot(env, identifier, hard):
hardware_server = env.client['Hardware_Server']
mgr = SoftLayer.HardwareManager(env.client)
hw_id = helpers.resolve_id(mgr.resolve_ids, identifier, 'hardware')
if not (env.skip_confirmations or
formatting.confirm('This will power off the server with id %s. ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reboot'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Reboot an active server. |
def machine(self):
if not self._ptr:
raise BfdException("BFD not initialized")
return _bfd.get_bfd_attribute(self._ptr, BfdAttributes.FLAVOUR) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'machine'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Return the flavour attribute of the BFD file being processed. |
def parse_affine(self, hdat, dataobj=None):
if 'affine' in hdat: return to_affine(hdat['affine'])
else: return to_affine(self.default_affine()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_affine'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Parses the affine out of the given header data and yields it. |
def setup_tree(ctx, verbose=None, root=None, tree_dir=None, modules_dir=None):
print('Setting up the tree')
ctx.run('python bin/setup_tree.py -t {0} -r {1} -m {2}'.format(tree_dir, root, modules_dir)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup_tree'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', 'children... | Sets up the SDSS tree environment |
def _update_axes_color(self, color):
prop_x = self.axes_actor.GetXAxisCaptionActor2D().GetCaptionTextProperty()
prop_y = self.axes_actor.GetYAxisCaptionActor2D().GetCaptionTextProperty()
prop_z = self.axes_actor.GetZAxisCaptionActor2D().GetCaptionTextProperty()
if color is None:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_update_axes_color'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Internal helper to set the axes label color |
def check_path_matches_patterns(path, patterns):
path = os.path.abspath(path)
for patt in patterns:
if isinstance(patt, six.string_types):
if path == patt:
return True
elif patt.search(path):
return True
return False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_path_matches_patterns'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children'... | Check if the path matches at least one of the provided patterns. |
def from_acl_response(acl_response):
if 'read' in acl_response:
read_acl = AclType.from_acl_response(acl_response['read'])
return Acl(read_acl)
else:
raise ValueError('Response does not contain read ACL') | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'from_acl_response'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Takes JSON response from API and converts to ACL object |
def write_file(writer, filename):
for line in txt_line_iterator(filename):
writer.write(line)
writer.write("\n") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'w... | Write all of lines from file using the writer. |
def add_autoscaling(subparsers):
autoscaling_parser = subparsers.add_parser(
'autoscaling',
help=runner.create_scaling_policy.__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
autoscaling_parser.set_defaults(func=runner.create_scaling_policy) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'add_autoscaling'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Auto Scaling Group Policy subcommands. |
def _save_percolator(self):
index = Content.search_objects.mapping.index
query_filter = self.get_content(published=False).to_dict()
q = {}
if "query" in query_filter:
q = {"query": query_filter.get("query", {})}
else:
return
q["sponsored"] = bool(s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_save_percolator'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Saves the query field as an elasticsearch percolator |
def print_param_values(self_):
self = self_.self
for name,val in self.param.get_param_values():
print('%s.%s = %s' % (self.name,name,val)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'print_param_values'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Print the values of all this object's Parameters. |
def getheader(self, field, default=''):
if self.headers:
for header in self.headers:
if field.lower() == header.lower():
return self.headers[header]
return default | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getheader'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Returns the HTTP response header field, case insensitively |
def toggleDrawingSensitive(self, drawing=True):
self.actions.editMode.setEnabled(not drawing)
if not drawing and self.beginner():
print('Cancel creation.')
self.canvas.setEditing(True)
self.canvas.restoreCursor()
self.actions.create.setEnabled(True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'toggleDrawingSensitive'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | In the middle of drawing, toggling between modes should be disabled. |
def node_val_set(self, graph, node, key, branch, turn, tick, value):
if (branch, turn, tick) in self._btts:
raise TimeError
self._btts.add((branch, turn, tick))
graph, node, key, value = map(self.pack, (graph, node, key, value))
self._nodevals2set.append((graph, node, key, br... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'node_val_set'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10', '11']}; {'id': '4', 'type': 'ident... | Set a key-value pair on a node at a specific branch and revision |
def _init_request_hooks(self):
for method_type in ('pre', 'post'):
for method in _METHODS:
event = getattr(self.app, 'on_' + method_type + '_' + method)
event_hook = getattr(hooks, method_type + '_' + method)
event += event_hook | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_request_hooks'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | initialize pre request hooks |
def babel_extract(config, input, output, target, keywords):
click.echo(
click.style(
"Starting Extractions config:{0} input:{1} output:{2} keywords:{3}".format(
config, input, output, keywords
),
fg="green",
)
)
keywords = " -k ".join(keywo... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'babel_extract'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'children... | Babel, Extracts and updates all messages marked for translation |
def mutable_record_transform(cls):
if not (len(cls.bases) > 0
and isinstance(cls.bases[0], astroid.Call)
and cls.bases[0].func.as_string() == 'mutablerecords.Record'):
return
try:
if len(cls.bases[0].args) >= 2:
for a in cls.bases[0].args[1].elts:
cls.locals[a] = [None]
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'mutable_record_transform'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Transform mutable records usage by updating locals. |
def exec_cmd(self, cmdstr):
parts = cmdstr.split()
if len(parts):
cmd, args = parts[0], parts[1:]
self._dispatch(cmd, args)
else:
pass | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'exec_cmd'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Parse line from CLI read loop and execute provided command |
def euclidean_dissim(a, b, **_):
if np.isnan(a).any() or np.isnan(b).any():
raise ValueError("Missing values detected in numerical columns.")
return np.sum((a - b) ** 2, axis=1) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'euclidean_dissim'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Euclidean distance dissimilarity function |
def parse_int_arg(name, default):
return default if request.args.get(name) is None \
else int(request.args.get(name)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'parse_int_arg'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Return a given URL parameter as int or return the default value. |
def scrape_links(html, engine):
soup = BeautifulSoup(html, 'lxml')
links = []
if engine == 'd':
results = soup.findAll('a', {'class': 'result__a'})
for result in results:
link = result.get('href')[15:]
link = link.replace('/blob/', '/raw/')
links.append(link)
elif engine == 'g':
results = soup.findAl... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'scrape_links'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | function to scrape file links from html response |
def _prune(self):
delkeys = [k for k in self.keys() if k not in self.__ringbuffer]
for k in delkeys:
super(KRingbuffer,self).__delitem__(k) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_prune'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Primitive way to keep dict in sync with RB. |
def map(self, method: str, *args, _threaded: bool = True, **kwargs
) -> "AttrIndexedDict":
"For all stored items, run a method they possess."
work = lambda item: getattr(item, method)(*args, **kwargs)
if _threaded:
pool = ThreadPool(int(config.CFG["GENERAL"]["parallel_requ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18', '20']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'map'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '9', '11', '16']}; {'id': '4', 'type': 'identifier', 'children'... | For all stored items, run a method they possess. |
def make_csv_tables(self):
logger.info('Generate csv report tables')
report_parts = []
for sr in self.subreports:
for data_item in sr.report_data:
report_parts.append(TextPart(fmt='csv', text=data_item.csv, ext='csv'))
return report_parts | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_csv_tables'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Builds the report as a list of csv tables with titles. |
def spin_px(self):
return conversions.primary_spin(self.mass1, self.mass2, self.spin1x,
self.spin2x) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'spin_px'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Returns the x-component of the spin of the primary mass. |
def _do_batched_write_command(
namespace, operation, command, docs, check_keys, opts, ctx):
if ctx.sock_info.compression_context:
return _batched_write_command_compressed(
namespace, operation, command, docs, check_keys, opts, ctx)
return _batched_write_command(
namespace, op... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_do_batched_write_command'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8', '9', '10']}; {'id': '4', 'type':... | Batched write commands entry point. |
def on_rulebook(self, *args):
if self.rulebook is None:
return
self.rulebook.connect(self._trigger_redata, weak=False)
self.redata() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_rulebook'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Make sure to update when the rulebook changes |
def missing_db_response(func):
@wraps(func)
def with_exception_handling(*args, **kwargs):
try:
return func(*args, **kwargs)
except ConnectionError as error:
return (dict(error='Unable to connect to Configuration Db.',
error_message=str(error),
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'missing_db_response'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Decorator to check connection exceptions |
def log_before_after(name: str, desc: str):
def func_decorator(f):
@ft.wraps(f)
def wrapper(*args, **kwargs):
LOG.info("\n%s - %s", name, desc)
res = f(*args, **kwargs)
if StepResult.ERROR not in res:
LOG.info("%s - OK\n", name)
else:
            ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_before_after'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5... | Log customized string before & after running func. |
async def get_default(cls):
data = await cls._handler.read(id=cls._default_fabric_id)
return cls(data) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'get_default'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}... | Get the 'default' Fabric for the MAAS. |
async def on_shutdown(app):
for method in app.get("close_methods", []):
logger.debug("Calling < %s >", method)
if asyncio.iscoroutinefunction(method):
await method()
else:
method() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'on_shutdown'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'app'}... | app SHUTDOWN event handler |
def sphinx(self):
try:
assert __IPYTHON__
classdoc = ''
except (NameError, AssertionError):
scls = self.sphinx_class()
classdoc = ' ({})'.format(scls) if scls else ''
prop_doc = '**{name}**{cls}: {doc}{info}'.format(
name=self.name,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sphinx'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Generate Sphinx-formatted documentation for the Property |
def _read_credential_file(self, cfg):
self.username = cfg.get("rackspace_cloud", "username")
try:
self.password = cfg.get("rackspace_cloud", "api_key", raw=True)
except ConfigParser.NoOptionError as e:
self.password = cfg.get("rackspace_cloud", "password", raw=True) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_read_credential_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Parses the credential file with Rackspace-specific labels. |
async def write(self, data, eof = False, buffering = True):
if not self.outputstream:
self.outputstream = Stream()
self._startResponse()
elif (not buffering or eof) and not self._sendHeaders:
self._startResponse()
if not isinstance(data, bytes):
da... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Write output to current output stream |
def copyFilepath( self ):
clipboard = QApplication.instance().clipboard()
clipboard.setText(self.filepath())
clipboard.setText(self.filepath(), clipboard.Selection) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copyFilepath'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Copies the current filepath contents to the current clipboard. |
def _lcm(a, b):
if a == 0 or b == 0:
return 0
else:
return abs(a * b) // gcd(a, b) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_lcm'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'a'}; {'... | Least Common Multiple between 2 integers. |
def remove_from_gallery(self):
url = self._imgur._base_url + "/3/gallery/{0}".format(self.id)
self._imgur._send_request(url, needs_auth=True, method='DELETE')
if isinstance(self, Image):
item = self._imgur.get_image(self.id)
else:
item = self._imgur.get_album(self... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'remove_from_gallery'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Remove this image from the gallery. |
def model_exchange(model):
lower_default, upper_default = None, None
if model.default_flux_limit is not None:
lower_default = -model.default_flux_limit
upper_default = model.default_flux_limit
compounds = []
for compound, reaction_id, lower, upper in sorted(
itervalues(model.... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'model_exchange'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'mo... | Return exchange definition as YAML dict. |
def load(self):
basepath = os.path.dirname(os.path.abspath(__file__))
filename = os.sep.join([basepath, c.FOLDER_JSON, c.FILE_GAME_VERSIONS])
Handler.ALL_VERS_DATA = {}
with open(filename, "r") as f:
data = json.loads( f.read() )
self.update(data)
self._update... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'load'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | load ALL_VERS_DATA from disk |
def submit_button(*args, **kwargs):
submit_button = wtforms.SubmitField(*args, **kwargs)
submit_button.input_type = 'submit_button'
return submit_button | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'submit_button'}; {'id': '3', 'type': 'parameters', 'children': ['4', '6']}; {'id': '4', 'type': 'list_splat_pattern', 'children': ['5'... | Create a submit button |
def visit_Subscript(self, node: ast.Subscript) -> Any:
value = self.visit(node=node.value)
a_slice = self.visit(node=node.slice)
result = value[a_slice]
self.recomputed_values[node] = result
return result | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'visit_Subscript'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [],... | Visit the ``slice`` and a ``value`` and get the element. |
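
Each row above pairs a Python function (`code`), a flattened AST serialization (`sequence`, a `;`-separated list of node dict literals), and its `docstring`. The sketch below shows one way such a row could be parsed; it is not part of the dataset itself. It assumes full, untruncated rows stored as JSONL with exactly those three string fields, that node string values contain no `;`, and the file name `rows.jsonl` is hypothetical.

```python
# Minimal sketch: parse one row of this table into usable Python objects.
# Assumptions: full (untruncated) rows in a JSONL file named 'rows.jsonl'
# (hypothetical), fields 'code', 'sequence', 'docstring' as shown above,
# and no ';' inside node string values.
import ast
import json


def parse_sequence(sequence: str) -> dict:
    """Turn the ';'-separated node literals of the 'sequence' field into
    a mapping from node id to node dict."""
    nodes = {}
    for chunk in sequence.split(";"):
        chunk = chunk.strip()
        if not chunk:
            continue
        node = ast.literal_eval(chunk)  # each chunk is a Python dict literal
        nodes[node["id"]] = node
    return nodes


if __name__ == "__main__":
    with open("rows.jsonl", "r", encoding="utf-8") as fh:  # hypothetical path
        for line in fh:
            row = json.loads(line)
            tree = parse_sequence(row["sequence"])
            root = tree["0"]  # node '0' is the module node in every example above
            print(row["docstring"], "->",
                  [tree[child]["type"] for child in root["children"]])
```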