code stringlengths 51 2.34k | sequence stringlengths 1.16k 13.1k | docstring stringlengths 11 171 |
|---|---|---|
def request(self, method, url, params=None, **aio_kwargs):
oparams = {
'oauth_consumer_key': self.consumer_key,
'oauth_nonce': sha1(str(RANDOM()).encode('ascii')).hexdigest(),
'oauth_signature_method': self.signature.name,
'oauth_timestamp': str(int(time.time())),... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'request'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '10']}; {'id': '4', 'type': 'identifier', 'children': [... | Make a request to provider. |
def class_from_string(name):
module_name, class_name = name.rsplit('.', 1)
__import__(module_name)
module = sys.modules[module_name]
return getattr(module, class_name) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'class_from_string'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': ... | Get a python class object from its name |
def _collect_headers(self):
res = []
for prop in self.get_sorted_columns():
main_infos = self._get_prop_infos(prop)
if self._is_excluded(prop, main_infos):
continue
if isinstance(prop, RelationshipProperty):
main_infos = self._collect_r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_collect_headers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Collect headers from the models attribute info col |
def start_stream(self, stream_type, **stream_parameters):
if stream_type:
self.subscribe(stream_type, **stream_parameters)
else:
self.handle_error("You need to set a stream type") | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'start_stream'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Starts new stream for given type with given parameters |
def save_state(self):
set_setting('lastSourceDir', self.source_directory.text())
set_setting('lastOutputDir', self.output_directory.text())
set_setting(
'useDefaultOutputDir', self.scenario_directory_radio.isChecked()) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'save_state'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | Save current state of GUI to configuration file. |
def xmlns(source):
namespaces = {}
events=("end", "start-ns", "end-ns")
for (event, elem) in iterparse(source, events):
if event == "start-ns":
prefix, ns = elem
namespaces[prefix] = ns
elif event == "end":
break
if hasattr(source, "seek"):
sou... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'xmlns'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'source'}; {... | Returns a map of prefix to namespace for the given XML file. |
def new(cls, package):
partname = package.next_partname("/word/header%d.xml")
content_type = CT.WML_HEADER
element = parse_xml(cls._default_header_xml())
return cls(partname, content_type, element, package) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'new'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; {... | Return newly created header part. |
def _send_invitation(self, enrollment, event):
self.log('Sending enrollment status mail to user')
self._send_mail(self.config.invitation_subject, self.config.invitation_mail, enrollment, event) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_send_invitation'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Send an invitation mail to an open enrolment |
def do_step(self, args):
self._do_print_from_last_cmd = True
self._interp.step_into()
return True | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'do_step'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self... | Step INTO the next statement |
def ping_entry(self, entry):
entry_url = '%s%s' % (self.ressources.site_url,
entry.get_absolute_url())
categories = '|'.join([c.title for c in entry.categories.all()])
try:
reply = self.server.weblogUpdates.extendedPing(
self.ressources.c... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'ping_entry'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Ping an entry to a directory. |
def clear_to_reset(self, config_vars):
super(TileManagerState, self).clear_to_reset(config_vars)
self.registered_tiles = self.registered_tiles[:1]
self.safe_mode = False
self.debug_mode = False | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'clear_to_reset'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value'... | Clear to the state immediately after a reset. |
def check_image_file_header(filename):
with tf.gfile.Open(filename, 'rb') as f:
magic = read32(f)
read32(f)
rows = read32(f)
cols = read32(f)
if magic != 2051:
raise ValueError('Invalid magic number %d in MNIST file %s' % (magic,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'check_image_file_header'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Validate that filename corresponds to images for the MNIST dataset. |
def _add_intent_interactive(self, intent_num=0):
print ("Name of intent number : ", intent_num)
slot_type_mappings = load_builtin_slots()
intent_name = read_from_user(str)
print ("How many slots?")
num_slots = read_from_user(int)
slot_list = []
for... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_add_intent_interactive'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Interactively add a new intent to the intent schema object |
def _iter_backtrack(ex, rand=False):
if ex is One:
yield dict()
elif ex is not Zero:
if rand:
v = random.choice(ex.inputs) if rand else ex.top
else:
v = ex.top
points = [{v: 0}, {v: 1}]
if rand:
random.shuffle(points)
for point ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_iter_backtrack'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Iterate through all satisfying points using backtrack algorithm. |
def process_object(obj):
"Hook to process the object currently being displayed."
invalid_options = OptsMagic.process_element(obj)
if invalid_options: return invalid_options
OutputMagic.info(obj) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'process_object'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'ob... | Hook to process the object currently being displayed. |
def __get_file(self, file):
file_object = None
if file['name'] in request.files:
file_object = request.files[file['name']]
clean_filename = secure_filename(file_object.filename)
if clean_filename == '':
return file_object
if file_obj... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '__get_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Get request file and do a security check |
def _adjust_width(self):
if self.bar_width > self.max_iter:
self.bar_width = int(self.max_iter) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_adjust_width'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | Shrinks bar if number of iterations is less than the bar width |
def pong_received(self, payload=None):
if self._timer is not None:
self._timer.cancel()
self._failures = 0
asyncio.async(self.send_ping(payload=payload)) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pong_received'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Called when a pong is received. So the timer is cancelled |
def evaluate_binop_math(self, operation, left, right, **kwargs):
if not operation in self.binops_math:
raise ValueError("Invalid math binary operation '{}'".format(operation))
if left is None or right is None:
return None
if not isinstance(left, (list, ListIP)):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'evaluate_binop_math'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'c... | Evaluate given mathematical binary operation with given operands. |
def validate_headers(self):
super().validate()
self.validate_header(self.channeldir, self.channelinfo, CHANNEL_INFO_HEADER)
self.validate_header(self.channeldir, self.contentinfo, CONTENT_INFO_HEADER)
if self.has_exercises():
self.validate_header(self.channeldir, self.exercis... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'validate_headers'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': '... | Check if CSV metadata files have the right format. |
def find_tasks(self, overrides):
tasks = self.default_tasks()
configuration = self.collector.configuration
for image in list(configuration["images"].keys()):
path = configuration.path(["images", image, "tasks"], joined="images.{0}.tasks".format(image))
nxt = configuration... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'find_tasks'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Find the custom tasks and record the associated image with each task |
def copy(self, klass=_x):
chain = super().copy()
new_chain = klass(chain._args[0])
new_chain._tokens = [[
chain.compose, [], {},
]]
return new_chain | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'copy'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | A new chain beginning with the current chain tokens and argument. |
def delete(self):
resp = self.r_session.delete(self.database_url)
resp.raise_for_status() | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'... | Deletes the current database from the remote instance. |
def setup(self, *args, **kwargs):
if self.comp is None:
self.comp = Compiler(*args, **kwargs)
else:
self.comp.setup(*args, **kwargs) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'setup'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '7']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 's... | Set parameters for the compiler. |
def url(self):
return reverse('archives:thread_view',
args=[self.mailinglist.name,
self.thread.subject_token]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'url'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Shortcut to get thread url |
def verify(opts):
resources = _load(opts.resources, opts.output_dir)
if opts.all:
opts.resource_names = ALL
invalid = _invalid(resources, opts.resource_names)
if not invalid:
if not opts.quiet:
print("All resources successfully downloaded")
return 0
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'verify'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'opts'}; {'... | Verify that one or more resources were downloaded successfully. |
def scan(self):
found = []
for addr in range(0,0x80):
try:
self._i2c_bus.read_byte(addr)
except OSError:
continue
found.append(addr)
return found | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'scan'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id... | Try to read a byte from each address, if you get an OSError it means the device isnt there |
def estimate_band_connection(prev_eigvecs, eigvecs, prev_band_order):
metric = np.abs(np.dot(prev_eigvecs.conjugate().T, eigvecs))
connection_order = []
for overlaps in metric:
maxval = 0
for i in reversed(range(len(metric))):
val = overlaps[i]
if i in connection_orde... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'estimate_band_connection'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'childre... | A function to order the phonon eigenvectors taken from phonopy |
def download_seq_file(self, outdir, force_rerun=False):
uniprot_fasta_file = download_uniprot_file(uniprot_id=self.id,
filetype='fasta',
outdir=outdir,
force_r... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download_seq_file'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [],... | Download and load the UniProt FASTA file |
def _text_image(page):
img = None
alt = page.data.get('label') or page.data.get('title')
source = _image(page)
if source:
img = "" % (alt, source)
return img | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_text_image'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'page'... | returns text image URL |
def append_vobject(self, ical, filename=None):
if not filename:
filename = self._filename
elif filename not in self._reminders:
return
with self._lock:
outdat = self.to_reminders(ical)
open(filename, 'a').write(outdat)
return Remind._get_ui... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'append_vobject'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'v... | Append a Remind command generated from the iCalendar to the file |
def log_config(verbose=1):
if verbose == 0:
level = logging.WARNING
fmt = "%(module)s: %(message)s"
elif verbose == 1:
level = logging.INFO
fmt = "%(module)s [@%(lineno)s]: %(message)s"
else:
level = logging.DEBUG
fmt = "%(module)s [%(lineno)s]: %(levelname)s:... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'log_config'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ['5', '6']}; ... | Set up logging the way I like it. |
def pending():
upgrader = InvenioUpgrader()
logger = upgrader.get_logger()
try:
upgrades = upgrader.get_upgrades()
if not upgrades:
logger.info("All upgrades have been applied.")
return
logger.info("Following upgrade(s) are ready to be applied:")
for u... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'pending'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11', '19']}; {'id': '5',... | Command for showing upgrades ready to be applied. |
def _calcDepths(self):
depths = self.layerBoundaries.mean(axis=1)[1:]
depth_y = []
for y in self.y:
if y in ['p23', 'b23', 'nb23']:
depth_y = np.r_[depth_y, depths[0]]
elif y in ['p4', 'ss4(L23)', 'ss4(L4)', 'b4', 'nb4']:
depth_y = np.r_[de... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_calcDepths'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'... | return the cortical depth of each subpopulation |
def serialize_number(x, fmt=SER_BINARY, outlen=None):
ret = b''
if fmt == SER_BINARY:
while x:
x, r = divmod(x, 256)
ret = six.int2byte(int(r)) + ret
if outlen is not None:
assert len(ret) <= outlen
ret = ret.rjust(outlen, b'\0')
return ret... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '11']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'serialize_number'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8']}; {'id': '4', 'type': 'identifier', 'children': [],... | Serializes `x' to a string of length `outlen' in format `fmt' |
def contrast(self, color, step):
hls = colorsys.rgb_to_hls(*self.rgb(color))
if self.is_light(color):
return colorsys.hls_to_rgb(hls[0], hls[1] - step, hls[2])
else:
return colorsys.hls_to_rgb(hls[0], hls[1] + step, hls[2]) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'contrast'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | if color is dark, will return a lighter one, otherwise darker |
def download(self):
url = 'http://forms.irs.gov/app/pod/dataDownload/fullData'
r = requests.get(url, stream=True)
with open(self.zip_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=30720):
logger.debug('Downloading...')
f.write(chunk)
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'download'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; ... | Download the archive from the IRS website. |
def extract_feed(
inpath: str, outpath: str, view: View, config: nx.DiGraph = None
) -> str:
config = default_config() if config is None else config
config = remove_node_attributes(config, "converters")
feed = load_feed(inpath, view, config)
return write_feed_dangerously(feed, outpath) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '23', '25']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'extract_feed'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8', '12', '16']}; {'id': '4', 'type': 'typed_parameter', '... | Extract a subset of a GTFS zip into a new file |
def sign_message(body: ByteString, secret: Text) -> Text:
return 'sha1={}'.format(
hmac.new(secret.encode(), body, sha1).hexdigest()
) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '12', '14']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'sign_message'}; {'id': '3', 'type': 'parameters', 'children': ['4', '8']}; {'id': '4', 'type': 'typed_parameter', 'children': [... | Compute a message's signature. |
def register_cmdfinalization_hook(self, func: Callable[[plugin.CommandFinalizationData],
plugin.CommandFinalizationData]) -> None:
self._validate_cmdfinalization_callable(func)
self._cmdfinalization_hooks.append(func) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '20', '22']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'register_cmdfinalization_hook'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', '... | Register a hook to be called after a command is completed, whether it completes successfully or not. |
def getFields(cls) :
s = set(cls.columns.keys())
s.remove('json')
s.remove('raba_id')
return s | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'getFields'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cls'}; ... | returns a set of the available fields. In order to be able ti securely loop of the fields, "raba_id" and "json" are not included in the set |
def use_comparative_sequence_rule_enabler_rule_view(self):
self._object_views['sequence_rule_enabler_rule'] = COMPARATIVE
for session in self._get_provider_sessions():
try:
session.use_comparative_sequence_rule_enabler_rule_view()
except AttributeError:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'use_comparative_sequence_rule_enabler_rule_view'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifi... | Pass through to provider SequenceRuleEnablerRuleLookupSession.use_comparative_sequence_rule_enabler_rule_view |
def _handle_call(self, actual_call, stubbed_call):
self._actual_calls.append(actual_call)
use_call = stubbed_call or actual_call
return use_call.return_value | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_handle_call'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Extends Stub call handling behavior to be callable by default. |
def view_contents(token, dstore):
try:
desc = dstore['oqparam'].description
except KeyError:
desc = ''
data = sorted((dstore.getsize(key), key) for key in dstore)
rows = [(key, humansize(nbytes)) for nbytes, key in data]
total = '\n%s : %s' % (
dstore.filename, humansize(os.p... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'view_contents'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | Returns the size of the contents of the datastore and its total size |
def value(self):
dtype = lib.get_data_type(self._data)
dvalue = lib.get_data_value(self._data)
if dvalue == ffi.NULL:
return None
return self.python_value(dtype, dvalue) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'value'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'i... | Return the DATA_OBJECT stored value. |
def write_config(cfg):
cfg_path = '/usr/local/etc/freelan'
cfg_file = 'freelan_TEST.cfg'
cfg_lines = []
if not isinstance(cfg, FreelanCFG):
if not isinstance(cfg, (list, tuple)):
print("Freelan write input can not be processed.")
return
cfg_lines = cfg
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_config'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'cfg'... | try writing config file to a default directory |
def _other_pipeline_samples(summary_file, cur_samples):
cur_descriptions = set([s[0]["description"] for s in cur_samples])
out = []
if utils.file_exists(summary_file):
with open(summary_file) as in_handle:
for s in yaml.safe_load(in_handle).get("samples", []):
if s["descr... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_other_pipeline_samples'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': []... | Retrieve samples produced previously by another pipeline in the summary output. |
def _get_from(self, required_for=None):
line = self.next_line()
if line is None:
return None
elif line.startswith(b'from '):
return line[len(b'from '):]
elif required_for:
self.abort(errors.MissingSection, required_for, 'from')
else:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_from'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Parse a from section. |
def write_csv_header(mol, csv_writer):
line = []
line.append('id')
line.append('status')
queryList = mol.properties.keys()
for queryLabel in queryList:
line.append(queryLabel)
csv_writer.writerow(line) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'write_csv_header'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Write the csv header |
def delete_vpnservice(self, vpnservice):
vpnservice_id = self._find_vpnservice_id(vpnservice)
ret = self.network_conn.delete_vpnservice(vpnservice_id)
return ret if ret else True | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'delete_vpnservice'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Deletes the specified VPN service |
def django_js(context, jquery=True, i18n=True, csrf=True, init=True):
return {
'js': {
'minified': not settings.DEBUG,
'jquery': _boolean(jquery),
'i18n': _boolean(i18n),
'csrf': _boolean(csrf),
'init': _boolean(init),
}
} | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '17']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'django_js'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '8', '11', '14']}; {'id': '4', 'type': 'identifier', 'children'... | Include Django.js javascript library in the page |
def daily_returns(ts, **kwargs):
relative = kwargs.get('relative', 0)
return returns(ts, delta=BDay(), relative=relative) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'daily_returns'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value':... | re-compute ts on a daily basis |
def run_shell(args: dict) -> int:
if args.get('project_directory'):
return run_batch(args)
shell = CauldronShell()
if in_project_directory():
shell.cmdqueue.append('open "{}"'.format(os.path.realpath(os.curdir)))
shell.cmdloop()
return 0 | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8', '10']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'run_shell'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'typed_parameter', 'children': ['5', '6']... | Run the shell sub command |
def split_sentences(s, pad=0):
sentences = []
for index, sentence in enumerate(s.split('. ')):
padding = ''
if index > 0:
padding = ' ' * (pad + 1)
if sentence.endswith('.'):
sentence = sentence[:-1]
sentences.append('%s %s.' % (padding, sentence.strip()))... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'split_sentences'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value... | Split sentences for formatting. |
def setSizeMetadata(self, size):
    """Set size image metadata to what has been reliably identified."""
    # Either the SIZE flag is still pending an update, or the new
    # value must agree with what we already recorded.
    assert self.needMetadataUpdate(CoverImageMetadata.SIZE) or self.size == size
    self.size = size
    # Clear the SIZE bit: this piece of metadata is now settled.
    self.check_metadata &= ~CoverImageMetadata.SIZE
def _interpolation_escape(self, namespace):
for key, val in namespace.items():
if '%' in val:
namespace[key] = self.INTERPOLATION_ESCAPE.sub(lambda match: '%' + match.group(0), val) | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_interpolation_escape'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Re-escape interpolation strings. |
def conflicted(path_to_file):
    """Whether there are any conflict markers in that file.

    A line starting with eight repeated '>', '=', '"' or '<'
    characters counts as a conflict marker.
    """
    markers = tuple(marker * 8 for marker in '>="<')
    # Use a context manager so the handle is closed even on the early
    # return (the original left the file open).
    with open(path_to_file, 'r') as handle:
        for line in handle:
            # str.startswith accepts a tuple of prefixes.
            if line.startswith(markers):
                return True
    return False
def serialize(self):
if self.response is not None:
return {'messaging_type': 'RESPONSE'}
if self.update is not None:
return {'messaging_type': 'UPDATE'}
if self.tag is not None:
return {
'messaging_type': 'MESSAGE_TAG',
'tag': s... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'serialize'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'};... | Generates the messaging-type-related part of the message dictionary. |
def __arg_comps(self):
    """Return the function name plus the parsed args and kwargs."""
    fun = self.argv[0] if self.argv else ''
    parsed_input = salt.utils.args.parse_input(
        self.argv[1:],
        condition=False,
        no_parse=self.opts.get('no_parse', []))
    # parse_input yields positional args first, keyword args second.
    return fun, parsed_input[0], parsed_input[1]
def pks(self):
    """Lazy-load and cache the queryset's primary keys."""
    if self._primary_keys is None:
        # Materialize once; later calls reuse the cached list.
        pk_values = self.queryset.values_list('pk', flat=True)
        self._primary_keys = list(pk_values)
    return self._primary_keys
def _get_go2nt(goids, go2nt_all):
go_nt_list = []
goids_seen = set()
for goid_usr in goids:
ntgo = go2nt_all[goid_usr]
goid_main = ntgo.id
if goid_main not in goids_seen:
goids_seen.add(goid_main)
go_nt_list.append((goid_main, ntgo))
return go_nt_list | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_go2nt'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'g... | Get user go2nt using main GO IDs, not alt IDs. |
def finish():
pretend = context.get('pretend', False)
if not pretend and (git.staged() or git.unstaged()):
log.err(
"You have uncommitted changes in your repo!\n"
"You need to stash them before you merge the hotfix branch"
)
sys.exit(1)
develop = conf.get('git... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'finish'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '15', '48', '58', '68', '7... | Merge current feature into develop. |
def build(self, name):
"build an object for the specified typename as defined in the schema"
if isinstance(name, basestring):
type = self.resolver.find(name)
if type is None:
raise TypeNotFound(name)
else:
type = name
cls = type.name
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'build'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}... | build an object for the specified typename as defined in the schema |
def cli(env, prop):
try:
if prop == 'network':
env.fout(get_network())
return
meta_prop = META_MAPPING.get(prop) or prop
env.fout(SoftLayer.MetadataManager().get(meta_prop))
except SoftLayer.TransportError:
raise exceptions.CLIAbort(
'Cannot co... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env'}; {... | Find details about this machine. |
def tags(norm):
    """Divide a normalized tag string into hierarchical layers.

    'a.b.c' -> ['a', 'a.b', 'a.b.c'].
    """
    parts = norm.split('.')
    layers = []
    for depth in range(1, len(parts) + 1):
        layers.append('.'.join(parts[:depth]))
    return layers
def perform_create(self, serializer):
    """Attach the requesting user when a new node is added."""
    if serializer.instance is not None:
        # An existing instance means this is not a creation; do nothing.
        return
    serializer.save(user=self.request.user)
def batch(batch_size, items):
"Batch items into groups of batch_size"
items = list(items)
if batch_size is None:
return [items]
MISSING = object()
padded_items = items + [MISSING] * (batch_size - 1)
groups = zip(*[padded_items[i::batch_size] for i in range(batch_size)])
return [[item... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'batch'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'batch_... | Batch items into groups of batch_size |
def auto_detect_serial_unix(preferred_list=['*']):
import glob
glist = glob.glob('/dev/ttyS*') + glob.glob('/dev/ttyUSB*') + glob.glob('/dev/ttyACM*') + glob.glob('/dev/serial/by-id/*')
ret = []
others = []
for d in glist:
matches = False
for preferred in preferred_list:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '8']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'auto_detect_serial_unix'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'default_parameter', 'children': ... | try to auto-detect serial ports on unix |
def volume_create(self, name, size=100, snapshot=None, voltype=None,
availability_zone=None):
if self.volume_conn is None:
raise SaltCloudSystemExit('No cinder endpoint available')
nt_ks = self.volume_conn
response = nt_ks.volumes.create(
size=size,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '18']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'volume_create'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '9', '12', '15']}; {'id': '4', 'type': 'identifier', '... | Create a block device |
def make_lcc(attrs_dict, globe):
attr_mapping = [('central_longitude', 'longitude_of_central_meridian'),
('standard_parallels', 'standard_parallel')]
kwargs = CFProjection.build_projection_kwargs(attrs_dict, attr_mapping)
if 'standard_parallels' in kwargs:
try:
len(kw... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'make_lcc'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'att... | Handle Lambert conformal conic projection. |
def _init_file_logger(logger, level, log_path, log_size, log_count):
if level not in [logging.NOTSET, logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR, logging.CRITICAL]:
level = logging.DEBUG
for h in logger.handlers:
if isinstance(h, logging.handlers.RotatingFileHandler):
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '9']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_init_file_logger'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6', '7', '8']}; {'id': '4', 'type': 'identifier', 'chil... | one logger only have one level RotatingFileHandler |
def senses(self, bestonly=False):
    """Return a list of all predicted senses, in first-seen order.

    When *bestonly* is true, only the first (best-ranked) sense of
    each word is considered.
    """
    # Renamed the list from the unreadable `l`, fixed the
    # `not x in y` anti-idiom, and stopped shadowing the function
    # name with the inner loop variable. Behavior is unchanged.
    predicted = []
    for word_id, word_senses, distance in self:
        for sense, confidence in word_senses:
            if sense not in predicted:
                predicted.append(sense)
            if bestonly:
                # Only the highest-ranked sense per word is wanted.
                break
    return predicted
def update_finished(self):
    """Signal the clusterer that updating with new data has finished."""
    if not self.is_updateable:
        logger.critical(classes.get_classname(self.jobject) + " is not updateable!")
        return
    javabridge.call(self.jobject, "updateFinished", "()V")
def alchemyencoder(obj):
    """JSON encoder function for SQLAlchemy special classes.

    Dates (including datetimes) become ISO-format strings, Decimals
    become floats; any other type yields None.
    """
    # Decimal is not a date subtype, so check order is irrelevant.
    if isinstance(obj, decimal.Decimal):
        return float(obj)
    if isinstance(obj, datetime.date):
        return obj.isoformat()
def parse_rss_file(filename: str) -> RSSChannel:
    """Parse an RSS feed from a local XML file."""
    tree = parse_xml(filename)
    return _parse_rss(tree.getroot())
def show(self, show=True):
    """Show or hide the tray icon (no-op if already in that state)."""
    if show:
        if not self.visible:
            self._show()
    elif self.visible:
        self._hide()
def reg(self):
if self.type.is_displ or self.type.is_phrase:
size = core.get_native_size()
return base.get_register_name(self.reg_id, size)
if self.type.is_reg:
return base.get_register_name(self.reg_id, self.size)
else:
raise exceptions.SarkOperan... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'reg'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {'id'... | Name of the register used in the operand. |
def Description(self):
    """Return searchable data (own id plus parent title) as Description."""
    parent_title = self.aq_parent.Title()
    descr = " ".join((self.getId(), parent_title))
    # Normalize to unicode first, then emit UTF-8 bytes.
    return safe_unicode(descr).encode('utf-8')
def request_transfer(subject, recipient, comment):
TransferPermission(subject).test()
if recipient == (subject.organization or subject.owner):
raise ValueError(
'Recipient should be different than the current owner')
transfer = Transfer.objects.create(
owner=subject.organization ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '7']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'request_transfer'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '6']}; {'id': '4', 'type': 'identifier', 'children': [], ... | Initiate a transfer request |
def count_processors():
if 'SLURM_NTASKS' in os.environ: return int(os.environ['SLURM_NTASKS'])
elif 'SLURM_JOB_CPUS_PER_NODE' in os.environ:
text = os.environ['SLURM_JOB_CPUS_PER_NODE']
if is_integer(text): return int(text)
else:
n, N = re.findall("([1-9]+)\(x([1-9]+)\)",... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'count_processors'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5']}; {'id': '5', 't... | How many cores does the current computer have ? |
def find_child_element(elm, child_local_name):
    """Find an XML child element by local tag name.

    Returns the first direct child whose QName local part equals
    ``child_local_name``, or None when there is no match.
    """
    # Iterate children directly instead of the range(len(...)) index
    # loop — same traversal order, clearer code.
    for child_elm in elm:
        # Compare only the local part, ignoring any namespace prefix.
        if etree.QName(child_elm).localname == child_local_name:
            return child_elm
    return None
def cancel(task_id, secret_key=None, url=None):
    """Cancel scheduled task with `task_id`.

    Missing credentials/endpoint fall back to the configured defaults.
    """
    secret_key = secret_key or default_key()
    url = url or default_url()
    endpoint = '%s/cancel' % url
    payload = {'id': task_id}
    return _send_with_auth(payload, secret_key, endpoint)
def contributors(self):
    """Return the list of contributors, fetching it at most once.

    Caches on ``None`` (like the sibling lazy loaders) instead of
    truthiness: the original re-fetched on every access whenever the
    fetched list was empty.
    """
    if self._contributors is None:
        self._contributors = self.get_contributors()
    return self._contributors
def cli(env, account_id):
manager = SoftLayer.CDNManager(env.client)
account = manager.get_account(account_id)
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
table.add_row(['id', account['id']])
table.add_row(['account_name', account[... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'cli'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'env'}; {... | Detail a CDN Account. |
def generate_gaussian_profile(seeing_fwhm):
FWHM_G = 2 * math.sqrt(2 * math.log(2))
sigma = seeing_fwhm / FWHM_G
amplitude = 1.0 / (2 * math.pi * sigma * sigma)
seeing_model = Gaussian2D(amplitude=amplitude,
x_mean=0.0,
y_mean=0.0,
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'generate_gaussian_profile'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], '... | Generate a normalized Gaussian profile from its FWHM |
def dlogprior(self, param):
    """Return the derivative of the log prior for free parameter `param`."""
    # Guard against parameters that are not free in this model.
    assert param in self.freeparams, "Invalid param: {0}".format(param)
    derivatives = self._dlogprior
    return derivatives[param]
def _get_var_res(self, graph, var, other_var):
    """Fetch the weights for ``var`` from ``graph`` in a fresh session."""
    with tf.Session(graph=graph) as sess:
        # Run the companion initializer before reading the variable.
        sess.run(other_var["init"])
        return self._get_var_res_sess(sess, var)
def team(page):
    """Return the team name parsed from the page's <title>, or None.

    Expects a title of the form "<something> - <team> | ...".
    """
    soup = BeautifulSoup(page)
    try:
        title = soup.find('title').text
        return title.split(' | ')[0].split(' - ')[1]
    except (AttributeError, IndexError):
        # No <title> tag (find() returned None) or the title does not
        # have the expected shape. The original bare `except:` also
        # swallowed KeyboardInterrupt/SystemExit — narrowed here.
        return None
def _get_populate_from(self, model_instance):
if hasattr(self.populate_from, '__call__'):
tz = self.populate_from(model_instance)
else:
from_attr = getattr(model_instance, self.populate_from)
tz = callable(from_attr) and from_attr() or from_attr
try:
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_get_populate_from'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'va... | Retrieves the timezone or None from the `populate_from` attribute. |
def _set_config(xpath, element):
    """Send a 'set' config request to the device and validate the reply."""
    query = {
        'type': 'config',
        'action': 'set',
        'xpath': xpath,
        'element': element,
    }
    return _validate_response(__proxy__['panos.call'](query))
def compute(self):
if "Signature" in self.params:
raise RuntimeError("Existing signature in parameters")
if self.signature_version is not None:
version = self.signature_version
else:
version = self.params["SignatureVersion"]
if str(version) == "1":
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'compute'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'self'}; {... | Compute and return the signature according to the given data. |
def _process(self, segments):
mlh, mlw = self.max_line_height, self.max_line_width
s = segments.astype(numpy.uint32)
order = mlw * (s[:, 1] // mlh) + s[:, 0]
sort_order = numpy.argsort(order)
return segments[sort_order] | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '6']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_process'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'sel... | sort segments in read order - left to right, up to down |
def _on_closed(self):
    """Invoked when the Redis connection is closed.

    Logs the event, flags the client as disconnected, runs the
    close hook, then drops the stream reference.
    """
    LOGGER.error('Redis connection closed')
    self.connected = False
    # Hook runs before the stream reference is cleared — presumably it
    # may still need it; keep this ordering (NOTE(review): confirm).
    self._on_close()
    self._stream = None
def _validate_freq(self):
from pandas.tseries.frequencies import to_offset
try:
return to_offset(self.window)
except (TypeError, ValueError):
raise ValueError("passed window {0} is not "
"compatible with a datetimelike "
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_validate_freq'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'value': 'se... | Validate & return window frequency. |
def network_traffic_ports(instance):
for key, obj in instance['objects'].items():
if ('type' in obj and obj['type'] == 'network-traffic' and
('src_port' not in obj or 'dst_port' not in obj)):
yield JSONError("The Network Traffic object '%s' should contain "
... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'network_traffic_ports'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'valu... | Ensure network-traffic objects contain both src_port and dst_port. |
def _SignedBinaryIDFromURN(binary_urn
):
if binary_urn.RelativeName(GetAFF4PythonHackRoot()):
return rdf_objects.SignedBinaryID(
binary_type=rdf_objects.SignedBinaryID.BinaryType.PYTHON_HACK,
path=binary_urn.RelativeName(GetAFF4PythonHackRoot()))
elif binary_urn.Relativ... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '5']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': '_SignedBinaryIDFromURN'}; {'id': '3', 'type': 'parameters', 'children': ['4']}; {'id': '4', 'type': 'identifier', 'children': [], 'val... | Converts an AFF4 URN for a signed binary to a SignedBinaryID. |
def transformer_mlperf_tpu():
hparams = transformer_base_v3()
hparams.mlperf_mode = True
hparams.symbol_modality_num_shards = 1
hparams.max_length = 256
hparams.batch_size = 2048
hparams.hidden_size = 1024
hparams.filter_size = 4096
hparams.num_heads = 16
hparams.attention_dropout_broadcast_dims = "0,... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '4']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'transformer_mlperf_tpu'}; {'id': '3', 'type': 'parameters', 'children': []}; {'id': '4', 'type': 'block', 'children': ['5', '11', '17'... | HParams for Transformer model on TPU for MLPerf on TPU 2x2. |
def show_event_handlers(self, stream=sys.stdout, verbose=0):
lines = ["List of event handlers installed:"]
for handler in self.event_handlers:
if verbose:
lines.extend(handler.__class__.cls2str().split("\n"))
else:
lines.extend(str(handler).split("... | {'id': '0', 'type': 'module', 'children': ['1']}; {'id': '1', 'type': 'function_definition', 'children': ['2', '3', '13']}; {'id': '2', 'type': 'function_name', 'children': [], 'value': 'show_event_handlers'}; {'id': '3', 'type': 'parameters', 'children': ['4', '5', '10']}; {'id': '4', 'type': 'identifier', 'children':... | Print to `stream` the event handlers installed for this flow. |
def unread(thread, user):
    """Whether the thread has any unread messages for ``user``."""
    pending = thread.userthread_set.filter(user=user, unread=True)
    return bool(pending)
def handle(self, **options):
    """Call "startapp" with our bundled template to generate the app."""
    here = os.path.dirname(os.path.abspath(__file__))
    app_name = options.pop("name")
    call_command("startapp", app_name, template=here + "/app_template", **options)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.