id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
28,362
def get_binary_stream(name): opener = binary_streams.get(name) if (opener is None): raise TypeError(('Unknown standard stream %r' % name)) return opener()
[ "def", "get_binary_stream", "(", "name", ")", ":", "opener", "=", "binary_streams", ".", "get", "(", "name", ")", "if", "(", "opener", "is", "None", ")", ":", "raise", "TypeError", "(", "(", "'Unknown standard stream %r'", "%", "name", ")", ")", "return", ...
returns a system stream for byte processing .
train
true
28,363
def service_up(s_name, **connection_args): service = _service_get(s_name, **connection_args) return ((service is not None) and (service.get_svrstate() == 'UP'))
[ "def", "service_up", "(", "s_name", ",", "**", "connection_args", ")", ":", "service", "=", "_service_get", "(", "s_name", ",", "**", "connection_args", ")", "return", "(", "(", "service", "is", "not", "None", ")", "and", "(", "service", ".", "get_svrstate...
checks if a service is up cli example: .
train
true
28,365
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
28,366
def get_mounted_filesystems(): mounted = [] lines = subprocess.check_output(['/sbin/mount']).splitlines() for line in lines: reg = RE_MOUNT.search(line) if (not reg): continue mounted.append(reg.groupdict()) return mounted
[ "def", "get_mounted_filesystems", "(", ")", ":", "mounted", "=", "[", "]", "lines", "=", "subprocess", ".", "check_output", "(", "[", "'/sbin/mount'", "]", ")", ".", "splitlines", "(", ")", "for", "line", "in", "lines", ":", "reg", "=", "RE_MOUNT", ".", ...
return a list of dict with info of mounted file systems each dict is composed of: - fs_spec - fs_file - fs_vfstype .
train
false
28,367
def getvm_info(vm, depth=1): maxdepth = 10 if hasattr(vm, 'childEntity'): if (depth > maxdepth): return vmlist = vm.childEntity for c in vmlist: getvm_info(c, (depth + 1)) return if hasattr(vm, 'CloneVApp_Task'): vmlist = vm.vm for c in vmlist: getvm_info(c) return try: uuid = vm.config.instanceUuid uuid = uuid.replace('-', '') INV_VM.append(uuid) except Exception as e: print ('Caught exception : ' + str(e)) return (-1)
[ "def", "getvm_info", "(", "vm", ",", "depth", "=", "1", ")", ":", "maxdepth", "=", "10", "if", "hasattr", "(", "vm", ",", "'childEntity'", ")", ":", "if", "(", "depth", ">", "maxdepth", ")", ":", "return", "vmlist", "=", "vm", ".", "childEntity", "...
print information for a particular virtual machine or recurse into a folder with depth protection from the getallvms .
train
false
28,370
def list_ikepolicies(profile=None): conn = _auth(profile) return conn.list_ikepolicies()
[ "def", "list_ikepolicies", "(", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "list_ikepolicies", "(", ")" ]
fetches a list of all configured ikepolicies for a tenant cli example: .
train
false
28,371
def connectProtocol(endpoint, protocol): class OneShotFactory(Factory, ): def buildProtocol(self, addr): return protocol return endpoint.connect(OneShotFactory())
[ "def", "connectProtocol", "(", "endpoint", ",", "protocol", ")", ":", "class", "OneShotFactory", "(", "Factory", ",", ")", ":", "def", "buildProtocol", "(", "self", ",", "addr", ")", ":", "return", "protocol", "return", "endpoint", ".", "connect", "(", "On...
connect a protocol instance to an endpoint .
train
false
28,372
def gettags(comment): tags = [] tag = None datatype = None name = None tag_lineno = lineno = 0 tag_text = [] for line in comment.split('\n'): line = line.strip() if line.startswith('@'): tags.append((tag_lineno, tag, datatype, name, '\n'.join(tag_text))) parts = line.split(None, 3) if (len(parts) == 1): datatype = '' name = '' tag_text = [] elif (len(parts) == 2): datatype = parts[1] name = '' tag_text = [] elif (len(parts) == 3): datatype = parts[1] name = parts[2] tag_text = [] elif (len(parts) == 4): datatype = parts[1] name = parts[2] tag_text = [parts[3].lstrip()] tag = parts[0][1:] tag_lineno = lineno elif line: tag_text.append(line) lineno += 1 tags.append((tag_lineno, tag, datatype, name, '\n'.join(tag_text))) return tags
[ "def", "gettags", "(", "comment", ")", ":", "tags", "=", "[", "]", "tag", "=", "None", "datatype", "=", "None", "name", "=", "None", "tag_lineno", "=", "lineno", "=", "0", "tag_text", "=", "[", "]", "for", "line", "in", "comment", ".", "split", "("...
parse documentation strings into javadoc-like tokens .
train
false
28,373
@auth.route('/reset-password/<token>', methods=['GET', 'POST']) def reset_password(token): if (not current_user.is_anonymous): return redirect(url_for('forum.index')) form = ResetPasswordForm() if form.validate_on_submit(): (expired, invalid, user) = get_token_status(form.token.data, 'reset_password') if invalid: flash(_('Your password token is invalid.'), 'danger') return redirect(url_for('auth.forgot_password')) if expired: flash(_('Your password token is expired.'), 'danger') return redirect(url_for('auth.forgot_password')) if user: user.password = form.password.data user.save() flash(_('Your password has been updated.'), 'success') return redirect(url_for('auth.login')) form.token.data = token return render_template('auth/reset_password.html', form=form)
[ "@", "auth", ".", "route", "(", "'/reset-password/<token>'", ",", "methods", "=", "[", "'GET'", ",", "'POST'", "]", ")", "def", "reset_password", "(", "token", ")", ":", "if", "(", "not", "current_user", ".", "is_anonymous", ")", ":", "return", "redirect",...
view function that handles a reset password request .
train
false
28,374
def unregister_models(engine): models = (ArtifactDependency, ArtifactBlobLocation, ArtifactBlob, ArtifactProperty, ArtifactTag, Artifact) for model in models: model.metadata.drop_all(engine)
[ "def", "unregister_models", "(", "engine", ")", ":", "models", "=", "(", "ArtifactDependency", ",", "ArtifactBlobLocation", ",", "ArtifactBlob", ",", "ArtifactProperty", ",", "ArtifactTag", ",", "Artifact", ")", "for", "model", "in", "models", ":", "model", ".",...
unregister models .
train
false
28,375
def CreateFileVersion(path, content, timestamp, token=None): with test_lib.FakeTime(timestamp): with aff4.FACTORY.Create(path, aff4_type=aff4_grr.VFSFile, mode='w', token=token) as fd: fd.Write(content) fd.Set(fd.Schema.CONTENT_LAST, rdfvalue.RDFDatetime.Now())
[ "def", "CreateFileVersion", "(", "path", ",", "content", ",", "timestamp", ",", "token", "=", "None", ")", ":", "with", "test_lib", ".", "FakeTime", "(", "timestamp", ")", ":", "with", "aff4", ".", "FACTORY", ".", "Create", "(", "path", ",", "aff4_type",...
add a new version for a file .
train
false
28,376
def analyze_deps(related_targets): _expand_deps(related_targets) return _topological_sort(related_targets)
[ "def", "analyze_deps", "(", "related_targets", ")", ":", "_expand_deps", "(", "related_targets", ")", "return", "_topological_sort", "(", "related_targets", ")" ]
analyze the dependency relationship between targets .
train
false
28,379
@login_required @ensure_csrf_cookie @require_http_methods(['GET']) def course_info_handler(request, course_key_string): try: course_key = CourseKey.from_string(course_key_string) except InvalidKeyError: raise Http404 with modulestore().bulk_operations(course_key): course_module = get_course_and_check_access(course_key, request.user) if (not course_module): raise Http404 if ('text/html' in request.META.get('HTTP_ACCEPT', 'text/html')): return render_to_response('course_info.html', {'context_course': course_module, 'updates_url': reverse_course_url('course_info_update_handler', course_key), 'handouts_locator': course_key.make_usage_key('course_info', 'handouts'), 'base_asset_url': StaticContent.get_base_url_path_for_course_assets(course_module.id), 'push_notification_enabled': push_notification_enabled()}) else: return HttpResponseBadRequest('Only supports html requests')
[ "@", "login_required", "@", "ensure_csrf_cookie", "@", "require_http_methods", "(", "[", "'GET'", "]", ")", "def", "course_info_handler", "(", "request", ",", "course_key_string", ")", ":", "try", ":", "course_key", "=", "CourseKey", ".", "from_string", "(", "co...
get html: return html for editing the course info handouts and updates .
train
false
28,381
def instance_floating_address_get_all(context, instance_uuid): return IMPL.instance_floating_address_get_all(context, instance_uuid)
[ "def", "instance_floating_address_get_all", "(", "context", ",", "instance_uuid", ")", ":", "return", "IMPL", ".", "instance_floating_address_get_all", "(", "context", ",", "instance_uuid", ")" ]
get all floating ip addresses of an instance .
train
false
28,383
def read_packages_sources(): def read_from_file(config_filename): '\n Reads a sources.lst file from a given location\n\n :param config_filename: the configuration file to read\n ' global packages_sources try: f = open(config_filename, 'r') except Exception as e: pass else: for line in f: t = line.rstrip().split(' ') packages_sources[t[0]] = this_package = package_info(config_filename, t[0], t[1], t[2], urllib.unquote(t[3]), None) if super_powers: read_from_file(os.path.join(dataset_conf_path, dataset_sources)) else: paths = [os.path.join(root_conf_path, dataset_sources), os.path.join(user_conf_path, dataset_sources)] try: paths += [os.path.join(x, dataset_sources) for x in re.split(':|;', os.environ['PYLEARN2_DATA_PATH'])] except Exception: pass for path in paths: read_from_file(path) if (len(packages_sources) == 0): raise RuntimeError('[cf] fatal: could not find/read sources.lst (unexpected!)')
[ "def", "read_packages_sources", "(", ")", ":", "def", "read_from_file", "(", "config_filename", ")", ":", "global", "packages_sources", "try", ":", "f", "=", "open", "(", "config_filename", ",", "'r'", ")", "except", "Exception", "as", "e", ":", "pass", "els...
reads the sources .
train
false
28,384
def read_forward(handle): while True: line = handle.readline() if ((not line) or (line and line.strip())): return line
[ "def", "read_forward", "(", "handle", ")", ":", "while", "True", ":", "line", "=", "handle", ".", "readline", "(", ")", "if", "(", "(", "not", "line", ")", "or", "(", "line", "and", "line", ".", "strip", "(", ")", ")", ")", ":", "return", "line" ...
reads through whitespaces .
train
false
28,385
def find_users(email): return UserProfile.objects.filter((Q(email=email) | Q(history__email=email))).distinct()
[ "def", "find_users", "(", "email", ")", ":", "return", "UserProfile", ".", "objects", ".", "filter", "(", "(", "Q", "(", "email", "=", "email", ")", "|", "Q", "(", "history__email", "=", "email", ")", ")", ")", ".", "distinct", "(", ")" ]
given an email find all the possible users .
train
false
28,387
def UnpackSortKeyPrefix(prefix): (timestamp, random_bits) = struct.unpack('>IH', base64hex.B64HexDecode(prefix)) return timestamp
[ "def", "UnpackSortKeyPrefix", "(", "prefix", ")", ":", "(", "timestamp", ",", "random_bits", ")", "=", "struct", ".", "unpack", "(", "'>IH'", ",", "base64hex", ".", "B64HexDecode", "(", "prefix", ")", ")", "return", "timestamp" ]
returns the timestamp in the provided sort key prefix .
train
false
28,388
def inner_DFS(digr, node, node_explored, finishing_times): node_explored.add(node) for each in digr.neighbors(node): if (each not in node_explored): inner_DFS(digr, each, node_explored, finishing_times) global finishing_counter finishing_times.append(node)
[ "def", "inner_DFS", "(", "digr", ",", "node", ",", "node_explored", ",", "finishing_times", ")", ":", "node_explored", ".", "add", "(", "node", ")", "for", "each", "in", "digr", ".", "neighbors", "(", "node", ")", ":", "if", "(", "each", "not", "in", ...
inner dfs used in dfs loop method .
train
false
28,390
def _renameTable(tname): tname = re_upper.sub('_\\1', tname) if tname.startswith('_'): tname = tname[1:] return tname.lower()
[ "def", "_renameTable", "(", "tname", ")", ":", "tname", "=", "re_upper", ".", "sub", "(", "'_\\\\1'", ",", "tname", ")", "if", "tname", ".", "startswith", "(", "'_'", ")", ":", "tname", "=", "tname", "[", "1", ":", "]", "return", "tname", ".", "low...
build the name of a table .
train
false
28,391
@testing.requires_testing_data def test_show_fiff(): info = show_fiff(fname_evoked) keys = ['FIFF_EPOCH', 'FIFFB_HPI_COIL', 'FIFFB_PROJ_ITEM', 'FIFFB_PROCESSED_DATA', 'FIFFB_EVOKED', 'FIFF_NAVE', 'FIFF_EPOCH'] assert_true(all(((key in info) for key in keys))) info = show_fiff(fname_raw, read_limit=1024) assert_true(('COORD_TRANS' in show_fiff(fname_fsaverage_trans)))
[ "@", "testing", ".", "requires_testing_data", "def", "test_show_fiff", "(", ")", ":", "info", "=", "show_fiff", "(", "fname_evoked", ")", "keys", "=", "[", "'FIFF_EPOCH'", ",", "'FIFFB_HPI_COIL'", ",", "'FIFFB_PROJ_ITEM'", ",", "'FIFFB_PROCESSED_DATA'", ",", "'FIF...
test show_fiff .
train
false
28,392
def resize(mountpoint, size): if (size == 'max'): if (not salt.utils.fsutils._is_device(mountpoint)): raise CommandExecutionError('Mountpoint "{0}" should be a valid device'.format(mountpoint)) if (not salt.utils.fsutils._get_mounts('btrfs').get(mountpoint)): raise CommandExecutionError('Device "{0}" should be mounted'.format(mountpoint)) elif ((len(size) < 3) or (size[0] not in '-+') or (size[(-1)] not in 'kKmMgGtTpPeE') or re.sub('\\d', '', size[1:][:(-1)])): raise CommandExecutionError('Unknown size: "{0}". Expected: [+/-]<newsize>[kKmMgGtTpPeE]|max'.format(size)) out = __salt__['cmd.run_all']('btrfs filesystem resize {0} {1}'.format(size, mountpoint)) salt.utils.fsutils._verify_run(out) ret = {'log': out['stdout']} ret.update(__salt__['btrfs.info'](mountpoint)) return ret
[ "def", "resize", "(", "mountpoint", ",", "size", ")", ":", "if", "(", "size", "==", "'max'", ")", ":", "if", "(", "not", "salt", ".", "utils", ".", "fsutils", ".", "_is_device", "(", "mountpoint", ")", ")", ":", "raise", "CommandExecutionError", "(", ...
resize a given image file maintaining the aspect ratio .
train
true
28,393
def alias_get(indices=None, aliases=None, hosts=None, profile=None): es = _get_instance(hosts, profile) try: ret = es.indices.get_alias(index=indices, name=aliases) return ret except elasticsearch.exceptions.NotFoundError: return None return None
[ "def", "alias_get", "(", "indices", "=", "None", ",", "aliases", "=", "None", ",", "hosts", "=", "None", ",", "profile", "=", "None", ")", ":", "es", "=", "_get_instance", "(", "hosts", ",", "profile", ")", "try", ":", "ret", "=", "es", ".", "indic...
check for the existence of an alias and if it exists .
train
false
28,394
def getCrossHatchPointLine(crossHatchPointLineTable, y): if (not crossHatchPointLineTable.has_key(y)): crossHatchPointLineTable[y] = {} return crossHatchPointLineTable[y]
[ "def", "getCrossHatchPointLine", "(", "crossHatchPointLineTable", ",", "y", ")", ":", "if", "(", "not", "crossHatchPointLineTable", ".", "has_key", "(", "y", ")", ")", ":", "crossHatchPointLineTable", "[", "y", "]", "=", "{", "}", "return", "crossHatchPointLineT...
get the cross hatch point line .
train
false
28,395
def get_unassociated_eip_address(domain='standard', region=None, key=None, keyid=None, profile=None): eip = None for address in get_all_eip_addresses(region=region, key=key, keyid=keyid, profile=profile): address_info = get_eip_address_info(addresses=address, region=region, key=key, keyid=keyid, profile=profile)[0] if address_info['instance_id']: log.debug('{0} is already associated with the instance {1}'.format(address, address_info['instance_id'])) continue if address_info['network_interface_id']: log.debug('{0} is already associated with the network interface {1}'.format(address, address_info['network_interface_id'])) continue if (address_info['domain'] == domain): log.debug("The first unassociated EIP address in the domain '{0}' is {1}".format(domain, address)) eip = address break if (not eip): log.debug('No unassociated Elastic IP found!') return eip
[ "def", "get_unassociated_eip_address", "(", "domain", "=", "'standard'", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "eip", "=", "None", "for", "address", "in", "get_all_eip_address...
return the first unassociated eip domain indicates whether the address is a ec2 address or a vpc address .
train
true
28,396
def pre_delete_layer(instance, sender, **kwargs): if instance.is_remote: return logger.debug('Going to delete the styles associated for [%s]', instance.typename.encode('utf-8')) ct = ContentType.objects.get_for_model(instance) OverallRating.objects.filter(content_type=ct, object_id=instance.id).delete() default_style = instance.default_style for style in instance.styles.all(): if (style.layer_styles.all().count() == 1): if (style != default_style): style.delete() remove_object_permissions(instance)
[ "def", "pre_delete_layer", "(", "instance", ",", "sender", ",", "**", "kwargs", ")", ":", "if", "instance", ".", "is_remote", ":", "return", "logger", ".", "debug", "(", "'Going to delete the styles associated for [%s]'", ",", "instance", ".", "typename", ".", "...
remove any associated style to the layer .
train
false
28,397
def dump_to_file(ctrl, pid, proc_name, folder): ctrl_safe_name = str(ctrl.__class__.__name__).split('_')[(-1)].lower() file_name = '{0}_{1}_{2}_{3:#x}.txt'.format(pid, proc_name, ctrl_safe_name, ctrl.v()) with open(os.path.join(folder, file_name), 'wb') as out_file: out_file.write(ctrl.get_text())
[ "def", "dump_to_file", "(", "ctrl", ",", "pid", ",", "proc_name", ",", "folder", ")", ":", "ctrl_safe_name", "=", "str", "(", "ctrl", ".", "__class__", ".", "__name__", ")", ".", "split", "(", "'_'", ")", "[", "(", "-", "1", ")", "]", ".", "lower",...
dumps the data of the control to a file .
train
false
28,398
def _get_profile_image_urls(name, storage, file_extension=PROFILE_IMAGE_FILE_EXTENSION, version=None): def _make_url(size): url = storage.url(_get_profile_image_filename(name, size, file_extension=file_extension)) return ('{}?v={}'.format(url, version) if (version is not None) else url) return {size_display_name: _make_url(size) for (size_display_name, size) in PROFILE_IMAGE_SIZES_MAP.items()}
[ "def", "_get_profile_image_urls", "(", "name", ",", "storage", ",", "file_extension", "=", "PROFILE_IMAGE_FILE_EXTENSION", ",", "version", "=", "None", ")", ":", "def", "_make_url", "(", "size", ")", ":", "url", "=", "storage", ".", "url", "(", "_get_profile_i...
returns a dict containing the urls for a complete set of profile images .
train
false
28,399
def PLUS_DM(barDs, count, timeperiod=(- (2 ** 31))): return call_talib_with_hl(barDs, count, talib.PLUS_DM, timeperiod)
[ "def", "PLUS_DM", "(", "barDs", ",", "count", ",", "timeperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ")", ":", "return", "call_talib_with_hl", "(", "barDs", ",", "count", ",", "talib", ".", "PLUS_DM", ",", "timeperiod", ")" ]
plus directional movement .
train
false
28,400
def test_decorator(interface, original): return interface_decorator('test_decorator', interface, _test_logged_method, original)
[ "def", "test_decorator", "(", "interface", ",", "original", ")", ":", "return", "interface_decorator", "(", "'test_decorator'", ",", "interface", ",", "_test_logged_method", ",", "original", ")" ]
consumer of interface_decorator .
train
false
28,401
@audio_video_fx def volumex(clip, factor): return clip.fl((lambda gf, t: (factor * gf(t))), keep_duration=True)
[ "@", "audio_video_fx", "def", "volumex", "(", "clip", ",", "factor", ")", ":", "return", "clip", ".", "fl", "(", "(", "lambda", "gf", ",", "t", ":", "(", "factor", "*", "gf", "(", "t", ")", ")", ")", ",", "keep_duration", "=", "True", ")" ]
returns a clip with audio volume multiplied by the value factor .
train
false
28,402
def adjust_log(image, gain=1, inv=False): _assert_non_negative(image) dtype = image.dtype.type scale = float((dtype_limits(image, True)[1] - dtype_limits(image, True)[0])) if inv: out = ((((2 ** (image / scale)) - 1) * scale) * gain) return dtype(out) out = ((np.log2((1 + (image / scale))) * scale) * gain) return dtype(out)
[ "def", "adjust_log", "(", "image", ",", "gain", "=", "1", ",", "inv", "=", "False", ")", ":", "_assert_non_negative", "(", "image", ")", "dtype", "=", "image", ".", "dtype", ".", "type", "scale", "=", "float", "(", "(", "dtype_limits", "(", "image", ...
performs logarithmic correction on the input image .
train
false
28,403
def ensure_geo_reference(origin): if isinstance(origin, Geo_reference): geo_ref = origin elif (origin is None): geo_ref = None else: geo_ref = apply(Geo_reference, origin) return geo_ref
[ "def", "ensure_geo_reference", "(", "origin", ")", ":", "if", "isinstance", "(", "origin", ",", "Geo_reference", ")", ":", "geo_ref", "=", "origin", "elif", "(", "origin", "is", "None", ")", ":", "geo_ref", "=", "None", "else", ":", "geo_ref", "=", "appl...
given a list/tuple of zone .
train
true
28,404
def column_index_from_string(column, fast=False): column = column.upper() clen = len(column) if ((not fast) and (not all((('A' <= char <= 'Z') for char in column)))): msg = ('Column string must contain only characters A-Z: got %s' % column) raise ColumnStringIndexException(msg) if (clen == 1): return (ord(column[0]) - 64) elif (clen == 2): return (((1 + (ord(column[0]) - 65)) * 26) + (ord(column[1]) - 64)) elif (clen == 3): return ((((1 + (ord(column[0]) - 65)) * 676) + ((1 + (ord(column[1]) - 65)) * 26)) + (ord(column[2]) - 64)) elif (clen > 3): raise ColumnStringIndexException('Column string index can not be longer than 3 characters') else: raise ColumnStringIndexException('Column string index can not be empty')
[ "def", "column_index_from_string", "(", "column", ",", "fast", "=", "False", ")", ":", "column", "=", "column", ".", "upper", "(", ")", "clen", "=", "len", "(", "column", ")", "if", "(", "(", "not", "fast", ")", "and", "(", "not", "all", "(", "(", ...
convert a column name into a numerical index .
train
false
28,405
def model_from_json(json_string, custom_objects=None): import json from keras.utils.layer_utils import layer_from_config config = json.loads(json_string) return layer_from_config(config, custom_objects=custom_objects)
[ "def", "model_from_json", "(", "json_string", ",", "custom_objects", "=", "None", ")", ":", "import", "json", "from", "keras", ".", "utils", ".", "layer_utils", "import", "layer_from_config", "config", "=", "json", ".", "loads", "(", "json_string", ")", "retur...
parses a json model configuration file and returns a model instance .
train
false
28,406
def addNegativesByRadius(elementNode, end, negatives, radius, start): if (radius <= 0.0): return copyShallow = elementNode.getCopyShallow() extrude.setElementNodeToEndStart(copyShallow, end, start) extrudeDerivation = extrude.ExtrudeDerivation(copyShallow) extrude.addNegatives(extrudeDerivation, negatives, [getTeardropPathByEndStart(elementNode, end, radius, start)])
[ "def", "addNegativesByRadius", "(", "elementNode", ",", "end", ",", "negatives", ",", "radius", ",", "start", ")", ":", "if", "(", "radius", "<=", "0.0", ")", ":", "return", "copyShallow", "=", "elementNode", ".", "getCopyShallow", "(", ")", "extrude", "."...
add teardrop drill hole to negatives .
train
false
28,407
def mc2cum(mc): return mnc2cum(mc2mnc(mc))
[ "def", "mc2cum", "(", "mc", ")", ":", "return", "mnc2cum", "(", "mc2mnc", "(", "mc", ")", ")" ]
just chained because i have still the test case .
train
false
28,408
@public def sfield(exprs, *symbols, **options): single = False if (not is_sequence(exprs)): (exprs, single) = ([exprs], True) exprs = list(map(sympify, exprs)) opt = build_options(symbols, options) numdens = [] for expr in exprs: numdens.extend(expr.as_numer_denom()) (reps, opt) = _parallel_dict_from_expr(numdens, opt) if (opt.domain is None): coeffs = sum([list(rep.values()) for rep in reps], []) (opt.domain, _) = construct_domain(coeffs, opt=opt) _field = FracField(opt.gens, opt.domain, opt.order) fracs = [] for i in range(0, len(reps), 2): fracs.append(_field(tuple(reps[i:(i + 2)]))) if single: return (_field, fracs[0]) else: return (_field, fracs)
[ "@", "public", "def", "sfield", "(", "exprs", ",", "*", "symbols", ",", "**", "options", ")", ":", "single", "=", "False", "if", "(", "not", "is_sequence", "(", "exprs", ")", ")", ":", "(", "exprs", ",", "single", ")", "=", "(", "[", "exprs", "]"...
construct a field deriving generators and domain from options and input expressions .
train
false
28,409
def _get_engine(data_dict): connection_url = data_dict['connection_url'] engine = _engines.get(connection_url) if (not engine): extras = {'url': connection_url} engine = sqlalchemy.engine_from_config(config, 'ckan.datastore.sqlalchemy.', **extras) _engines[connection_url] = engine return engine
[ "def", "_get_engine", "(", "data_dict", ")", ":", "connection_url", "=", "data_dict", "[", "'connection_url'", "]", "engine", "=", "_engines", ".", "get", "(", "connection_url", ")", "if", "(", "not", "engine", ")", ":", "extras", "=", "{", "'url'", ":", ...
get either read or write engine .
train
false
28,411
@routes.route('/health') def health(): content = Markup(markdown.markdown('The server is healthy!')) return content
[ "@", "routes", ".", "route", "(", "'/health'", ")", "def", "health", "(", ")", ":", "content", "=", "Markup", "(", "markdown", ".", "markdown", "(", "'The server is healthy!'", ")", ")", "return", "content" ]
we can add an array of tests here to check the servers health .
train
false
28,412
def classname_for_table(base, tablename, table): return str(tablename)
[ "def", "classname_for_table", "(", "base", ",", "tablename", ",", "table", ")", ":", "return", "str", "(", "tablename", ")" ]
return the class name that should be used .
train
false
28,413
def flattened_order_key(key): return tuple(([len(key)] + list(key)))
[ "def", "flattened_order_key", "(", "key", ")", ":", "return", "tuple", "(", "(", "[", "len", "(", "key", ")", "]", "+", "list", "(", "key", ")", ")", ")" ]
order by key length first then values .
train
false
28,414
def get_topic_policy(topic_name): pubsub_client = pubsub.Client() topic = pubsub_client.topic(topic_name) policy = topic.get_iam_policy() print 'Policy for topic {}:'.format(topic.name) print 'Version: {}'.format(policy.version) print 'Owners: {}'.format(policy.owners) print 'Editors: {}'.format(policy.editors) print 'Viewers: {}'.format(policy.viewers) print 'Publishers: {}'.format(policy.publishers) print 'Subscribers: {}'.format(policy.subscribers)
[ "def", "get_topic_policy", "(", "topic_name", ")", ":", "pubsub_client", "=", "pubsub", ".", "Client", "(", ")", "topic", "=", "pubsub_client", ".", "topic", "(", "topic_name", ")", "policy", "=", "topic", ".", "get_iam_policy", "(", ")", "print", "'Policy f...
prints the iam policy for the given topic .
train
false
28,415
def is_mac(): return (platform.system() == 'Darwin')
[ "def", "is_mac", "(", ")", ":", "return", "(", "platform", ".", "system", "(", ")", "==", "'Darwin'", ")" ]
return whether the argument is a mac address .
train
false
28,417
def p_expression_uminus(p): p[0] = (- p[2])
[ "def", "p_expression_uminus", "(", "p", ")", ":", "p", "[", "0", "]", "=", "(", "-", "p", "[", "2", "]", ")" ]
expression : minus expression %prec uminus .
train
false
28,421
def could_edit(request): user = getattr(request, 'user', AnonymousUser()) return (getattr(user, 'is_superuser', False) or getattr(user, 'is_staff', False))
[ "def", "could_edit", "(", "request", ")", ":", "user", "=", "getattr", "(", "request", ",", "'user'", ",", "AnonymousUser", "(", ")", ")", "return", "(", "getattr", "(", "user", ",", "'is_superuser'", ",", "False", ")", "or", "getattr", "(", "user", ",...
return true if the context of the given request would allow xtheme editing .
train
false
28,423
def sle(actual, predicted): return np.power((np.log((np.array(actual) + 1)) - np.log((np.array(predicted) + 1))), 2)
[ "def", "sle", "(", "actual", ",", "predicted", ")", ":", "return", "np", ".", "power", "(", "(", "np", ".", "log", "(", "(", "np", ".", "array", "(", "actual", ")", "+", "1", ")", ")", "-", "np", ".", "log", "(", "(", "np", ".", "array", "(...
computes the squared log error .
train
true
28,425
def encode_timestamps(t1, t2=None, t3=None, explicit=False): form = '{0}' values = [t1.short] if (t2 is not None): t2_t1_delta = (t2.raw - t1.raw) explicit = (explicit or (t2_t1_delta != 0)) values.append(t2_t1_delta) if (t3 is not None): t3_t2_delta = (t3.raw - t2.raw) explicit = (explicit or (t3_t2_delta != 0)) values.append(t3_t2_delta) if explicit: form += '{1:+x}' if (t3 is not None): form += '{2:+x}' return form.format(*values)
[ "def", "encode_timestamps", "(", "t1", ",", "t2", "=", "None", ",", "t3", "=", "None", ",", "explicit", "=", "False", ")", ":", "form", "=", "'{0}'", "values", "=", "[", "t1", ".", "short", "]", "if", "(", "t2", "is", "not", "None", ")", ":", "...
encode up to three timestamps into a string .
train
false
28,428
def __unpack_and_run(): script_keys = get_instance_metadata_attribute('startup_loader_files') key_list = (script_keys.split('+') if script_keys else []) unpack_files(key_list) if script_keys: clear_instance_metadata('startup_loader_files') startup_command = get_instance_metadata_attribute('startup_command') if (not startup_command): sys.stderr.write('No "startup_command" metadata key.\n') raise SystemExit('No "startup_command" metadata key.') command = ('chmod gou+rx /opt/spinnaker/install/*.sh; ' + startup_command.replace('+', ' ')) with open('__startup_script__.sh', 'w') as f: f.write('#!/bin/bash\ncd /opt/spinnaker/install\n{command}\n'.format(command=command)) os.chmod('__startup_script__.sh', 365) write_instance_metadata('startup-script', '/opt/spinnaker/install/__startup_script__.sh') clear_instance_metadata('startup_command') p = subprocess.Popen(command, shell=True, close_fds=True) p.wait() return p.returncode
[ "def", "__unpack_and_run", "(", ")", ":", "script_keys", "=", "get_instance_metadata_attribute", "(", "'startup_loader_files'", ")", "key_list", "=", "(", "script_keys", ".", "split", "(", "'+'", ")", "if", "script_keys", "else", "[", "]", ")", "unpack_files", "...
unpack the files from metadata .
train
false
28,429
def wire_encode(obj): return dumps(_cached_dfs_serialize(obj))
[ "def", "wire_encode", "(", "obj", ")", ":", "return", "dumps", "(", "_cached_dfs_serialize", "(", "obj", ")", ")" ]
encode the given model object into bytes .
train
false
28,430
def is_sysadmin(username): user = _get_user(username) return (user and user.sysadmin)
[ "def", "is_sysadmin", "(", "username", ")", ":", "user", "=", "_get_user", "(", "username", ")", "return", "(", "user", "and", "user", ".", "sysadmin", ")" ]
returns true is username is a sysadmin .
train
false
28,433
def _schedule_probes(probes): return __salt__['probes.schedule_probes'](_ordered_dict_to_dict(probes), commit=False)
[ "def", "_schedule_probes", "(", "probes", ")", ":", "return", "__salt__", "[", "'probes.schedule_probes'", "]", "(", "_ordered_dict_to_dict", "(", "probes", ")", ",", "commit", "=", "False", ")" ]
calls the salt module "probes" to schedule the configured probes on the device .
train
false
28,434
def get_diff_chunk_generator_class(): return _generator
[ "def", "get_diff_chunk_generator_class", "(", ")", ":", "return", "_generator" ]
returns the diffchunkgenerator class used for generating chunks .
train
false
28,435
def testStandingsBeforeMatches():
    """Verify newly registered players appear correctly in standings before
    any matches are reported (Python 2 tournament test)."""
    deleteMatches()
    deletePlayers()
    registerPlayer('Melpomene Murray')
    registerPlayer('Randy Schwartz')
    standings = playerStandings()
    if (len(standings) < 2):
        raise ValueError('Players should appear in playerStandings even before they have played any matches.')
    elif (len(standings) > 2):
        raise ValueError('Only registered players should appear in standings.')
    if (len(standings[0]) != 4):
        raise ValueError('Each playerStandings row should have four columns.')
    [(id1, name1, wins1, matches1), (id2, name2, wins2, matches2)] = standings
    if ((matches1 != 0) or (matches2 != 0) or (wins1 != 0) or (wins2 != 0)):
        raise ValueError('Newly registered players should have no matches or wins.')
    if (set([name1, name2]) != set(['Melpomene Murray', 'Randy Schwartz'])):
        raise ValueError("Registered players' names should appear in standings, even if they have no matches played.")
    print '6. Newly registered players appear in the standings with no matches.'
[ "def", "testStandingsBeforeMatches", "(", ")", ":", "deleteMatches", "(", ")", "deletePlayers", "(", ")", "registerPlayer", "(", "'Melpomene Murray'", ")", "registerPlayer", "(", "'Randy Schwartz'", ")", "standings", "=", "playerStandings", "(", ")", "if", "(", "l...
test to ensure players are properly represented in standings prior to any matches being reported .
train
false
28,436
def create_ipsec_site_connection(name, ipsecpolicy, ikepolicy, vpnservice, peer_cidrs, peer_address, peer_id, psk, admin_state_up=True, profile=None, **kwargs):
    """Create a new IPSecSiteConnection using the connection obtained from
    the given auth *profile*; remaining arguments are passed through."""
    conn = _auth(profile)
    return conn.create_ipsec_site_connection(name, ipsecpolicy, ikepolicy, vpnservice, peer_cidrs, peer_address, peer_id, psk, admin_state_up, **kwargs)
[ "def", "create_ipsec_site_connection", "(", "name", ",", "ipsecpolicy", ",", "ikepolicy", ",", "vpnservice", ",", "peer_cidrs", ",", "peer_address", ",", "peer_id", ",", "psk", ",", "admin_state_up", "=", "True", ",", "profile", "=", "None", ",", "**", "kwargs...
creates a new ipsecsiteconnection cli example: .
train
true
28,437
def ccode(expr, assign_to=None, **settings):
    """Convert a SymPy expression to a string of C code, optionally assigning
    the result to *assign_to*."""
    return CCodePrinter(settings).doprint(expr, assign_to)
[ "def", "ccode", "(", "expr", ",", "assign_to", "=", "None", ",", "**", "settings", ")", ":", "return", "CCodePrinter", "(", "settings", ")", ".", "doprint", "(", "expr", ",", "assign_to", ")" ]
converts an expr to a string of c code parameters expr : expr a sympy expression to be converted .
train
false
28,439
def clear_managers():
    """Close every cached DB-API 2 connection manager and forget them all."""
    for manager in proxies.values():
        manager.close()
    proxies.clear()
[ "def", "clear_managers", "(", ")", ":", "for", "manager", "in", "proxies", ".", "values", "(", ")", ":", "manager", ".", "close", "(", ")", "proxies", ".", "clear", "(", ")" ]
remove all current db-api 2 .
train
false
28,440
@contextlib.contextmanager
def change_recursion_limit(limit):
    """Temporarily raise the interpreter recursion limit to at least *limit*.

    The previous limit is restored on exit even if the body raises; the
    original version skipped restoration when an exception propagated.
    If the current limit already meets *limit*, nothing is changed (but the
    old value is still re-set on exit, which is a no-op).
    """
    old_limit = sys.getrecursionlimit()
    if old_limit < limit:
        sys.setrecursionlimit(limit)
    try:
        yield
    finally:
        # Always restore, even when the with-body raises.
        sys.setrecursionlimit(old_limit)
[ "@", "contextlib", ".", "contextmanager", "def", "change_recursion_limit", "(", "limit", ")", ":", "old_limit", "=", "sys", ".", "getrecursionlimit", "(", ")", "if", "(", "old_limit", "<", "limit", ")", ":", "sys", ".", "setrecursionlimit", "(", "limit", ")"...
temporarily changes the recursion limit .
train
false
28,441
def gauss_spline(x, n):
    """Gaussian approximation to the B-spline basis function of order *n*.

    Uses a normal density with variance (n + 1) / 12.
    """
    variance = (n + 1) / 12.0
    norm = 1 / sqrt(2 * pi * variance)
    return norm * exp(-x ** 2 / (2 * variance))
[ "def", "gauss_spline", "(", "x", ",", "n", ")", ":", "signsq", "=", "(", "(", "n", "+", "1", ")", "/", "12.0", ")", "return", "(", "(", "1", "/", "sqrt", "(", "(", "(", "2", "*", "pi", ")", "*", "signsq", ")", ")", ")", "*", "exp", "(", ...
gaussian approximation to b-spline basis function of order n .
train
false
28,442
def testImport(module_name): try: return testImport.cache[module_name] except KeyError: try: __import__(module_name) except ImportError: result = False else: result = True testImport.cache[module_name] = result return result
[ "def", "testImport", "(", "module_name", ")", ":", "try", ":", "return", "testImport", ".", "cache", "[", "module_name", "]", "except", "KeyError", ":", "try", ":", "__import__", "(", "module_name", ")", "except", "ImportError", ":", "result", "=", "False", ...
tell wether a module can be imported .
train
false
28,443
def _get_course_email_context(course):
    """Return the template context arguments applied to all bulk emails
    for *course* (titles, URLs, platform branding)."""
    course_id = course.id.to_deprecated_string()
    course_title = course.display_name
    course_end_date = get_default_time_display(course.end)
    course_root = reverse('course_root', kwargs={'course_id': course_id})
    course_url = '{}{}'.format(settings.LMS_ROOT_URL, course_root)
    image_url = u'{}{}'.format(settings.LMS_ROOT_URL, course_image_url(course))
    email_context = {'course_title': course_title, 'course_root': course_root, 'course_url': course_url, 'course_image_url': image_url, 'course_end_date': course_end_date, 'account_settings_url': '{}{}'.format(settings.LMS_ROOT_URL, reverse('account_settings')), 'email_settings_url': '{}{}'.format(settings.LMS_ROOT_URL, reverse('dashboard')), 'platform_name': configuration_helpers.get_value('PLATFORM_NAME', settings.PLATFORM_NAME)}
    return email_context
[ "def", "_get_course_email_context", "(", "course", ")", ":", "course_id", "=", "course", ".", "id", ".", "to_deprecated_string", "(", ")", "course_title", "=", "course", ".", "display_name", "course_end_date", "=", "get_default_time_display", "(", "course", ".", "...
returns context arguments to apply to all emails .
train
false
28,444
def _update_datacenter_cache_from_objects(session, dcs):
    """Update the datastore-to-datacenter cache (_DS_DC_MAPPING) from the
    retrieved datacenter objects, following continuation tokens."""
    while dcs:
        for dco in dcs.objects:
            dc_ref = dco.obj
            ds_refs = []
            prop_dict = vm_util.propset_dict(dco.propSet)
            name = prop_dict.get('name')
            vmFolder = prop_dict.get('vmFolder')
            datastore_refs = prop_dict.get('datastore')
            if datastore_refs:
                datastore_refs = datastore_refs.ManagedObjectReference
                for ds in datastore_refs:
                    ds_refs.append(ds.value)
            else:
                LOG.debug("Datacenter %s doesn't have any datastore associated with it, ignoring it", name)
            for ds_ref in ds_refs:
                _DS_DC_MAPPING[ds_ref] = DcInfo(ref=dc_ref, name=name, vmFolder=vmFolder)
        # Fetch the next page of results, if any.
        dcs = session._call_method(vutil, 'continue_retrieval', dcs)
[ "def", "_update_datacenter_cache_from_objects", "(", "session", ",", "dcs", ")", ":", "while", "dcs", ":", "for", "dco", "in", "dcs", ".", "objects", ":", "dc_ref", "=", "dco", ".", "obj", "ds_refs", "=", "[", "]", "prop_dict", "=", "vm_util", ".", "prop...
updates the datastore/datacenter cache .
train
false
28,445
def encode_trailing_data(raw):
    """Append a backward-encoded length varint to *raw*.

    The size field grows until the varint's own length is consistent with
    the total length it encodes, so a reader scanning from the end can
    recover where the trailing data begins.
    """
    lsize = 1
    while True:
        encoded = encint((len(raw) + lsize), forward=False)
        if (len(encoded) == lsize):
            break
        lsize += 1
    return (raw + encoded)
[ "def", "encode_trailing_data", "(", "raw", ")", ":", "lsize", "=", "1", "while", "True", ":", "encoded", "=", "encint", "(", "(", "len", "(", "raw", ")", "+", "lsize", ")", ",", "forward", "=", "False", ")", "if", "(", "len", "(", "encoded", ")", ...
given some data in the bytestring raw .
train
false
28,446
def fix_2to3(source, aggressive=True, select=None, ignore=None, filename=u''):
    """Run 2to3-based fixers over *source* to repair deprecated code.

    Returns *source* unchanged unless *aggressive* is set.
    """
    if (not aggressive):
        return source
    select = (select or [])
    ignore = (ignore or [])
    return refactor(source, code_to_2to3(select=select, ignore=ignore), filename=filename)
[ "def", "fix_2to3", "(", "source", ",", "aggressive", "=", "True", ",", "select", "=", "None", ",", "ignore", "=", "None", ",", "filename", "=", "u''", ")", ":", "if", "(", "not", "aggressive", ")", ":", "return", "source", "select", "=", "(", "select...
fix various deprecated code .
train
true
28,447
def add_days_worked(start, days):
    """Add *days* working days to *start*, skipping weekends and weekday
    national holidays."""
    step = get_step(start)
    (weeks, plus) = divmod(days, 5)
    # Convert whole working weeks into calendar weeks plus the remainder.
    end = (start + (((weeks * 7) + plus) * step))
    if (weekday(end) >= 5):
        end += (2 * step)
    # Push past any weekday national holidays inside the span.
    end += (len([x for x in get_national_holidays(start, (end + step)) if (weekday(x) < 5)]) * step)
    if (weekday(end) >= 5):
        end += (2 * step)
    return end
[ "def", "add_days_worked", "(", "start", ",", "days", ")", ":", "step", "=", "get_step", "(", "start", ")", "(", "weeks", ",", "plus", ")", "=", "divmod", "(", "days", ",", "5", ")", "end", "=", "(", "start", "+", "(", "(", "(", "weeks", "*", "7...
adds date but try to only take days worked into account .
train
false
28,451
def set_builddefaults_facts(facts):
    """Set build-default facts, propagating proxy settings from the common
    facts, and mirror builddefaults['config'] into the master's
    admission_plugin_config (pruning empty BuildDefaults env entries)."""
    if ('builddefaults' in facts):
        builddefaults = facts['builddefaults']
        common = facts['common']
        if (('http_proxy' not in builddefaults) and ('http_proxy' in common)):
            builddefaults['http_proxy'] = common['http_proxy']
        if (('https_proxy' not in builddefaults) and ('https_proxy' in common)):
            builddefaults['https_proxy'] = common['https_proxy']
        if (('no_proxy' not in builddefaults) and ('no_proxy' in common)):
            builddefaults['no_proxy'] = common['no_proxy']
        # Git-specific proxies fall back to the generic proxy settings.
        if (('git_http_proxy' not in builddefaults) and ('http_proxy' in builddefaults)):
            builddefaults['git_http_proxy'] = builddefaults['http_proxy']
        if (('git_https_proxy' not in builddefaults) and ('https_proxy' in builddefaults)):
            builddefaults['git_https_proxy'] = builddefaults['https_proxy']
        if (('git_no_proxy' not in builddefaults) and ('no_proxy' in builddefaults)):
            builddefaults['git_no_proxy'] = builddefaults['no_proxy']
        if ('config' in builddefaults):
            if ('admission_plugin_config' not in facts['master']):
                facts['master']['admission_plugin_config'] = dict()
            facts['master']['admission_plugin_config'].update(builddefaults['config'])
            delete_empty_keys(facts['master']['admission_plugin_config']['BuildDefaults']['configuration']['env'])
    return facts
[ "def", "set_builddefaults_facts", "(", "facts", ")", ":", "if", "(", "'builddefaults'", "in", "facts", ")", ":", "builddefaults", "=", "facts", "[", "'builddefaults'", "]", "common", "=", "facts", "[", "'common'", "]", "if", "(", "(", "'http_proxy'", "not", ...
set build defaults including setting proxy values from http_proxy .
train
false
28,452
def _read_dict(configparser, dictionary): for (section, keys) in dictionary.items(): section = str(section) configparser.add_section(section) for (key, value) in keys.items(): key = configparser.optionxform(str(key)) if (value is not None): value = str(value) configparser.set(section, key, value)
[ "def", "_read_dict", "(", "configparser", ",", "dictionary", ")", ":", "for", "(", "section", ",", "keys", ")", "in", "dictionary", ".", "items", "(", ")", ":", "section", "=", "str", "(", "section", ")", "configparser", ".", "add_section", "(", "section...
cribbed from python3s configparser .
train
false
28,453
def erasable(msg):
    """Wrap *msg* in terminal control codes so it can be overwritten rather
    than pushing output onto a new line (only when color output is enabled)."""
    if color_enabled:
        return ((_CLEAR_LINE + msg) + _CURSUR_UP)
    return msg
[ "def", "erasable", "(", "msg", ")", ":", "if", "color_enabled", ":", "return", "(", "(", "_CLEAR_LINE", "+", "msg", ")", "+", "_CURSUR_UP", ")", "return", "msg" ]
make msg not cause new line when output .
train
false
28,454
def consize():
    """Return the (width, height) of the console, or (None, None) if unknown."""
    current_os = platform.system()
    tuple_xy = None
    if (current_os == 'Windows'):
        tuple_xy = _size_windows()
    if ((current_os in ['Linux', 'Darwin']) or current_os.startswith('CYGWIN')):
        tuple_xy = _size_linux()
    return (tuple_xy or (None, None))
[ "def", "consize", "(", ")", ":", "current_os", "=", "platform", ".", "system", "(", ")", "tuple_xy", "=", "None", "if", "(", "current_os", "==", "'Windows'", ")", ":", "tuple_xy", "=", "_size_windows", "(", ")", "if", "(", "(", "current_os", "in", "[",...
getterminalsize() - get width and height of console originally retrieved from: URL .
train
false
28,460
def _image_member_get(context, memb_id, session):
    """Fetch an ImageMember entity by id; query.one() raises if the row is
    missing or not unique."""
    query = session.query(models.ImageMember)
    query = query.filter_by(id=memb_id)
    return query.one()
[ "def", "_image_member_get", "(", "context", ",", "memb_id", ",", "session", ")", ":", "query", "=", "session", ".", "query", "(", "models", ".", "ImageMember", ")", "query", "=", "query", ".", "filter_by", "(", "id", "=", "memb_id", ")", "return", "query...
fetch an imagemember entity by id .
train
false
28,461
def getTricomplexmatrix(transformWords):
    """Build a matrix-like list of three complex numbers parsed from
    *transformWords* at word offsets 0, 2 and 4."""
    tricomplex = [euclidean.getComplexByWords(transformWords)]
    tricomplex.append(euclidean.getComplexByWords(transformWords, 2))
    tricomplex.append(euclidean.getComplexByWords(transformWords, 4))
    return tricomplex
[ "def", "getTricomplexmatrix", "(", "transformWords", ")", ":", "tricomplex", "=", "[", "euclidean", ".", "getComplexByWords", "(", "transformWords", ")", "]", "tricomplex", ".", "append", "(", "euclidean", ".", "getComplexByWords", "(", "transformWords", ",", "2",...
get matrixsvg by transformwords .
train
false
28,462
def touchFile(fname, atime=None):
    """Set the access/modification time of *fname* to *atime*.

    Returns True on success and False otherwise (including when *atime*
    is None); OS errors are logged rather than raised.
    """
    if (atime is not None):
        try:
            with io.open(fname, u'a'):
                os.utime(fname, (atime, atime))
            return True
        except OSError as e:
            if (e.errno == errno.ENOSYS):
                sickrage.srCore.srLogger.debug(u'File air date stamping not available on your OS. Please disable setting')
            elif (e.errno == errno.EACCES):
                sickrage.srCore.srLogger.error((u'File air date stamping failed(Permission denied). Check permissions for file: %s' % fname))
            else:
                sickrage.srCore.srLogger.error((u'File air date stamping failed. The error is: %r' % e))
    return False
[ "def", "touchFile", "(", "fname", ",", "atime", "=", "None", ")", ":", "if", "(", "atime", "is", "not", "None", ")", ":", "try", ":", "with", "io", ".", "open", "(", "fname", ",", "u'a'", ")", ":", "os", ".", "utime", "(", "fname", ",", "(", ...
touch a file .
train
false
28,467
def _alias(attr): @property def alias(self): return getattr(self, attr) @alias.setter def alias(self): return setattr(self, attr) return alias
[ "def", "_alias", "(", "attr", ")", ":", "@", "property", "def", "alias", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "attr", ")", "@", "alias", ".", "setter", "def", "alias", "(", "self", ")", ":", "return", "setattr", "(", "self"...
alias one attribute name to another for backward compatibility .
train
false
28,468
def chunk_index_list(indices):
    """Split a very large index list into roughly 3 KB chunks.

    Size is measured as the length of the comma-joined CSV string; each
    returned chunk is a list of index names. An empty input yields [['']],
    matching the original behaviour of splitting an empty CSV string.
    """
    result = []
    current = ''
    for name in indices:
        if len(current) >= 3072:
            # Current chunk is full -- flush it and start a new one.
            result.append(current.split(','))
            current = name
        elif current:
            current = current + ',' + name
        else:
            current = name
    result.append(current.split(','))
    return result
[ "def", "chunk_index_list", "(", "indices", ")", ":", "chunks", "=", "[", "]", "chunk", "=", "''", "for", "index", "in", "indices", ":", "if", "(", "len", "(", "chunk", ")", "<", "3072", ")", ":", "if", "(", "not", "chunk", ")", ":", "chunk", "=",...
this utility chunks very large index lists into 3kb chunks it measures the size as a csv string .
train
false
28,469
def density(w, **kwargs):
    """Return the fraction of nonzero entries in matrix *w*.

    Supports scipy sparse matrices (via their nnz count), dense arrays,
    and None (treated as density 0).
    """
    if hasattr(w, 'toarray'):
        # Sparse matrix: use the stored-nonzero count over the full size.
        return float(w.nnz) / (w.shape[0] * w.shape[1])
    if w is None:
        return 0
    return float((w != 0).sum()) / w.size
[ "def", "density", "(", "w", ",", "**", "kwargs", ")", ":", "if", "hasattr", "(", "w", ",", "'toarray'", ")", ":", "d", "=", "(", "float", "(", "w", ".", "nnz", ")", "/", "(", "w", ".", "shape", "[", "0", "]", "*", "w", ".", "shape", "[", ...
return the density of a graph .
train
false
28,470
def test_pmf_hist_widths():
    """Histogram bin width should equal the spacing between bin centers."""
    (x, h, w) = utils.pmf_hist(a_norm)
    assert_equal((x[1] - x[0]), w)
[ "def", "test_pmf_hist_widths", "(", ")", ":", "(", "x", ",", "h", ",", "w", ")", "=", "utils", ".", "pmf_hist", "(", "a_norm", ")", "assert_equal", "(", "(", "x", "[", "1", "]", "-", "x", "[", "0", "]", ")", ",", "w", ")" ]
test histogram width is correct .
train
false
28,471
def run_simple(hostname, port, application, use_reloader=False, use_debugger=False, use_evalex=True, extra_files=None, reloader_interval=1, reloader_type='auto', threaded=False, processes=1, request_handler=None, static_files=None, passthrough_errors=False, ssl_context=None):
    """Start a simple WSGI development server for *application*.

    Optionally wraps the app with the debugger and static-file middleware,
    and can restart itself through the reloader subprocess mechanism.
    """
    if use_debugger:
        from werkzeug.debug import DebuggedApplication
        application = DebuggedApplication(application, use_evalex)
    if static_files:
        from werkzeug.wsgi import SharedDataMiddleware
        application = SharedDataMiddleware(application, static_files)

    def log_startup(sock):
        # Announce the listening address once the socket is bound.
        display_hostname = (((hostname not in ('', '*')) and hostname) or 'localhost')
        if (':' in display_hostname):
            display_hostname = ('[%s]' % display_hostname)
        quit_msg = '(Press CTRL+C to quit)'
        port = sock.getsockname()[1]
        _log('info', ' * Running on %s://%s:%d/ %s', (((ssl_context is None) and 'http') or 'https'), display_hostname, port, quit_msg)

    def inner():
        # In the reloader child, the listening socket is inherited via fd.
        try:
            fd = int(os.environ['WERKZEUG_SERVER_FD'])
        except (LookupError, ValueError):
            fd = None
        srv = make_server(hostname, port, application, threaded, processes, request_handler, passthrough_errors, ssl_context, fd=fd)
        if (fd is None):
            log_startup(srv.socket)
        srv.serve_forever()

    if use_reloader:
        if (os.environ.get('WERKZEUG_RUN_MAIN') != 'true'):
            if ((port == 0) and (not can_open_by_fd)):
                raise ValueError('Cannot bind to a random port with enabled reloader if the Python interpreter does not support socket opening by fd.')
            # Bind in the parent so restarts keep the same port.
            address_family = select_ip_version(hostname, port)
            s = socket.socket(address_family, socket.SOCK_STREAM)
            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
            s.bind((hostname, port))
            if hasattr(s, 'set_inheritable'):
                s.set_inheritable(True)
            if can_open_by_fd:
                os.environ['WERKZEUG_SERVER_FD'] = str(s.fileno())
                s.listen(LISTEN_QUEUE)
                log_startup(s)
            else:
                s.close()
        from ._reloader import run_with_reloader
        run_with_reloader(inner, extra_files, reloader_interval, reloader_type)
    else:
        inner()
[ "def", "run_simple", "(", "hostname", ",", "port", ",", "application", ",", "use_reloader", "=", "False", ",", "use_debugger", "=", "False", ",", "use_evalex", "=", "True", ",", "extra_files", "=", "None", ",", "reloader_interval", "=", "1", ",", "reloader_t...
run simple external command and return output .
train
false
28,472
def enable_monitor_mode(iface):
    """Use airmon-ng to put *iface* into monitor mode, optionally raising
    the Tx power, and return the interface recorded for later teardown."""
    global RUN_CONFIG
    print (((GR + ' [+]') + W) + (' enabling monitor mode on %s...' % ((G + iface) + W))),
    stdout.flush()
    call(['airmon-ng', 'start', iface], stdout=DN, stderr=DN)
    print 'done'
    RUN_CONFIG.IFACE_TO_TAKE_DOWN = get_iface()
    if (RUN_CONFIG.TX_POWER > 0):
        print (((GR + ' [+]') + W) + (' setting Tx power to %s%s%s...' % (G, RUN_CONFIG.TX_POWER, W))),
        # 'BO' regulatory domain permits a higher Tx power setting.
        call(['iw', 'reg', 'set', 'BO'], stdout=OUTLOG, stderr=ERRLOG)
        call(['iwconfig', iface, 'txpower', RUN_CONFIG.TX_POWER], stdout=OUTLOG, stderr=ERRLOG)
        print 'done'
    return RUN_CONFIG.IFACE_TO_TAKE_DOWN
[ "def", "enable_monitor_mode", "(", "iface", ")", ":", "global", "RUN_CONFIG", "print", "(", "(", "(", "GR", "+", "' [+]'", ")", "+", "W", ")", "+", "(", "' enabling monitor mode on %s...'", "%", "(", "(", "G", "+", "iface", ")", "+", "W", ")", ")", "...
uses airmon-ng to put a device into monitor mode .
train
false
28,473
def _briggs_helper_function(a, k): if ((k < 0) or (int(k) != k)): raise ValueError('expected a nonnegative integer k') if (k == 0): return (a - 1) elif (k == 1): return (np.sqrt(a) - 1) else: k_hat = k if (np.angle(a) >= (np.pi / 2)): a = np.sqrt(a) k_hat = (k - 1) z0 = (a - 1) a = np.sqrt(a) r = (1 + a) for j in range(1, k_hat): a = np.sqrt(a) r = (r * (1 + a)) r = (z0 / r) return r
[ "def", "_briggs_helper_function", "(", "a", ",", "k", ")", ":", "if", "(", "(", "k", "<", "0", ")", "or", "(", "int", "(", "k", ")", "!=", "k", ")", ")", ":", "raise", "ValueError", "(", "'expected a nonnegative integer k'", ")", "if", "(", "k", "=...
computes r = a^(1 / ) - 1 .
train
false
28,476
def findInvalidUID():
    """Return a UID mapped to no user, searching downward from SYSTEM_UID_MAX.

    When the pwd module is unavailable the starting guess is returned as-is.
    """
    guess = SYSTEM_UID_MAX
    if (pwd is not None):
        while True:
            try:
                pwd.getpwuid(guess)
            except KeyError:
                # No such user -- this UID is free.
                break
            else:
                guess -= 1
    return guess
[ "def", "findInvalidUID", "(", ")", ":", "guess", "=", "SYSTEM_UID_MAX", "if", "(", "pwd", "is", "not", "None", ")", ":", "while", "True", ":", "try", ":", "pwd", ".", "getpwuid", "(", "guess", ")", "except", "KeyError", ":", "break", "else", ":", "gu...
by convention .
train
false
28,477
def execute_plan(old_plan, index=None, verbose=False):
    """Deprecated: convert an old-style plan and execute its instructions."""
    plan = update_old_plan(old_plan)
    execute_instructions(plan, index, verbose)
[ "def", "execute_plan", "(", "old_plan", ",", "index", "=", "None", ",", "verbose", "=", "False", ")", ":", "plan", "=", "update_old_plan", "(", "old_plan", ")", "execute_instructions", "(", "plan", ",", "index", ",", "verbose", ")" ]
deprecated: this should conda .
train
false
28,478
def uint8_float(inarray):
    """Map uint8 pixel values in [0, 255] onto float32 values in [-1, 1]."""
    scaled = numpy.asarray(inarray, 'f') / 127.5
    return scaled - 1
[ "def", "uint8_float", "(", "inarray", ")", ":", "return", "(", "(", "numpy", ".", "asarray", "(", "inarray", ",", "'f'", ")", "/", "127.5", ")", "-", "1", ")" ]
converts arrays .
train
false
28,479
def sysrq(vm, action='nmi', key='uuid'):
    """Send a non-maskable interrupt to a VM, or capture a screenshot.

    vm     -- target VM value matched against *key*
    action -- 'nmi' or 'screenshot'
    key    -- one of 'uuid', 'alias', 'hostname'

    Returns True on success, otherwise a dict containing an 'Error' entry.
    """
    ret = {}
    vmadm = _check_vmadm()
    if (key not in ['uuid', 'alias', 'hostname']):
        ret['Error'] = 'Key must be either uuid, alias or hostname'
        return ret
    if (action not in ['nmi', 'screenshot']):
        ret['Error'] = 'Action must be either nmi or screenshot'
        return ret
    vm = lookup('{0}={1}'.format(key, vm), one=True)
    if ('Error' in vm):
        return vm
    cmd = '{vmadm} sysrq {uuid} {action}'.format(vmadm=vmadm, uuid=vm, action=action)
    res = __salt__['cmd.run_all'](cmd)
    retcode = res['retcode']
    if (retcode != 0):
        ret['Error'] = (res['stderr'] if ('stderr' in res) else _exit_status(retcode))
        return ret
    return True
[ "def", "sysrq", "(", "vm", ",", "action", "=", "'nmi'", ",", "key", "=", "'uuid'", ")", ":", "ret", "=", "{", "}", "vmadm", "=", "_check_vmadm", "(", ")", "if", "(", "key", "not", "in", "[", "'uuid'", ",", "'alias'", ",", "'hostname'", "]", ")", ...
send non-maskable interrupt to vm or capture a screenshot vm : string vm to be targeted action : string nmi or screenshot -- default: nmi key : string [uuid|alias|hostname] value type of vm parameter cli example: .
train
true
28,481
def GetCommandLineFiles(command_line_file_list, recursive, exclude):
    """Return the list of Python files specified on the command line."""
    return _FindPythonFiles(command_line_file_list, recursive, exclude)
[ "def", "GetCommandLineFiles", "(", "command_line_file_list", ",", "recursive", ",", "exclude", ")", ":", "return", "_FindPythonFiles", "(", "command_line_file_list", ",", "recursive", ",", "exclude", ")" ]
return the list of files specified on the command line .
train
false
28,483
def _get_instance_info():
    """Return (instance_id, region) of the EC2 instance this code runs on,
    read from the boto instance-identity document."""
    identity = boto.utils.get_instance_identity()['document']
    return (identity['instanceId'], identity['region'])
[ "def", "_get_instance_info", "(", ")", ":", "identity", "=", "boto", ".", "utils", ".", "get_instance_identity", "(", ")", "[", "'document'", "]", "return", "(", "identity", "[", "'instanceId'", "]", ",", "identity", "[", "'region'", "]", ")" ]
helper function to return the instance id and region of the master where this pillar is run .
train
false
28,484
def purge_url(path):
    """Issue a Fastly PURGE for *path*; no-op in DEBUG or without an API key.

    Returns the HTTP response when a purge was sent, otherwise None.
    """
    if settings.DEBUG:
        return
    api_key = getattr(settings, 'FASTLY_API_KEY', None)
    if api_key:
        response = requests.request('PURGE', 'https://www.python.org{}'.format(path), headers={'Fastly-Key': api_key})
        return response
    return None
[ "def", "purge_url", "(", "path", ")", ":", "if", "settings", ".", "DEBUG", ":", "return", "api_key", "=", "getattr", "(", "settings", ",", "'FASTLY_API_KEY'", ",", "None", ")", "if", "api_key", ":", "response", "=", "requests", ".", "request", "(", "'PUR...
purge a fastly .
train
false
28,485
def _ord_to_str(ordinal, weights):
    """Reverse function of _str_to_ord: map an ordinal back to its string.

    Consumes one positional weight per output character until the ordinal
    is exhausted.
    """
    result = ''
    for weight in weights:
        if not ordinal:
            break
        ordinal -= 1
        index, ordinal = divmod(ordinal, weight)
        result += _ALPHABET[index]
    return result
[ "def", "_ord_to_str", "(", "ordinal", ",", "weights", ")", ":", "chars", "=", "[", "]", "for", "weight", "in", "weights", ":", "if", "(", "ordinal", "==", "0", ")", ":", "return", "''", ".", "join", "(", "chars", ")", "ordinal", "-=", "1", "(", "...
reverse function of _str_to_ord .
train
true
28,486
def get_packages(package):
    """List all package directories (those containing __init__.py) under *package*."""
    found = []
    for dirpath, _dirnames, _filenames in os.walk(package):
        if os.path.exists(os.path.join(dirpath, '__init__.py')):
            found.append(dirpath)
    return found
[ "def", "get_packages", "(", "package", ")", ":", "return", "[", "dirpath", "for", "(", "dirpath", ",", "dirnames", ",", "filenames", ")", "in", "os", ".", "walk", "(", "package", ")", "if", "os", ".", "path", ".", "exists", "(", "os", ".", "path", ...
get all packages which are under dname .
train
true
28,487
def AnyBut(s):
    """AnyBut(s) is an RE matching any character NOT in the string *s*."""
    ranges = chars_to_ranges(s)
    # Complement the ranges by bracketing them with -maxint / maxint.
    ranges.insert(0, (- maxint))
    ranges.append(maxint)
    result = CodeRanges(ranges)
    result.str = ('AnyBut(%s)' % repr(s))
    return result
[ "def", "AnyBut", "(", "s", ")", ":", "ranges", "=", "chars_to_ranges", "(", "s", ")", "ranges", ".", "insert", "(", "0", ",", "(", "-", "maxint", ")", ")", "ranges", ".", "append", "(", "maxint", ")", "result", "=", "CodeRanges", "(", "ranges", ")"...
anybut(s) is an re which matches any character which is not in the string |s| .
train
false
28,488
def toBase64(s):
    """Return the base64 representation of *s*, without the trailing newline
    that b2a_base64 appends."""
    encoded = binascii.b2a_base64(s)
    return encoded[:-1]
[ "def", "toBase64", "(", "s", ")", ":", "return", "binascii", ".", "b2a_base64", "(", "s", ")", "[", ":", "(", "-", "1", ")", "]" ]
represent string s as base64 .
train
false
28,490
def _norm_factor(p, k): p = asarray(p, dtype=complex) def G(w): '\n Gain of filter\n ' return abs((k / prod(((1j * w) - p)))) def cutoff(w): '\n When gain = -3 dB, return 0\n ' return (G(w) - (1 / np.sqrt(2))) return optimize.newton(cutoff, 1.5)
[ "def", "_norm_factor", "(", "p", ",", "k", ")", ":", "p", "=", "asarray", "(", "p", ",", "dtype", "=", "complex", ")", "def", "G", "(", "w", ")", ":", "return", "abs", "(", "(", "k", "/", "prod", "(", "(", "(", "1j", "*", "w", ")", "-", "...
numerically find frequency shift to apply to delay-normalized filter such that -3 db point is at 1 rad/sec .
train
false
28,491
def _dict_to_json_pretty(d, sort_keys=True): return json.dumps(d, indent=4, separators=(',', ': '), sort_keys=sort_keys)
[ "def", "_dict_to_json_pretty", "(", "d", ",", "sort_keys", "=", "True", ")", ":", "return", "json", ".", "dumps", "(", "d", ",", "indent", "=", "4", ",", "separators", "=", "(", "','", ",", "': '", ")", ",", "sort_keys", "=", "sort_keys", ")" ]
helper function to generate pretty printed json output .
train
true
28,492
def make_sshnode(test_case):
    """Create a ProcessNode that can SSH into the local machine.

    The throwaway SSH server is restored when *test_case* finishes.
    """
    server = create_ssh_server(FilePath(test_case.mktemp()))
    test_case.addCleanup(server.restore)
    return ProcessNode.using_ssh(host=unicode(server.ip).encode('ascii'), port=server.port, username='root', private_key=server.key_path)
[ "def", "make_sshnode", "(", "test_case", ")", ":", "server", "=", "create_ssh_server", "(", "FilePath", "(", "test_case", ".", "mktemp", "(", ")", ")", ")", "test_case", ".", "addCleanup", "(", "server", ".", "restore", ")", "return", "ProcessNode", ".", "...
create a processnode that can ssh into the local machine .
train
false
28,493
def parseFileAndReport(filename, _open=file):
    """Parse a Lore XHTML document and return its parsed contents.

    Raises process.ProcessingFailure (with the filename) on I/O errors.
    """
    content = _TagTrackingContentHandler()
    error = _LocationReportingErrorHandler(content)
    parser = make_parser()
    parser.setContentHandler(content)
    parser.setErrorHandler(error)
    parser.reset()
    # reset is stubbed out so later calls cannot reinitialise the parser.
    parser.reset = (lambda : None)
    parser._parser.UseForeignDTD(True)
    parser.setEntityResolver(_LocalEntityResolver(filename))
    parser.setFeature(feature_validation, False)
    fObj = _open(filename)
    try:
        parser.parse(fObj)
    except IOError as e:
        raise process.ProcessingFailure((((e.strerror + ", filename was '") + filename) + "'"))
    finally:
        fObj.close()
    return content.document
[ "def", "parseFileAndReport", "(", "filename", ",", "_open", "=", "file", ")", ":", "content", "=", "_TagTrackingContentHandler", "(", ")", "error", "=", "_LocationReportingErrorHandler", "(", "content", ")", "parser", "=", "make_parser", "(", ")", "parser", ".",...
parse and return the contents of the given lore xhtml document .
train
false
28,494
def _get_latest_metric(metric_code):
    """Return the most recent Metric with kind *metric_code*, or None."""
    try:
        last_metric = Metric.objects.filter(kind__code=metric_code).order_by('-start')[0]
        return last_metric
    except IndexError:
        # No metrics recorded yet for this kind.
        return None
[ "def", "_get_latest_metric", "(", "metric_code", ")", ":", "try", ":", "last_metric", "=", "Metric", ".", "objects", ".", "filter", "(", "kind__code", "=", "metric_code", ")", ".", "order_by", "(", "'-start'", ")", "[", "0", "]", "return", "last_metric", "...
returns the date of the latest metric value .
train
false
28,495
def test_roberts_diagonal2():
    """Roberts filter on a diagonal edge should yield a diagonal line."""
    image = np.rot90(np.tri(10, 10, 0), 3)
    expected = (~ np.rot90((np.tri(10, 10, (-1)).astype(bool) | np.tri(10, 10, (-2)).astype(bool).transpose())))
    expected = _mask_filter_result(expected, None)
    result = filters.roberts(image).astype(bool)
    assert_close(result, expected)
[ "def", "test_roberts_diagonal2", "(", ")", ":", "image", "=", "np", ".", "rot90", "(", "np", ".", "tri", "(", "10", ",", "10", ",", "0", ")", ",", "3", ")", "expected", "=", "(", "~", "np", ".", "rot90", "(", "(", "np", ".", "tri", "(", "10",...
roberts filter on a diagonal edge should be a diagonal line .
train
false
28,498
def _list_new_metadata(repository_path):
    """Return the relative paths of all repository metadata files found under
    the repodata/ subdirectory of *repository_path*."""
    return {'/'.join(path.segmentsFrom(repository_path)) for path in repository_path.child('repodata').walk()}
[ "def", "_list_new_metadata", "(", "repository_path", ")", ":", "return", "{", "'/'", ".", "join", "(", "path", ".", "segmentsFrom", "(", "repository_path", ")", ")", "for", "path", "in", "repository_path", ".", "child", "(", "'repodata'", ")", ".", "walk", ...
list the filenames of new and changed repository metadata files .
train
false
28,499
def fakeCallbackCanceller(deferred):
    """Fake canceller that fires *deferred* with a result instead of cancelling."""
    deferred.callback('Callback Result')
[ "def", "fakeCallbackCanceller", "(", "deferred", ")", ":", "deferred", ".", "callback", "(", "'Callback Result'", ")" ]
a fake l{defer .
train
false
28,500
def rolling_vwap(df, length):
    """Simple rolling volume-weighted average price over *length* bars (for testing).

    Positions before the first full window are NaN; the result shares the
    input DataFrame's index.
    """
    closes = df['close'].values
    volumes = df['volume'].values
    weighted = closes * volumes
    out = full_like(closes, nan)
    for end in range(length, len(closes) + 1):
        window = slice(end - length, end)
        out[end - 1] = weighted[window].sum() / volumes[window].sum()
    return Series(out, index=df.index)
[ "def", "rolling_vwap", "(", "df", ",", "length", ")", ":", "closes", "=", "df", "[", "'close'", "]", ".", "values", "volumes", "=", "df", "[", "'volume'", "]", ".", "values", "product", "=", "(", "closes", "*", "volumes", ")", "out", "=", "full_like"...
simple rolling vwap implementation for testing .
train
false
28,504
def OutLbrace(format='', *args):
    """Like Output(), but append ' {' to the line and increase indent level."""
    if format:
        format = (format + ' {')
    else:
        format = '{'
    VaOutput(format, args)
    IndentLevel()
[ "def", "OutLbrace", "(", "format", "=", "''", ",", "*", "args", ")", ":", "if", "format", ":", "format", "=", "(", "format", "+", "' {'", ")", "else", ":", "format", "=", "'{'", "VaOutput", "(", "format", ",", "args", ")", "IndentLevel", "(", ")" ]
like output .
train
false