id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
33,489
def aws_from_configuration(region, zone, access_key_id, secret_access_key, cluster_id, session_token=None, validate_region=True): try: return EBSBlockDeviceAPI(ec2_client=ec2_client(region=region, zone=zone, access_key_id=access_key_id, secret_access_key=secret_access_key, session_token=session_token, validate_region=validate_region), cluster_id=cluster_id) except (InvalidRegionError, InvalidZoneError) as e: raise StorageInitializationError(StorageInitializationError.CONFIGURATION_ERROR, *e.args)
[ "def", "aws_from_configuration", "(", "region", ",", "zone", ",", "access_key_id", ",", "secret_access_key", ",", "cluster_id", ",", "session_token", "=", "None", ",", "validate_region", "=", "True", ")", ":", "try", ":", "return", "EBSBlockDeviceAPI", "(", "ec2...
build an ebsblockdeviceapi instance using configuration and credentials .
train
false
33,491
def _shallow_split(filenames, size_per_shard, parsed_formats, sizes): roots = [] inputs = [] shard_size = 0 for (i, size) in enumerate(sizes): shard_size += size inputs.append(_FileRange(filenames[i], None)) if (shard_size >= size_per_shard): roots.append(FileFormatRoot(copy.deepcopy(parsed_formats), inputs)) inputs = [] shard_size = 0 if inputs: roots.append(FileFormatRoot(copy.deepcopy(parsed_formats), inputs)) return roots
[ "def", "_shallow_split", "(", "filenames", ",", "size_per_shard", ",", "parsed_formats", ",", "sizes", ")", ":", "roots", "=", "[", "]", "inputs", "=", "[", "]", "shard_size", "=", "0", "for", "(", "i", ",", "size", ")", "in", "enumerate", "(", "sizes"...
split files into roots only based on top level file sizes .
train
false
33,492
def ensure_data_root(environ=None): ensure_directory(data_root(environ=environ))
[ "def", "ensure_data_root", "(", "environ", "=", "None", ")", ":", "ensure_directory", "(", "data_root", "(", "environ", "=", "environ", ")", ")" ]
ensure that the data root exists .
train
false
33,494
def _get_inter_coords(coords, strand=1): if (strand == (-1)): sorted_coords = [(max(a, b), min(a, b)) for (a, b) in coords] inter_coords = list(chain(*sorted_coords))[1:(-1)] return list(zip(inter_coords[1::2], inter_coords[::2])) else: inter_coords = list(chain(*coords))[1:(-1)] return list(zip(inter_coords[::2], inter_coords[1::2]))
[ "def", "_get_inter_coords", "(", "coords", ",", "strand", "=", "1", ")", ":", "if", "(", "strand", "==", "(", "-", "1", ")", ")", ":", "sorted_coords", "=", "[", "(", "max", "(", "a", ",", "b", ")", ",", "min", "(", "a", ",", "b", ")", ")", ...
from the given pairs of coordinates .
train
false
33,495
def save_obj(obj, save_path): if ((save_path is None) or (len(save_path) == 0)): return save_path = os.path.expandvars(os.path.expanduser(save_path)) logger.debug('serializing object to: %s', save_path) ensure_dirs_exist(save_path) pickle.dump(obj, open(save_path, 'wb'), 2)
[ "def", "save_obj", "(", "obj", ",", "save_path", ")", ":", "if", "(", "(", "save_path", "is", "None", ")", "or", "(", "len", "(", "save_path", ")", "==", "0", ")", ")", ":", "return", "save_path", "=", "os", ".", "path", ".", "expandvars", "(", "...
dumps a python data structure to a saved on-disk representation .
train
false
33,496
def _multi_broadcast(*shapelist): assert shapelist result = shapelist[0] others = shapelist[1:] try: for (i, each) in enumerate(others, start=1): result = _pairwise_broadcast(result, each) except ValueError: raise ValueError('failed to broadcast argument #{0}'.format(i)) else: return result
[ "def", "_multi_broadcast", "(", "*", "shapelist", ")", ":", "assert", "shapelist", "result", "=", "shapelist", "[", "0", "]", "others", "=", "shapelist", "[", "1", ":", "]", "try", ":", "for", "(", "i", ",", "each", ")", "in", "enumerate", "(", "othe...
raises valueerror if broadcast fails .
train
false
33,497
def filters(im, detail=False, sharpen=False, **kwargs): if detail: im = im.filter(ImageFilter.DETAIL) if sharpen: im = im.filter(ImageFilter.SHARPEN) return im
[ "def", "filters", "(", "im", ",", "detail", "=", "False", ",", "sharpen", "=", "False", ",", "**", "kwargs", ")", ":", "if", "detail", ":", "im", "=", "im", ".", "filter", "(", "ImageFilter", ".", "DETAIL", ")", "if", "sharpen", ":", "im", "=", "...
pass the source image through post-processing filters .
train
true
33,499
def display_output(data, out=None, opts=None, **kwargs): if (opts is None): opts = {} display_data = try_printout(data, out, opts, **kwargs) output_filename = opts.get('output_file', None) log.trace('data = {0}'.format(data)) try: if output_filename: if (not hasattr(output_filename, 'write')): ofh = salt.utils.fopen(output_filename, 'a') fh_opened = True else: ofh = output_filename fh_opened = False try: fdata = display_data if isinstance(fdata, six.text_type): try: fdata = fdata.encode('utf-8') except (UnicodeDecodeError, UnicodeEncodeError): pass if fdata: if six.PY3: ofh.write(fdata.decode()) else: ofh.write(fdata) ofh.write('\n') finally: if fh_opened: ofh.close() return if display_data: print_cli(display_data) except IOError as exc: if (exc.errno != errno.EPIPE): raise exc
[ "def", "display_output", "(", "data", ",", "out", "=", "None", ",", "opts", "=", "None", ",", "**", "kwargs", ")", ":", "if", "(", "opts", "is", "None", ")", ":", "opts", "=", "{", "}", "display_data", "=", "try_printout", "(", "data", ",", "out", ...
print the passed data using the desired output .
train
true
33,500
def DosDateTimeToTimeTuple(dosDateTime): dosDate = (dosDateTime >> 16) dosTime = (dosDateTime & 65535) day = (dosDate & 31) month = ((dosDate >> 5) & 15) year = (1980 + (dosDate >> 9)) second = (2 * (dosTime & 31)) minute = ((dosTime >> 5) & 63) hour = (dosTime >> 11) return time.localtime(time.mktime((year, month, day, hour, minute, second, 0, 1, (-1))))
[ "def", "DosDateTimeToTimeTuple", "(", "dosDateTime", ")", ":", "dosDate", "=", "(", "dosDateTime", ">>", "16", ")", "dosTime", "=", "(", "dosDateTime", "&", "65535", ")", "day", "=", "(", "dosDate", "&", "31", ")", "month", "=", "(", "(", "dosDate", ">...
convert an ms-dos format date time to a python time tuple .
train
false
33,502
def _derivate_diff_eq(listofpoly): sol = [] a = (len(listofpoly) - 1) sol.append(DMFdiff(listofpoly[0])) for (i, j) in enumerate(listofpoly[1:]): sol.append((DMFdiff(j) + listofpoly[i])) sol.append(listofpoly[a]) return sol
[ "def", "_derivate_diff_eq", "(", "listofpoly", ")", ":", "sol", "=", "[", "]", "a", "=", "(", "len", "(", "listofpoly", ")", "-", "1", ")", "sol", ".", "append", "(", "DMFdiff", "(", "listofpoly", "[", "0", "]", ")", ")", "for", "(", "i", ",", ...
let a differential equation a0(x)y(x) + a1(x)y(x) + .
train
false
33,505
def get_machine_project(): return get_metadata_path('project/project-id')
[ "def", "get_machine_project", "(", ")", ":", "return", "get_metadata_path", "(", "'project/project-id'", ")" ]
returns the project that the gce instance running this code is running within .
train
false
33,506
def test_disk(): np.random.seed(21) image = np.random.uniform(size=(10, 20)) footprint = np.array([[1]]) result = peak.peak_local_max(image, labels=np.ones((10, 20)), footprint=footprint, min_distance=1, threshold_rel=0, threshold_abs=(-1), indices=False, exclude_border=False) assert np.all(result) result = peak.peak_local_max(image, footprint=footprint, threshold_abs=(-1), indices=False, exclude_border=False) assert np.all(result)
[ "def", "test_disk", "(", ")", ":", "np", ".", "random", ".", "seed", "(", "21", ")", "image", "=", "np", ".", "random", ".", "uniform", "(", "size", "=", "(", "10", ",", "20", ")", ")", "footprint", "=", "np", ".", "array", "(", "[", "[", "1"...
regression test of img-1194 .
train
false
33,508
def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None): tz = pytz.timezone(zone) if (utcoffset is None): return tz utcoffset = memorized_timedelta(utcoffset) dstoffset = memorized_timedelta(dstoffset) try: return tz._tzinfos[(utcoffset, dstoffset, tzname)] except KeyError: pass for localized_tz in tz._tzinfos.values(): if ((localized_tz._utcoffset == utcoffset) and (localized_tz._dst == dstoffset)): return localized_tz inf = (utcoffset, dstoffset, tzname) tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos) return tz._tzinfos[inf]
[ "def", "unpickler", "(", "zone", ",", "utcoffset", "=", "None", ",", "dstoffset", "=", "None", ",", "tzname", "=", "None", ")", ":", "tz", "=", "pytz", ".", "timezone", "(", "zone", ")", "if", "(", "utcoffset", "is", "None", ")", ":", "return", "tz...
factory function for unpickling pytz tzinfo instances .
train
true
33,509
def get_sync(dsk, keys, **kwargs): kwargs.pop('num_workers', None) return get_async(apply_sync, 1, dsk, keys, raise_on_exception=True, **kwargs)
[ "def", "get_sync", "(", "dsk", ",", "keys", ",", "**", "kwargs", ")", ":", "kwargs", ".", "pop", "(", "'num_workers'", ",", "None", ")", "return", "get_async", "(", "apply_sync", ",", "1", ",", "dsk", ",", "keys", ",", "raise_on_exception", "=", "True"...
get the value of sync variable in the make .
train
false
33,510
def getLastModule(): craftSequence = getReadCraftSequence() if (len(craftSequence) < 1): return None return getCraftModule(craftSequence[(-1)])
[ "def", "getLastModule", "(", ")", ":", "craftSequence", "=", "getReadCraftSequence", "(", ")", "if", "(", "len", "(", "craftSequence", ")", "<", "1", ")", ":", "return", "None", "return", "getCraftModule", "(", "craftSequence", "[", "(", "-", "1", ")", "...
get the last tool .
train
false
33,511
def group_create(context, data_dict): if (data_dict.get('type') == 'organization'): raise Exception(_('Trying to create an organization as a group')) _check_access('group_create', context, data_dict) return _group_or_org_create(context, data_dict)
[ "def", "group_create", "(", "context", ",", "data_dict", ")", ":", "if", "(", "data_dict", ".", "get", "(", "'type'", ")", "==", "'organization'", ")", ":", "raise", "Exception", "(", "_", "(", "'Trying to create an organization as a group'", ")", ")", "_check...
create a group from the values dictionary .
train
false
33,512
def _prepare_picks(info, data, picks): if (picks is None): picks = pick_types(info, meg=True, eeg=True, ref_meg=False, exclude='bads') if np.array_equal(picks, np.arange(len(data))): picks = slice(None) else: info = pick_info(info, picks) return (info, data, picks)
[ "def", "_prepare_picks", "(", "info", ",", "data", ",", "picks", ")", ":", "if", "(", "picks", "is", "None", ")", ":", "picks", "=", "pick_types", "(", "info", ",", "meg", "=", "True", ",", "eeg", "=", "True", ",", "ref_meg", "=", "False", ",", "...
prepare the picks .
train
false
33,513
def GetCrypter(secret): return GetSecretsManagerForSecret(secret).GetCrypter(secret)
[ "def", "GetCrypter", "(", "secret", ")", ":", "return", "GetSecretsManagerForSecret", "(", "secret", ")", ".", "GetCrypter", "(", "secret", ")" ]
returns the keyczar crypter object returned by the secrets manager instance getcrypter method .
train
false
33,515
def _post_data(options=None, xml=None): params = {'token': options['token'].strip(), 'cmd': 'submitcheck', 'XMLDATA': xml} res = salt.utils.http.query(url=options['url'], method='POST', params=params, data='', decode=True, status=True, header_dict={}, opts=__opts__) if (res.get('status', None) == salt.ext.six.moves.http_client.OK): if (res.get('dict', None) and isinstance(res['dict'], list)): _content = res['dict'][0] if _content.get('status', None): return True else: return False else: log.error('No content returned from Nagios NRDP.') return False else: log.error('Error returned from Nagios NRDP. Status code: {0}.'.format(res.status_code)) return False
[ "def", "_post_data", "(", "options", "=", "None", ",", "xml", "=", "None", ")", ":", "params", "=", "{", "'token'", ":", "options", "[", "'token'", "]", ".", "strip", "(", ")", ",", "'cmd'", ":", "'submitcheck'", ",", "'XMLDATA'", ":", "xml", "}", ...
post data to nagios nrdp .
train
true
33,516
def _check_versionlock(): if (_yum() == 'dnf'): vl_plugin = 'python-dnf-plugins-extras-versionlock' else: vl_plugin = ('yum-versionlock' if (__grains__.get('osmajorrelease') == '5') else 'yum-plugin-versionlock') if (vl_plugin not in list_pkgs()): raise SaltInvocationError('Cannot proceed, {0} is not installed.'.format(vl_plugin))
[ "def", "_check_versionlock", "(", ")", ":", "if", "(", "_yum", "(", ")", "==", "'dnf'", ")", ":", "vl_plugin", "=", "'python-dnf-plugins-extras-versionlock'", "else", ":", "vl_plugin", "=", "(", "'yum-versionlock'", "if", "(", "__grains__", ".", "get", "(", ...
ensure that the appropriate versionlock plugin is present .
train
false
33,517
@then(u'the command output should not contain') def step_command_output_should_not_contain(context): assert (context.text is not None), 'REQUIRE: multi-line text' step_command_output_should_not_contain_text(context, context.text.strip())
[ "@", "then", "(", "u'the command output should not contain'", ")", "def", "step_command_output_should_not_contain", "(", "context", ")", ":", "assert", "(", "context", ".", "text", "is", "not", "None", ")", ",", "'REQUIRE: multi-line text'", "step_command_output_should_no...
example: when i run "behave .
train
false
33,519
def humanDatetime(value, strip_microsecond=True): text = unicode(value.isoformat()) text = text.replace('T', ' ') if (strip_microsecond and ('.' in text)): text = text.split('.')[0] return text
[ "def", "humanDatetime", "(", "value", ",", "strip_microsecond", "=", "True", ")", ":", "text", "=", "unicode", "(", "value", ".", "isoformat", "(", ")", ")", "text", "=", "text", ".", "replace", "(", "'T'", ",", "' '", ")", "if", "(", "strip_microsecon...
convert a timestamp to unicode string: use iso format with space separator .
train
false
33,520
def get_num_logical_cpus_per_socket(run_function=run): siblings = run_function('grep "^siblings" /proc/cpuinfo').stdout.rstrip() num_siblings = map(int, re.findall('^siblings\\s*:\\s*(\\d+)\\s*$', siblings, re.M)) if (len(num_siblings) == 0): raise error.TestError('Unable to find siblings info in /proc/cpuinfo') if (min(num_siblings) != max(num_siblings)): raise error.TestError(('Number of siblings differ %r' % num_siblings)) return num_siblings[0]
[ "def", "get_num_logical_cpus_per_socket", "(", "run_function", "=", "run", ")", ":", "siblings", "=", "run_function", "(", "'grep \"^siblings\" /proc/cpuinfo'", ")", ".", "stdout", ".", "rstrip", "(", ")", "num_siblings", "=", "map", "(", "int", ",", "re", ".", ...
get the number of cores per cpu .
train
false
33,521
def migrate_consistencygroups_to_groups(context, max_count, force=False): return IMPL.migrate_consistencygroups_to_groups(context, max_count, force)
[ "def", "migrate_consistencygroups_to_groups", "(", "context", ",", "max_count", ",", "force", "=", "False", ")", ":", "return", "IMPL", ".", "migrate_consistencygroups_to_groups", "(", "context", ",", "max_count", ",", "force", ")" ]
migrage cgs to generic volume groups .
train
false
33,522
def test_import_vispy_app1(): modnames = loaded_vispy_modules('vispy.app', 2) assert_equal(modnames, set((_min_modules + ['vispy.app', 'vispy.gloo', 'vispy.glsl', 'vispy.color'])))
[ "def", "test_import_vispy_app1", "(", ")", ":", "modnames", "=", "loaded_vispy_modules", "(", "'vispy.app'", ",", "2", ")", "assert_equal", "(", "modnames", ",", "set", "(", "(", "_min_modules", "+", "[", "'vispy.app'", ",", "'vispy.gloo'", ",", "'vispy.glsl'", ...
importing vispy .
train
false
33,523
def pxe(hostname, timeout=20, username=None, password=None): _cmds = ['racadm config -g cfgServerInfo -o cfgServerFirstBootDevice pxe', 'racadm config -g cfgServerInfo -o cfgServerBootOnce 1', 'racadm serveraction powercycle'] client = __connect(hostname, timeout, username, password) if isinstance(client, paramiko.SSHClient): for (i, cmd) in enumerate(_cmds, 1): log.info('Executing command {0}'.format(i)) (stdin, stdout, stderr) = client.exec_command(cmd) if ('successful' in stdout.readline()): log.info('Executing command: {0}'.format(cmd)) else: log.error('Unable to execute: {0}'.format(cmd)) return False return True
[ "def", "pxe", "(", "hostname", ",", "timeout", "=", "20", ",", "username", "=", "None", ",", "password", "=", "None", ")", ":", "_cmds", "=", "[", "'racadm config -g cfgServerInfo -o cfgServerFirstBootDevice pxe'", ",", "'racadm config -g cfgServerInfo -o cfgServerBootO...
connect to the dell drac and have the boot order set to pxe and power cycle the system to pxe boot cli example: .
train
true
33,525
def get_partial_date_formats(): warnings.warn("'django.utils.translation.get_partial_date_formats' is deprecated. Please update your code to use the new i18n aware formatting.", PendingDeprecationWarning) from django.conf import settings year_month_format = ugettext('YEAR_MONTH_FORMAT') month_day_format = ugettext('MONTH_DAY_FORMAT') if (year_month_format == 'YEAR_MONTH_FORMAT'): year_month_format = settings.YEAR_MONTH_FORMAT if (month_day_format == 'MONTH_DAY_FORMAT'): month_day_format = settings.MONTH_DAY_FORMAT return (year_month_format, month_day_format)
[ "def", "get_partial_date_formats", "(", ")", ":", "warnings", ".", "warn", "(", "\"'django.utils.translation.get_partial_date_formats' is deprecated. Please update your code to use the new i18n aware formatting.\"", ",", "PendingDeprecationWarning", ")", "from", "django", ".", "conf"...
checks whether translation files provide a translation for some technical message id to store partial date formats .
train
false
33,526
def lon2zone(lon): zone = int(round(((lon / 6.0) + 30.5))) return (((zone - 1) % 60) + 1)
[ "def", "lon2zone", "(", "lon", ")", ":", "zone", "=", "int", "(", "round", "(", "(", "(", "lon", "/", "6.0", ")", "+", "30.5", ")", ")", ")", "return", "(", "(", "(", "zone", "-", "1", ")", "%", "60", ")", "+", "1", ")" ]
convert longitude to numeric utm zone .
train
false
33,528
def read_value(addr_space, value_type, vaddr): if (not builtin_types.has_key(value_type)): raise Exception(('Invalid built-in type %s' % value_type)) type_unpack_char = builtin_types[value_type][1] type_size = builtin_types[value_type][0] buf = addr_space.read(vaddr, type_size) if (buf is None): return None (val,) = struct.unpack(type_unpack_char, buf) return val
[ "def", "read_value", "(", "addr_space", ",", "value_type", ",", "vaddr", ")", ":", "if", "(", "not", "builtin_types", ".", "has_key", "(", "value_type", ")", ")", ":", "raise", "Exception", "(", "(", "'Invalid built-in type %s'", "%", "value_type", ")", ")",...
read the low-level value for a built-in type .
train
false
33,529
def assign_params(sess, params, network): ops = [] for (idx, param) in enumerate(params): ops.append(network.all_params[idx].assign(param)) sess.run(ops)
[ "def", "assign_params", "(", "sess", ",", "params", ",", "network", ")", ":", "ops", "=", "[", "]", "for", "(", "idx", ",", "param", ")", "in", "enumerate", "(", "params", ")", ":", "ops", ".", "append", "(", "network", ".", "all_params", "[", "idx...
assign the given parameters to the tensorlayer network .
train
true
33,531
def detect_properties_cloud_storage(uri): vision_client = vision.Client() image = vision_client.image(source_uri=uri) properties = image.detect_properties() for prop in properties: color = prop.colors[0] print 'fraction: {}'.format(color.pixel_fraction) print 'r: {}'.format(color.color.red) print 'g: {}'.format(color.color.green) print 'g: {}'.format(color.color.blue)
[ "def", "detect_properties_cloud_storage", "(", "uri", ")", ":", "vision_client", "=", "vision", ".", "Client", "(", ")", "image", "=", "vision_client", ".", "image", "(", "source_uri", "=", "uri", ")", "properties", "=", "image", ".", "detect_properties", "(",...
detects image properties in the file located in google cloud storage .
train
false
33,532
def interfaces_names(): ret = [] with salt.utils.winapi.Com(): c = wmi.WMI() for iface in c.Win32_NetworkAdapter(NetEnabled=True): ret.append(iface.NetConnectionID) return ret
[ "def", "interfaces_names", "(", ")", ":", "ret", "=", "[", "]", "with", "salt", ".", "utils", ".", "winapi", ".", "Com", "(", ")", ":", "c", "=", "wmi", ".", "WMI", "(", ")", "for", "iface", "in", "c", ".", "Win32_NetworkAdapter", "(", "NetEnabled"...
return a list of all the interfaces names cli example: .
train
true
33,533
def GetCustomerIDs(client): managed_customer_service = client.GetService('ManagedCustomerService', version='v201607') offset = 0 selector = {'fields': ['CustomerId'], 'predicates': [{'field': 'CanManageClients', 'operator': 'EQUALS', 'values': [False]}], 'paging': {'startIndex': str(offset), 'numberResults': str(PAGE_SIZE)}} queue = multiprocessing.Queue() more_pages = True while more_pages: page = managed_customer_service.get(selector) if (page and ('entries' in page) and page['entries']): for entry in page['entries']: queue.put(entry['customerId']) else: raise Exception("Can't retrieve any customer ID.") offset += PAGE_SIZE selector['paging']['startIndex'] = str(offset) more_pages = (offset < int(page['totalNumEntries'])) return queue
[ "def", "GetCustomerIDs", "(", "client", ")", ":", "managed_customer_service", "=", "client", ".", "GetService", "(", "'ManagedCustomerService'", ",", "version", "=", "'v201607'", ")", "offset", "=", "0", "selector", "=", "{", "'fields'", ":", "[", "'CustomerId'"...
retrieves all customerids in the account hierarchy .
train
true
33,534
def _check_config_exists(config_file=None): if (config_file is None): config_file = _config_file() if (not os.path.isfile(config_file)): return False return True
[ "def", "_check_config_exists", "(", "config_file", "=", "None", ")", ":", "if", "(", "config_file", "is", "None", ")", ":", "config_file", "=", "_config_file", "(", ")", "if", "(", "not", "os", ".", "path", ".", "isfile", "(", "config_file", ")", ")", ...
verify the config file is present .
train
true
33,535
def setEpisodeToWanted(show, s, e): epObj = show.getEpisode(int(s), int(e)) if epObj: with epObj.lock: if ((epObj.status != SKIPPED) or (epObj.airdate == date.fromordinal(1))): return sickrage.srCore.srLogger.info((u'Setting episode %s S%02dE%02d to wanted' % (show.name, s, e))) epObj.status = WANTED epObj.saveToDB() sickrage.srCore.SEARCHQUEUE.put(BacklogQueueItem(show, [epObj])) sickrage.srCore.srLogger.info((u'Starting backlog search for %s S%02dE%02d because some episodes were set to wanted' % (show.name, s, e)))
[ "def", "setEpisodeToWanted", "(", "show", ",", "s", ",", "e", ")", ":", "epObj", "=", "show", ".", "getEpisode", "(", "int", "(", "s", ")", ",", "int", "(", "e", ")", ")", "if", "epObj", ":", "with", "epObj", ".", "lock", ":", "if", "(", "(", ...
sets an episode to wanted .
train
false
33,536
def p_small_stmt(p): p[0] = p[1]
[ "def", "p_small_stmt", "(", "p", ")", ":", "p", "[", "0", "]", "=", "p", "[", "1", "]" ]
small_stmt : flow_stmt | expr_stmt .
train
false
33,537
def mktime(t9): try: return mktime_real(t9) except OverflowError: raise SkipTest('Platform cannot construct time zone for {0!r}'.format(t9))
[ "def", "mktime", "(", "t9", ")", ":", "try", ":", "return", "mktime_real", "(", "t9", ")", "except", "OverflowError", ":", "raise", "SkipTest", "(", "'Platform cannot construct time zone for {0!r}'", ".", "format", "(", "t9", ")", ")" ]
call l{mktime_real} .
train
false
33,539
def collect_string_fields(format_string): formatter = string.Formatter() try: parseiterator = formatter.parse(format_string) for result in parseiterator: if all(((item is None) for item in result[1:])): continue name = result[1] nested = result[2] (yield name) if nested: for field in collect_string_fields(nested): (yield field) except ValueError: raise utils.IncompleteFormatString(format_string)
[ "def", "collect_string_fields", "(", "format_string", ")", ":", "formatter", "=", "string", ".", "Formatter", "(", ")", "try", ":", "parseiterator", "=", "formatter", ".", "parse", "(", "format_string", ")", "for", "result", "in", "parseiterator", ":", "if", ...
given a format string .
train
false
33,540
def get_address_territory(address_name): territory = None if address_name: address_fields = frappe.db.get_value(u'Address', address_name, [u'city', u'state', u'country']) for value in address_fields: territory = frappe.db.get_value(u'Territory', value) if territory: break return territory
[ "def", "get_address_territory", "(", "address_name", ")", ":", "territory", "=", "None", "if", "address_name", ":", "address_fields", "=", "frappe", ".", "db", ".", "get_value", "(", "u'Address'", ",", "address_name", ",", "[", "u'city'", ",", "u'state'", ",",...
tries to match city .
train
false
33,541
def set_zone(timezone): cmd = ['tzutil', '/s', LINTOWIN[timezone]] return (__salt__['cmd.retcode'](cmd, python_shell=False) == 0)
[ "def", "set_zone", "(", "timezone", ")", ":", "cmd", "=", "[", "'tzutil'", ",", "'/s'", ",", "LINTOWIN", "[", "timezone", "]", "]", "return", "(", "__salt__", "[", "'cmd.retcode'", "]", "(", "cmd", ",", "python_shell", "=", "False", ")", "==", "0", "...
set the local time zone .
train
false
33,542
@with_setup(prepare_stdout, registry.clear) def test_jsonreport_output_with_background_section(): @lettuce.step(u'the variable "(\\w+)" holds (\\d+)') @lettuce.step(u'the variable "(\\w+)" is equal to (\\d+)') def just_pass(step, *args): pass with check_jsonreport(u'background_simple'): runner = Runner(bg_feature_name(u'simple'), enable_jsonreport=True) runner.run()
[ "@", "with_setup", "(", "prepare_stdout", ",", "registry", ".", "clear", ")", "def", "test_jsonreport_output_with_background_section", "(", ")", ":", "@", "lettuce", ".", "step", "(", "u'the variable \"(\\\\w+)\" holds (\\\\d+)'", ")", "@", "lettuce", ".", "step", "...
test jsonreport output with a background section in the feature .
train
false
33,543
def test_insert_row(mixin_cols): t = QTable(mixin_cols) t['m'].info.description = 'd' if isinstance(t['m'], (u.Quantity, Column)): t.insert_row(1, t[(-1)]) assert (t[1] == t[(-1)]) assert (t['m'].info.description == 'd') else: with pytest.raises(ValueError) as exc: t.insert_row(1, t[(-1)]) assert ('Unable to insert row' in str(exc.value))
[ "def", "test_insert_row", "(", "mixin_cols", ")", ":", "t", "=", "QTable", "(", "mixin_cols", ")", "t", "[", "'m'", "]", ".", "info", ".", "description", "=", "'d'", "if", "isinstance", "(", "t", "[", "'m'", "]", ",", "(", "u", ".", "Quantity", ","...
test inserting a row .
train
false
33,544
def test_rl_beginning_of_line(lineedit, bridge): lineedit.set_aug_text('f<oo>bar') bridge.rl_beginning_of_line() assert (lineedit.aug_text() == '|foobar')
[ "def", "test_rl_beginning_of_line", "(", "lineedit", ",", "bridge", ")", ":", "lineedit", ".", "set_aug_text", "(", "'f<oo>bar'", ")", "bridge", ".", "rl_beginning_of_line", "(", ")", "assert", "(", "lineedit", ".", "aug_text", "(", ")", "==", "'|foobar'", ")"...
test rl_beginning_of_line .
train
false
33,545
def cum2mc(kappa): mc = [1, 0.0] kappa0 = kappa[0] kappa = ([1] + list(kappa)) for (nn, m) in enumerate(kappa[2:]): n = (nn + 2) mc.append(0) for k in range((n - 1)): mc[n] += ((comb((n - 1), k, exact=1) * kappa[(n - k)]) * mc[k]) mc[1] = kappa0 return mc[1:]
[ "def", "cum2mc", "(", "kappa", ")", ":", "mc", "=", "[", "1", ",", "0.0", "]", "kappa0", "=", "kappa", "[", "0", "]", "kappa", "=", "(", "[", "1", "]", "+", "list", "(", "kappa", ")", ")", "for", "(", "nn", ",", "m", ")", "in", "enumerate",...
convert non-central moments to cumulants recursive formula produces as many cumulants as moments references kenneth lange: numerical analysis for statisticians .
train
false
33,546
def json_query(data, expr): if (not HAS_LIB): raise AnsibleError('You need to install "jmespath" prior to running json_query filter') return jmespath.search(expr, data)
[ "def", "json_query", "(", "data", ",", "expr", ")", ":", "if", "(", "not", "HAS_LIB", ")", ":", "raise", "AnsibleError", "(", "'You need to install \"jmespath\" prior to running json_query filter'", ")", "return", "jmespath", ".", "search", "(", "expr", ",", "data...
query data using jmespath query language .
train
false
33,547
def save_datadir(store): store.sql('CREATE TABLE abe_tmp_datadir AS SELECT * FROM datadir')
[ "def", "save_datadir", "(", "store", ")", ":", "store", ".", "sql", "(", "'CREATE TABLE abe_tmp_datadir AS SELECT * FROM datadir'", ")" ]
copy the datadir table to recreate it with a new column .
train
false
33,549
@memoized def format_reference_chain(chain): v = t = vn = None text = '' if (not chain): text += 'Cannot access memory address' else: first = True for (v, t, vn) in chain: if (t != 'value'): text += ('%s%s ' % (('--> ' if (not first) else ''), format_address(v, t))) else: text += ('%s%s ' % (('--> ' if (not first) else ''), v)) first = False if vn: text += ('(%s)' % vn) elif (v != '0x0'): s = hex2str(v) if is_printable(s, '\x00'): text += ('(%s)' % string_repr(s.split('\x00')[0])) return text
[ "@", "memoized", "def", "format_reference_chain", "(", "chain", ")", ":", "v", "=", "t", "=", "vn", "=", "None", "text", "=", "''", "if", "(", "not", "chain", ")", ":", "text", "+=", "'Cannot access memory address'", "else", ":", "first", "=", "True", ...
colorize a chain of references .
train
false
33,550
def coerce(from_, to, **to_kwargs): def preprocessor(func, argname, arg): if isinstance(arg, from_): return to(arg, **to_kwargs) return arg return preprocessor
[ "def", "coerce", "(", "from_", ",", "to", ",", "**", "to_kwargs", ")", ":", "def", "preprocessor", "(", "func", ",", "argname", ",", "arg", ")", ":", "if", "isinstance", "(", "arg", ",", "from_", ")", ":", "return", "to", "(", "arg", ",", "**", "...
a preprocessing decorator that coerces inputs of a given type by passing them to a callable .
train
true
33,553
def chars_before(chars, match): if (match.start <= 0): return True return (match.input_string[(match.start - 1)] in chars)
[ "def", "chars_before", "(", "chars", ",", "match", ")", ":", "if", "(", "match", ".", "start", "<=", "0", ")", ":", "return", "True", "return", "(", "match", ".", "input_string", "[", "(", "match", ".", "start", "-", "1", ")", "]", "in", "chars", ...
validate the match if left character is in a given sequence .
train
true
33,554
def load_handler(path, *args, **kwargs): i = path.rfind(u'.') (module, attr) = (path[:i], path[(i + 1):]) try: mod = importlib.import_module(module) except ImportError as e: raise ImproperlyConfigured((u'Error importing upload handler module %s: "%s"' % (module, e))) except ValueError: raise ImproperlyConfigured(u'Error importing upload handler module.Is FILE_UPLOAD_HANDLERS a correctly defined list or tuple?') try: cls = getattr(mod, attr) except AttributeError: raise ImproperlyConfigured((u'Module "%s" does not define a "%s" upload handler backend' % (module, attr))) return cls(*args, **kwargs)
[ "def", "load_handler", "(", "path", ",", "*", "args", ",", "**", "kwargs", ")", ":", "i", "=", "path", ".", "rfind", "(", "u'.'", ")", "(", "module", ",", "attr", ")", "=", "(", "path", "[", ":", "i", "]", ",", "path", "[", "(", "i", "+", "...
given a path to a handler .
train
false
33,556
def _get_accumulator_filepath(): return os.path.join(salt.utils.get_accumulator_dir(__opts__['cachedir']), __instance_id__)
[ "def", "_get_accumulator_filepath", "(", ")", ":", "return", "os", ".", "path", ".", "join", "(", "salt", ".", "utils", ".", "get_accumulator_dir", "(", "__opts__", "[", "'cachedir'", "]", ")", ",", "__instance_id__", ")" ]
return accumulator data path .
train
false
33,557
def correct_sign(deg_f, deg_g, s1, rdel, cdel): M = s1[:, :] for i in range(((M.rows - deg_f) - 1), (((M.rows - deg_f) - rdel) - 1), (-1)): M.row_del(i) for i in range((M.rows - 1), ((M.rows - rdel) - 1), (-1)): M.row_del(i) for i in range(cdel): M.col_del((M.rows - 1)) Md = M[:, 0:M.rows] return Md.det()
[ "def", "correct_sign", "(", "deg_f", ",", "deg_g", ",", "s1", ",", "rdel", ",", "cdel", ")", ":", "M", "=", "s1", "[", ":", ",", ":", "]", "for", "i", "in", "range", "(", "(", "(", "M", ".", "rows", "-", "deg_f", ")", "-", "1", ")", ",", ...
used in various subresultant prs algorithms .
train
false
33,558
def build_and_install_packages(packages, install_dir): errors = [] for package in packages: if (not package.build_and_install(install_dir)): msg = ('Unable to build and install %s' % package.name) logging.error(msg) errors.append(msg) return errors
[ "def", "build_and_install_packages", "(", "packages", ",", "install_dir", ")", ":", "errors", "=", "[", "]", "for", "package", "in", "packages", ":", "if", "(", "not", "package", ".", "build_and_install", "(", "install_dir", ")", ")", ":", "msg", "=", "(",...
builds and installs all packages into install_dir .
train
false
33,559
def check_flowgram_ali_exe(): ali_exe = get_flowgram_ali_exe() if (which(ali_exe) is None): raise ApplicationNotFoundError(('The alignment program %s is not accessible via the PATH environment variable.' % ali_exe)) command = ('%s -h' % ali_exe) proc = Popen(command, shell=True, universal_newlines=True, stdout=PIPE, stderr=STDOUT) if (proc.wait() != 0): raise ApplicationError(('Calling %s failed. Check permissions and that it is in fact an executable.' % ali_exe)) result = proc.stdout.read() if (not result.startswith('Usage')): raise ApplicationError(('Calling %s failed. Check permissions and that it is in fact an executable.' % ali_exe)) return True
[ "def", "check_flowgram_ali_exe", "(", ")", ":", "ali_exe", "=", "get_flowgram_ali_exe", "(", ")", "if", "(", "which", "(", "ali_exe", ")", "is", "None", ")", ":", "raise", "ApplicationNotFoundError", "(", "(", "'The alignment program %s is not accessible via the PATH ...
check if we have a working flowgramaligner .
train
false
33,560
def dbserialize(data): return do_pickle(to_pickle(data))
[ "def", "dbserialize", "(", "data", ")", ":", "return", "do_pickle", "(", "to_pickle", "(", "data", ")", ")" ]
serialize to pickled form in one step .
train
false
33,562
def get_env_var_values(install_environment): env_var_dict = {} env_var_dict['REPOSITORY_INSTALL_DIR'] = install_environment.tool_shed_repository_install_dir env_var_dict['INSTALL_DIR'] = install_environment.install_dir env_var_dict['TMP_WORK_DIR'] = install_environment.tmp_work_dir env_var_dict['system_install'] = install_environment.install_dir env_var_dict['__is64bit__'] = (sys.maxsize > (2 ** 32)) return env_var_dict
[ "def", "get_env_var_values", "(", "install_environment", ")", ":", "env_var_dict", "=", "{", "}", "env_var_dict", "[", "'REPOSITORY_INSTALL_DIR'", "]", "=", "install_environment", ".", "tool_shed_repository_install_dir", "env_var_dict", "[", "'INSTALL_DIR'", "]", "=", "...
return a dictionary of values .
train
false
33,566
def primerange(a, b): from sympy.functions.elementary.integers import ceiling if (a >= b): return if (b <= sieve._list[(-1)]): for i in sieve.primerange(a, b): (yield i) return a = (int(ceiling(a)) - 1) b = int(ceiling(b)) while 1: a = nextprime(a) if (a < b): (yield a) else: return
[ "def", "primerange", "(", "a", ",", "b", ")", ":", "from", "sympy", ".", "functions", ".", "elementary", ".", "integers", "import", "ceiling", "if", "(", "a", ">=", "b", ")", ":", "return", "if", "(", "b", "<=", "sieve", ".", "_list", "[", "(", "...
generate a list of all prime numbers in the range [a .
train
false
33,567
def DeConv2d(net, n_out_channel=32, filter_size=(3, 3), out_size=(30, 30), strides=(2, 2), padding='SAME', batch_size=None, act=None, W_init=tf.truncated_normal_initializer(stddev=0.02), b_init=tf.constant_initializer(value=0.0), W_init_args={}, b_init_args={}, name='decnn2d'): if (act is None): act = tf.identity if (batch_size is None): batch_size = tf.shape(net.outputs)[0] net = DeConv2dLayer(layer=net, act=act, shape=[filter_size[0], filter_size[1], n_out_channel, int(net.outputs.get_shape()[(-1)])], output_shape=[batch_size, int(out_size[0]), int(out_size[1]), n_out_channel], strides=[1, strides[0], strides[1], 1], padding=padding, W_init=W_init, b_init=b_init, W_init_args=W_init_args, b_init_args=b_init_args, name=name) return net
[ "def", "DeConv2d", "(", "net", ",", "n_out_channel", "=", "32", ",", "filter_size", "=", "(", "3", ",", "3", ")", ",", "out_size", "=", "(", "30", ",", "30", ")", ",", "strides", "=", "(", "2", ",", "2", ")", ",", "padding", "=", "'SAME'", ",",...
wrapper for :class:deconv2dlayer .
train
false
33,569
def to_safe(word): return re.sub('[^A-Za-z0-9\\-]', '_', word)
[ "def", "to_safe", "(", "word", ")", ":", "return", "re", ".", "sub", "(", "'[^A-Za-z0-9\\\\-]'", ",", "'_'", ",", "word", ")" ]
converts bad characters in a string to underscores so they can be used as ansible groups .
train
false
33,571
def fulltext_search(query, lang, params): pks = set() search = {'source': False, 'context': False, 'target': False, 'comment': False, 'location': False} search.update(params) if (search['source'] or search['context'] or search['location']): pks.update(base_search(get_source_index(), query, ('source', 'context', 'location'), search, SourceSchema())) if (search['target'] or search['comment']): pks.update(base_search(get_target_index(lang), query, ('target', 'comment'), search, TargetSchema())) return pks
[ "def", "fulltext_search", "(", "query", ",", "lang", ",", "params", ")", ":", "pks", "=", "set", "(", ")", "search", "=", "{", "'source'", ":", "False", ",", "'context'", ":", "False", ",", "'target'", ":", "False", ",", "'comment'", ":", "False", ",...
performs fulltext search in given areas .
train
false
33,572
def dmp_resultant(f, g, u, K, includePRS=False): if (not u): return dup_resultant(f, g, K, includePRS=includePRS) if includePRS: return dmp_prs_resultant(f, g, u, K) if K.has_Field: if (K.is_QQ and query('USE_COLLINS_RESULTANT')): return dmp_qq_collins_resultant(f, g, u, K) elif (K.is_ZZ and query('USE_COLLINS_RESULTANT')): return dmp_zz_collins_resultant(f, g, u, K) return dmp_prs_resultant(f, g, u, K)[0]
[ "def", "dmp_resultant", "(", "f", ",", "g", ",", "u", ",", "K", ",", "includePRS", "=", "False", ")", ":", "if", "(", "not", "u", ")", ":", "return", "dup_resultant", "(", "f", ",", "g", ",", "K", ",", "includePRS", "=", "includePRS", ")", "if", ...
computes resultant of two polynomials in k[x] .
train
false
33,573
def _runscript(scriptname, sandbox=False): namespace = {'__name__': '__main__'} namespace['sys'] = globals()['sys'] try: pkg_resources.get_distribution('khmer').run_script(scriptname, namespace) return 0 except pkg_resources.ResolutionError: pass if sandbox: path = os.path.join(os.path.dirname(__file__), '../sandbox') else: path = scriptpath() scriptfile = os.path.join(path, scriptname) if os.path.isfile(scriptfile): if os.path.isfile(scriptfile): exec compile(open(scriptfile).read(), scriptfile, 'exec') in namespace return 0 elif sandbox: pytest.skip('sandbox tests are only run in a repository.') return (-1)
[ "def", "_runscript", "(", "scriptname", ",", "sandbox", "=", "False", ")", ":", "namespace", "=", "{", "'__name__'", ":", "'__main__'", "}", "namespace", "[", "'sys'", "]", "=", "globals", "(", ")", "[", "'sys'", "]", "try", ":", "pkg_resources", ".", ...
find & run a script with exec .
train
false
33,574
def get_datetime_now(): try: from django.utils import timezone return timezone.now() except ImportError: return datetime.datetime.now()
[ "def", "get_datetime_now", "(", ")", ":", "try", ":", "from", "django", ".", "utils", "import", "timezone", "return", "timezone", ".", "now", "(", ")", "except", "ImportError", ":", "return", "datetime", ".", "datetime", ".", "now", "(", ")" ]
returns datetime object with current point in time .
train
true
33,575
def getLoopsFromUnprovenMesh(edges, faces, importRadius, vertexes, z): edgePairTable = {} corners = [] remainingEdgeTable = getRemainingEdgeTable(edges, vertexes, z) remainingEdgeTableKeys = remainingEdgeTable.keys() for remainingEdgeIndexKey in remainingEdgeTable: edge = remainingEdgeTable[remainingEdgeIndexKey] carveIntersection = getCarveIntersectionFromEdge(edge, vertexes, z) corners.append(carveIntersection) for edgeFaceIndex in edge.faceIndexes: face = faces[edgeFaceIndex] for edgeIndex in face.edgeIndexes: addEdgePair(edgePairTable, edges, edgeIndex, remainingEdgeIndexKey, remainingEdgeTable) allPoints = corners[:] for edgePairValue in edgePairTable.values(): addPointsAtZ(edgePairValue, allPoints, importRadius, vertexes, z) pointTable = {} return getDescendingAreaLoops(allPoints, corners, importRadius)
[ "def", "getLoopsFromUnprovenMesh", "(", "edges", ",", "faces", ",", "importRadius", ",", "vertexes", ",", "z", ")", ":", "edgePairTable", "=", "{", "}", "corners", "=", "[", "]", "remainingEdgeTable", "=", "getRemainingEdgeTable", "(", "edges", ",", "vertexes"...
get loops from a carve of an unproven mesh .
train
false
33,576
@treeio_login_required @handle_response_format def index_status(request, response_format='html'): query = Q(status__hidden=False) if request.GET: if (('status' in request.GET) and request.GET['status']): query = _get_filter_query(request.GET) else: query = (query & _get_filter_query(request.GET)) orders = Object.filter_by_request(request, SaleOrder.objects.filter(query), mode='r') statuses = Object.filter_by_request(request, SaleStatus.objects, mode='r') filters = OrderFilterForm(request.user.profile, '', request.GET) total = 0 for status in statuses: status.count = 0 for order in orders: if (order.status == status): if (order.status.hidden is False): total += 1 status.count += order.quantity return render_to_response('sales/index_status', {'orders': orders, 'statuses': statuses, 'total': total, 'filters': filters}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "index_status", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "query", "=", "Q", "(", "status__hidden", "=", "False", ")", "if", "request", ".", "GET", ":", "if", "(", "("...
index status .
train
false
33,580
def request_stream(data_stream, rate, interim_results=True): recognition_config = cloud_speech_pb2.RecognitionConfig(encoding='LINEAR16', sample_rate=rate, language_code='en-US') streaming_config = cloud_speech_pb2.StreamingRecognitionConfig(interim_results=interim_results, config=recognition_config) (yield cloud_speech_pb2.StreamingRecognizeRequest(streaming_config=streaming_config)) for data in data_stream: (yield cloud_speech_pb2.StreamingRecognizeRequest(audio_content=data))
[ "def", "request_stream", "(", "data_stream", ",", "rate", ",", "interim_results", "=", "True", ")", ":", "recognition_config", "=", "cloud_speech_pb2", ".", "RecognitionConfig", "(", "encoding", "=", "'LINEAR16'", ",", "sample_rate", "=", "rate", ",", "language_co...
yields streamingrecognizerequests constructed from a recording audio stream .
train
false
33,581
def _override_size(vm_): vm_size = get_size(vm_) if ('cores' in vm_): vm_size['cores'] = vm_['cores'] if ('ram' in vm_): vm_size['ram'] = vm_['ram'] return vm_size
[ "def", "_override_size", "(", "vm_", ")", ":", "vm_size", "=", "get_size", "(", "vm_", ")", "if", "(", "'cores'", "in", "vm_", ")", ":", "vm_size", "[", "'cores'", "]", "=", "vm_", "[", "'cores'", "]", "if", "(", "'ram'", "in", "vm_", ")", ":", "...
apply any extra component overrides to vm from the cloud profile .
train
true
33,583
def check_hash(path, file_hash): path = os.path.expanduser(path) hash_parts = file_hash.split(':', 1) if (len(hash_parts) != 2): hash_parts = file_hash.split('=', 1) if (len(hash_parts) != 2): raise ValueError("Bad hash format: '{0}'".format(file_hash)) (hash_form, hash_value) = hash_parts return (get_hash(path, hash_form) == hash_value)
[ "def", "check_hash", "(", "path", ",", "file_hash", ")", ":", "path", "=", "os", ".", "path", ".", "expanduser", "(", "path", ")", "hash_parts", "=", "file_hash", ".", "split", "(", "':'", ",", "1", ")", "if", "(", "len", "(", "hash_parts", ")", "!...
returns true if hash does not match the previous one .
train
false
33,585
@contextfunction def finance_transaction_list(context, transactions, skip_group=False): request = context['request'] response_format = 'html' if ('response_format' in context): response_format = context['response_format'] return Markup(render_to_string('finance/tags/transaction_list', {'transactions': transactions, 'skip_group': skip_group}, context_instance=RequestContext(request), response_format=response_format))
[ "@", "contextfunction", "def", "finance_transaction_list", "(", "context", ",", "transactions", ",", "skip_group", "=", "False", ")", ":", "request", "=", "context", "[", "'request'", "]", "response_format", "=", "'html'", "if", "(", "'response_format'", "in", "...
print a list of orders .
train
false
33,586
def _reduce_dict(count_dict, partial_key): L = len(partial_key) count = sum((v for (k, v) in iteritems(count_dict) if (k[:L] == partial_key))) return count
[ "def", "_reduce_dict", "(", "count_dict", ",", "partial_key", ")", ":", "L", "=", "len", "(", "partial_key", ")", "count", "=", "sum", "(", "(", "v", "for", "(", "k", ",", "v", ")", "in", "iteritems", "(", "count_dict", ")", "if", "(", "k", "[", ...
make partial sum on a counter dict .
train
false
33,587
def countit(fields, iter_of_iter, default=None): C = Counter() T = namedtuple('Thing', fields) get = default_iget(default=default, *fields) return Counter((T(*get(thing)) for thing in iter_of_iter))
[ "def", "countit", "(", "fields", ",", "iter_of_iter", ",", "default", "=", "None", ")", ":", "C", "=", "Counter", "(", ")", "T", "=", "namedtuple", "(", "'Thing'", ",", "fields", ")", "get", "=", "default_iget", "(", "default", "=", "default", ",", "...
note: robust to fields not being in i_of_i .
train
false
33,588
def PPO(ds, count, fastperiod=(- (2 ** 31)), slowperiod=(- (2 ** 31)), matype=0): return call_talib_with_ds(ds, count, talib.PPO, fastperiod, slowperiod, matype)
[ "def", "PPO", "(", "ds", ",", "count", ",", "fastperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "slowperiod", "=", "(", "-", "(", "2", "**", "31", ")", ")", ",", "matype", "=", "0", ")", ":", "return", "call_talib_with_ds", "(", ...
percentage price oscillator .
train
false
33,589
def _prep_input(kwargs): for kwarg in ('environment', 'lxc_conf'): kwarg_value = kwargs.get(kwarg) if ((kwarg_value is not None) and (not isinstance(kwarg_value, six.string_types))): err = 'Invalid {0} configuration. See the documentation for proper usage.'.format(kwarg) if salt.utils.is_dictlist(kwarg_value): new_kwarg_value = salt.utils.repack_dictlist(kwarg_value) if (not kwarg_value): raise SaltInvocationError(err) kwargs[kwarg] = new_kwarg_value if (not isinstance(kwargs[kwarg], dict)): raise SaltInvocationError(err)
[ "def", "_prep_input", "(", "kwargs", ")", ":", "for", "kwarg", "in", "(", "'environment'", ",", "'lxc_conf'", ")", ":", "kwarg_value", "=", "kwargs", ".", "get", "(", "kwarg", ")", "if", "(", "(", "kwarg_value", "is", "not", "None", ")", "and", "(", ...
repack data that should be in a dict but is easier to configure in an sls file as a dictlist .
train
false
33,590
def test_multiplication(): assert ((2 * 2) == 4)
[ "def", "test_multiplication", "(", ")", ":", "assert", "(", "(", "2", "*", "2", ")", "==", "4", ")" ]
check some advanced maths .
train
false
33,591
def rc_params_from_file(fname, fail_on_error=False, use_default_template=True): config_from_file = _rc_params_in_file(fname, fail_on_error) if (not use_default_template): return config_from_file iter_params = six.iteritems(defaultParams) config = RcParams([(key, default) for (key, (default, _)) in iter_params if (key not in _all_deprecated)]) config.update(config_from_file) verbose.set_level(config[u'verbose.level']) verbose.set_fileo(config[u'verbose.fileo']) if (config[u'datapath'] is None): config[u'datapath'] = get_data_path() if (not (config[u'text.latex.preamble'] == [u''])): verbose.report((u'\n*****************************************************************\nYou have the following UNSUPPORTED LaTeX preamble customizations:\n%s\nPlease do not ask for support with these customizations active.\n*****************************************************************\n' % u'\n'.join(config[u'text.latex.preamble'])), u'helpful') verbose.report((u'loaded rc file %s' % fname)) return config
[ "def", "rc_params_from_file", "(", "fname", ",", "fail_on_error", "=", "False", ",", "use_default_template", "=", "True", ")", ":", "config_from_file", "=", "_rc_params_in_file", "(", "fname", ",", "fail_on_error", ")", "if", "(", "not", "use_default_template", ")...
return :class:matplotlib .
train
false
33,592
def import_string(import_name, silent=False): if isinstance(import_name, unicode): import_name = str(import_name) try: if (':' in import_name): (module, obj) = import_name.split(':', 1) elif ('.' in import_name): (module, obj) = import_name.rsplit('.', 1) else: return __import__(import_name) if isinstance(obj, unicode): obj = obj.encode('utf-8') try: return getattr(__import__(module, None, None, [obj]), obj) except (ImportError, AttributeError): modname = ((module + '.') + obj) __import__(modname) return sys.modules[modname] except ImportError as e: if (not silent): raise ImportStringError(import_name, e), None, sys.exc_info()[2]
[ "def", "import_string", "(", "import_name", ",", "silent", "=", "False", ")", ":", "if", "isinstance", "(", "import_name", ",", "unicode", ")", ":", "import_name", "=", "str", "(", "import_name", ")", "try", ":", "if", "(", "':'", "in", "import_name", ")...
imports an object based on a string .
train
true
33,595
def validate_bug_tracker(input_url): try: test_url = input_url.replace(u'%%', u'') if (test_url.find(u'%s') == (-1)): raise TypeError test_url = (test_url % 1) except (TypeError, ValueError): raise ValidationError([(_(u"%s has invalid format specification type(s). Use only one '%%s' to mark the location of the bug id. If the URL contains encoded values (e.g. '%%20'), prepend the encoded values with an additional '%%'.") % input_url)])
[ "def", "validate_bug_tracker", "(", "input_url", ")", ":", "try", ":", "test_url", "=", "input_url", ".", "replace", "(", "u'%%'", ",", "u''", ")", "if", "(", "test_url", ".", "find", "(", "u'%s'", ")", "==", "(", "-", "1", ")", ")", ":", "raise", ...
validate a bug tracker url .
train
false
33,596
def gcs_post_request(url): return requests.request('POST', url, verify=False)
[ "def", "gcs_post_request", "(", "url", ")", ":", "return", "requests", ".", "request", "(", "'POST'", ",", "url", ",", "verify", "=", "False", ")" ]
performs a post request to the given url .
train
false
33,597
def build_convergence_loop_fsm(reactor, deployer): loop = ConvergenceLoop(reactor, deployer) fsm = constructFiniteStateMachine(inputs=ConvergenceLoopInputs, outputs=ConvergenceLoopOutputs, states=ConvergenceLoopStates, initial=ConvergenceLoopStates.STOPPED, table=_CONVERGENCE_LOOP_FSM_TABLE, richInputs=[_ClientStatusUpdate, _Sleep], inputContext={}, world=MethodSuffixOutputer(loop)) loop.fsm = fsm return fsm
[ "def", "build_convergence_loop_fsm", "(", "reactor", ",", "deployer", ")", ":", "loop", "=", "ConvergenceLoop", "(", "reactor", ",", "deployer", ")", "fsm", "=", "constructFiniteStateMachine", "(", "inputs", "=", "ConvergenceLoopInputs", ",", "outputs", "=", "Conv...
create a convergence loop fsm .
train
false
33,598
def default_payment_handler(request, order_form, order): pass
[ "def", "default_payment_handler", "(", "request", ",", "order_form", ",", "order", ")", ":", "pass" ]
default payment handler - called when the final step of the checkout process with payment information is submitted .
train
false
33,599
def split_s3_bucket_key(s3_path): if s3_path.startswith('s3://'): s3_path = s3_path[5:] return find_bucket_key(s3_path)
[ "def", "split_s3_bucket_key", "(", "s3_path", ")", ":", "if", "s3_path", ".", "startswith", "(", "'s3://'", ")", ":", "s3_path", "=", "s3_path", "[", "5", ":", "]", "return", "find_bucket_key", "(", "s3_path", ")" ]
split s3 path into bucket and key prefix .
train
false
33,600
def fetch_github_activity(gen, metadata): if (u'GITHUB_ACTIVITY_FEED' in gen.settings.keys()): gen.context[u'github_activity'] = gen.plugin_instance.fetch()
[ "def", "fetch_github_activity", "(", "gen", ",", "metadata", ")", ":", "if", "(", "u'GITHUB_ACTIVITY_FEED'", "in", "gen", ".", "settings", ".", "keys", "(", ")", ")", ":", "gen", ".", "context", "[", "u'github_activity'", "]", "=", "gen", ".", "plugin_inst...
registered handler for the github activity plugin it puts in generator .
train
true
33,601
def unwrap_tasks(module, hide_nontasks=False): set_tasks = [] for (name, obj) in vars(module).items(): if isinstance(obj, WrappedCallableTask): setattr(module, obj.name, obj.wrapped) set_tasks.append(obj.name) obj.wrapped.func_name = obj.name else: if (name in set_tasks): continue has_docstring = getattr(obj, '__doc__', False) if (hide_nontasks and has_docstring and (not name.startswith('_'))): setattr(module, ('_%s' % name), obj) delattr(module, name)
[ "def", "unwrap_tasks", "(", "module", ",", "hide_nontasks", "=", "False", ")", ":", "set_tasks", "=", "[", "]", "for", "(", "name", ",", "obj", ")", "in", "vars", "(", "module", ")", ".", "items", "(", ")", ":", "if", "isinstance", "(", "obj", ",",...
replace task objects on module with their wrapped functions instead .
train
false
33,603
def h5_hist_data(filename, epoch_axis=True): ret = list() with h5py.File(filename, 'r') as f: if ('hist' in f): (hists, config) = [f[x] for x in ['hist', 'config']] (bins, offset, time_markers) = [hists.attrs[x] for x in ['bins', 'offset', 'time_markers']] total_epochs = config.attrs['total_epochs'] total_minibatches = config.attrs['total_minibatches'] for (hname, hdata) in hists.items(): dw = (total_epochs if (time_markers == 'epoch_freq') else total_minibatches) dh = bins ret.append((hname, hdata[...], dh, dw, bins, offset)) return ret
[ "def", "h5_hist_data", "(", "filename", ",", "epoch_axis", "=", "True", ")", ":", "ret", "=", "list", "(", ")", "with", "h5py", ".", "File", "(", "filename", ",", "'r'", ")", "as", "f", ":", "if", "(", "'hist'", "in", "f", ")", ":", "(", "hists",...
read histogram data from hdf5 file .
train
false
33,604
@preloaderPause def GetUserAuthCode(): try: code = None if (code is None): code = rawInput('Two-Factor Authentication Code: ') except (KeyboardInterrupt, SystemExit) as e: if e.message: tools.exit(e.message) else: tools.exit return code
[ "@", "preloaderPause", "def", "GetUserAuthCode", "(", ")", ":", "try", ":", "code", "=", "None", "if", "(", "code", "is", "None", ")", ":", "code", "=", "rawInput", "(", "'Two-Factor Authentication Code: '", ")", "except", "(", "KeyboardInterrupt", ",", "Sys...
prompts the user for a two factor auth code .
train
false
33,605
def set_change_request_state(change_id, state='approved'): client = _get_client() client.table = 'change_request' record = client.get({'number': change_id}) if ((record is None) or (len(record) == 0)): log.error('Failed to fetch change record, maybe it does not exist?') return False sys_id = record[0]['sys_id'] response = client.update({'approval': state}, sys_id) return response
[ "def", "set_change_request_state", "(", "change_id", ",", "state", "=", "'approved'", ")", ":", "client", "=", "_get_client", "(", ")", "client", ".", "table", "=", "'change_request'", "record", "=", "client", ".", "get", "(", "{", "'number'", ":", "change_i...
set the approval state of a change request/record .
train
true
33,607
def generate_context(context_file=u'cookiecutter.json', default_context=None, extra_context=None): context = {} try: with open(context_file) as file_handle: obj = json.load(file_handle, object_pairs_hook=OrderedDict) except ValueError as e: full_fpath = os.path.abspath(context_file) json_exc_message = str(e) our_exc_message = u'JSON decoding error while loading "{0}". Decoding error details: "{1}"'.format(full_fpath, json_exc_message) raise ContextDecodingException(our_exc_message) file_name = os.path.split(context_file)[1] file_stem = file_name.split(u'.')[0] context[file_stem] = obj if default_context: apply_overwrites_to_context(obj, default_context) if extra_context: apply_overwrites_to_context(obj, extra_context) logger.debug(u'Context generated is {}'.format(context)) return context
[ "def", "generate_context", "(", "context_file", "=", "u'cookiecutter.json'", ",", "default_context", "=", "None", ",", "extra_context", "=", "None", ")", ":", "context", "=", "{", "}", "try", ":", "with", "open", "(", "context_file", ")", "as", "file_handle", ...
generate the context for a cookiecutter project template .
train
true
33,609
def central_server_down_or_error(error_msg): if error_msg: from kalite.version import user_agent if (requests.get(settings.CENTRAL_SERVER_URL, headers={'user-agent': user_agent()}).status_code != 200): return {'error_msg': _('Central Server is not reachable; please try again after some time.')} else: return {'error_msg': error_msg}
[ "def", "central_server_down_or_error", "(", "error_msg", ")", ":", "if", "error_msg", ":", "from", "kalite", ".", "version", "import", "user_agent", "if", "(", "requests", ".", "get", "(", "settings", ".", "CENTRAL_SERVER_URL", ",", "headers", "=", "{", "'user...
if the central server is down .
train
false
33,610
def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron): try: croninfo_file = file(croninfo_path, 'r') except IOError as e: raise InvalidAppConfigError(('Cron configuration could not be read from "%s": %s' % (croninfo_path, e))) try: return parse_cron_config(croninfo_file) finally: croninfo_file.close()
[ "def", "ReadCronConfig", "(", "croninfo_path", ",", "parse_cron_config", "=", "croninfo", ".", "LoadSingleCron", ")", ":", "try", ":", "croninfo_file", "=", "file", "(", "croninfo_path", ",", "'r'", ")", "except", "IOError", "as", "e", ":", "raise", "InvalidAp...
reads cron .
train
false
33,611
def short_path(path, cwd=None): if (not isinstance(path, str)): return path if (cwd is None): cwd = os.getcwd() abspath = os.path.abspath(path) relpath = os.path.relpath(path, cwd) if (len(abspath) <= len(relpath)): return abspath return relpath
[ "def", "short_path", "(", "path", ",", "cwd", "=", "None", ")", ":", "if", "(", "not", "isinstance", "(", "path", ",", "str", ")", ")", ":", "return", "path", "if", "(", "cwd", "is", "None", ")", ":", "cwd", "=", "os", ".", "getcwd", "(", ")", ...
return relative or absolute path name .
train
true
33,612
def add_text_label_from_index(df): text = [] for idx in df.index: row_text = '' if isinstance(idx, tuple): for lev in reversed(idx): if ((lev is not '') and (row_text == '')): row_text = str(lev) else: row_text = str(idx) text.append(row_text) df['text'] = text return df
[ "def", "add_text_label_from_index", "(", "df", ")", ":", "text", "=", "[", "]", "for", "idx", "in", "df", ".", "index", ":", "row_text", "=", "''", "if", "isinstance", "(", "idx", ",", "tuple", ")", ":", "for", "lev", "in", "reversed", "(", "idx", ...
add column for text label .
train
false
33,613
def all_monitors(account_name, debug=False): monitor_dict = {} account = get_account_by_name(account_name) account_manager = account_registry.get(account.account_type.name)() for watcher_class in watcher_registry.itervalues(): if account_manager.is_compatible_with_account_type(watcher_class.account_type): monitor = Monitor(watcher_class, account, debug) monitor_dict[monitor.watcher.index] = monitor for mon in monitor_dict.values(): if (len(mon.auditors) > 0): path = [mon.watcher.index] _set_dependency_hierarchies(monitor_dict, mon, path, (mon.audit_tier + 1)) monitors = sorted(monitor_dict.values(), key=(lambda item: item.audit_tier), reverse=True) return monitors
[ "def", "all_monitors", "(", "account_name", ",", "debug", "=", "False", ")", ":", "monitor_dict", "=", "{", "}", "account", "=", "get_account_by_name", "(", "account_name", ")", "account_manager", "=", "account_registry", ".", "get", "(", "account", ".", "acco...
returns a list of all monitors in the correct audit order which apply to one or more of the accounts .
train
false
33,614
def memory_map(attrs=None, where=None): if (__grains__['os_family'] in ['RedHat', 'Debian']): return _osquery_cmd(table='memory_map', attrs=attrs, where=where) return {'result': False, 'comment': 'Only available on Red Hat or Debian based systems.'}
[ "def", "memory_map", "(", "attrs", "=", "None", ",", "where", "=", "None", ")", ":", "if", "(", "__grains__", "[", "'os_family'", "]", "in", "[", "'RedHat'", ",", "'Debian'", "]", ")", ":", "return", "_osquery_cmd", "(", "table", "=", "'memory_map'", "...
return memory_map information from osquery cli example: .
train
true
33,615
def get_config_keys(): global _allowed_config_keys return _allowed_config_keys.copy()
[ "def", "get_config_keys", "(", ")", ":", "global", "_allowed_config_keys", "return", "_allowed_config_keys", ".", "copy", "(", ")" ]
the config keys known by vispy and their allowed data types .
train
false
33,617
def mock_decorator_with_params(*oargs, **okwargs): def inner(fn, *iargs, **ikwargs): if hasattr(fn, '__call__'): return fn else: return Mock() return inner
[ "def", "mock_decorator_with_params", "(", "*", "oargs", ",", "**", "okwargs", ")", ":", "def", "inner", "(", "fn", ",", "*", "iargs", ",", "**", "ikwargs", ")", ":", "if", "hasattr", "(", "fn", ",", "'__call__'", ")", ":", "return", "fn", "else", ":"...
optionally mock a decorator that takes parameters e .
train
true
33,619
def dynamic_importer(name, class_name=None): try: (fp, pathname, description) = imp.find_module(name) except ImportError: print ('unable to locate module: ' + name) return (None, None) try: package = imp.load_module(name, fp, pathname, description) except Exception: raise if class_name: try: _class = imp.load_module(('%s.%s' % (name, class_name)), fp, pathname, description) except Exception: raise return (package, _class) else: return (package, None)
[ "def", "dynamic_importer", "(", "name", ",", "class_name", "=", "None", ")", ":", "try", ":", "(", "fp", ",", "pathname", ",", "description", ")", "=", "imp", ".", "find_module", "(", "name", ")", "except", "ImportError", ":", "print", "(", "'unable to l...
dynamically imports modules / classes .
train
false
33,620
def test_can_parse_a_unary_array_from_complicated_step(): steps = Step.many_from_lines(I_LIKE_VEGETABLES.splitlines()) assert_equals(len(steps), 1) assert isinstance(steps[0], Step) assert_equals(steps[0].sentence, I_LIKE_VEGETABLES)
[ "def", "test_can_parse_a_unary_array_from_complicated_step", "(", ")", ":", "steps", "=", "Step", ".", "many_from_lines", "(", "I_LIKE_VEGETABLES", ".", "splitlines", "(", ")", ")", "assert_equals", "(", "len", "(", "steps", ")", ",", "1", ")", "assert", "isinst...
it should extract a single tabular step correctly into an array of steps .
train
false
33,623
def _run_vw_command(cmd): LOG.info(u'Running Vowpal Wabbit command: %s', u' '.join(cmd)) proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) output = proc.communicate()[0].decode(u'utf-8') LOG.debug(u'Vowpal Wabbit output: %s', output) if (proc.returncode != 0): raise subprocess.CalledProcessError(proc.returncode, u' '.join(cmd), output=output) return output
[ "def", "_run_vw_command", "(", "cmd", ")", ":", "LOG", ".", "info", "(", "u'Running Vowpal Wabbit command: %s'", ",", "u' '", ".", "join", "(", "cmd", ")", ")", "proc", "=", "subprocess", ".", "Popen", "(", "cmd", ",", "stdout", "=", "subprocess", ".", "...
execute given vowpal wabbit command .
train
false
33,624
@register.filter def render_with_errors(bound_field): widget = bound_field.field.widget if (bound_field.errors and hasattr(widget, u'render_with_errors')): return widget.render_with_errors(bound_field.html_name, bound_field.value(), attrs={u'id': bound_field.auto_id}, errors=bound_field.errors) else: return bound_field.as_widget()
[ "@", "register", ".", "filter", "def", "render_with_errors", "(", "bound_field", ")", ":", "widget", "=", "bound_field", ".", "field", ".", "widget", "if", "(", "bound_field", ".", "errors", "and", "hasattr", "(", "widget", ",", "u'render_with_errors'", ")", ...
usage: {{ field|render_with_errors }} as opposed to {{ field }} .
train
false
33,626
def get_theme_base_dir(theme_dir_name, suppress_error=False): for themes_dir in get_theme_base_dirs(): if (theme_dir_name in get_theme_dirs(themes_dir)): return themes_dir if suppress_error: return None raise ValueError("Theme '{theme}' not found in any of the following themes dirs, \nTheme dirs: \n{dir}".format(theme=theme_dir_name, dir=get_theme_base_dirs()))
[ "def", "get_theme_base_dir", "(", "theme_dir_name", ",", "suppress_error", "=", "False", ")", ":", "for", "themes_dir", "in", "get_theme_base_dirs", "(", ")", ":", "if", "(", "theme_dir_name", "in", "get_theme_dirs", "(", "themes_dir", ")", ")", ":", "return", ...
returns absolute path to the directory that contains the given theme .
train
false