Dataset schema:

    id_within_dataset      int64     values 1 to 55.5k
    snippet                string    lengths 19 to 14.2k
    nl                     string    lengths 6 to 352
    split_within_dataset   string    1 value
    is_duplicated          bool      2 classes
8,892
def create_files_from_macho_zip(fileobj, project=None):
    # extract the zip into a temporary scratch directory
    scratchpad = tempfile.mkdtemp()
    try:
        safe_extract_zip(fileobj, scratchpad)
        # collect (cpu, uuid, path) for every Mach-O file found in the tree
        to_create = []
        for (dirpath, dirnames, filenames) in os.walk(scratchpad):
            for fn in filenames:
                fn = os.path.join(dirpath, fn)
                try:
                    uuids = get_macho_uuids(fn)
                except (IOError, ValueError):
                    # not a Mach-O file, skip it
                    continue
                for (cpu, uuid) in uuids:
                    to_create.append((cpu, uuid, fn))
        # create a dsym record for each collected uuid
        rv = []
        for (cpu, uuid, filename) in to_create:
            with open(filename, 'rb') as f:
                rv.append(_create_macho_dsym_from_uuid(
                    project, cpu, uuid, f, os.path.basename(filename)))
        return rv
    finally:
        shutil.rmtree(scratchpad)
creates all missing dsym files from the given zip file .
train
false
8,893
def composite(image1, image2, mask):
    return Image.composite(image1, image2, mask)
create composite image by blending images using a transparency mask .
train
false
8,894
def fetch_val_for_key(key, delete_key=False):
    try:
        if delete_key:
            return THEANO_FLAGS_DICT.pop(key)
        return THEANO_FLAGS_DICT[key]
    except KeyError:
        pass
    key_tokens = key.rsplit('.', 1)
    if (len(key_tokens) > 2):
        raise KeyError(key)
    if (len(key_tokens) == 2):
        (section, option) = key_tokens
    else:
        (section, option) = ('global', key)
    try:
        try:
            return theano_cfg.get(section, option)
        except ConfigParser.InterpolationError:
            return theano_raw_cfg.get(section, option)
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        raise KeyError(key)
return the overriding config value for a key .
train
false
8,895
def checkFieldName(field_name):
    if (field_name not in data_fields):
        raise ValueError(('%r is not a defined simple registration field' % (field_name,)))
check to see that the given value is a valid simple registration data field name .
train
false
8,896
def contact_autocreate_handler(sender, instance, created, **kwargs):
    if created:
        try:
            contact_type = ContactType.objects.filter((models.Q(name='Person') | models.Q(slug='person')))[0]
            contact = Contact(contact_type=contact_type, name=instance.name, related_user=instance)
            contact.save()
        except:
            pass
when a user is created , automatically create a person contact for them .
train
false
8,897
def _prepare_pre_auth_info_request(env, path, swift_source):
    newenv = make_pre_authed_env(env, 'HEAD', path, agent='Swift', query_string='', swift_source=swift_source)
    newenv.pop('HTTP_ORIGIN', None)
    newenv['swift_owner'] = True
    return Request.blank(quote(path), environ=newenv)
prepares a pre authed request to obtain info using a head .
train
false
8,898
def fetch_hdu(filename, cache=True):
    # retry the download a fixed number of times before giving up
    for retry in range(MAX_RETRIES):
        try:
            path = download_file((URL + filename), cache=cache, timeout=30)
        except:
            if (retry == (MAX_RETRIES - 1)):
                raise
            else:
                time.sleep(TIME_BETWEEN_RETRIES)
        else:
            break
    else:
        raise Exception('Failed to download file {0}'.format(filename))
    return fits.open(path)[0]
download a fits file to the cache and open hdu 0 .
train
false
8,899
def stftSynth(mY, pY, M, H):
    hM1 = int(math.floor(((M + 1) / 2)))
    hM2 = int(math.floor((M / 2)))
    nFrames = mY[:, 0].size
    y = np.zeros((((nFrames * H) + hM1) + hM2))
    pin = hM1
    for i in range(nFrames):
        y1 = DFT.dftSynth(mY[i, :], pY[i, :], M)
        y[(pin - hM1):(pin + hM2)] += (H * y1)
        pin += H
    y = np.delete(y, range(hM2))
    y = np.delete(y, range((y.size - hM1), y.size))
    return y
synthesis of a sound using the short-time fourier transform , given mY ( magnitude spectra ) and pY ( phase spectra ) .
train
false
8,900
def addNameToCache(name, tvdb_id):
    name = sanitizeSceneName(name)
    if (not tvdb_id):
        tvdb_id = 0
    cacheDB = db.DBConnection('cache.db')
    cacheDB.action('INSERT INTO scene_names (tvdb_id, name) VALUES (?, ?)', [tvdb_id, name])
adds the show & tvdb id to the scene_names table in cache .
train
false
8,901
def _bgp_capability_dispatcher(payload):
    cls = _capabilities_registry['BGPCapGeneric']
    if (payload is None):
        cls = _capabilities_registry['BGPCapGeneric']
    else:
        length = len(payload)
        if (length >= _BGP_CAPABILITY_MIN_SIZE):
            code = struct.unpack('!B', payload[0])[0]
            cls = _get_cls(_capabilities_objects.get(code, 'BGPCapGeneric'))
    return cls
returns the right class for a given bgp capability .
train
false
8,902
@memoize(prefix='get_excluded_in')
def get_excluded_in(region_id):
    aers = list(AddonExcludedRegion.objects.filter(region=region_id).values_list('addon', flat=True))
    geodata_qs = Q()
    region = parse_region(region_id)
    if (region in (mkt.regions.BRA, mkt.regions.DEU)):
        geodata_qs |= Q(**{('region_%s_iarc_exclude' % region.slug): True})
    if (region == mkt.regions.DEU):
        geodata_qs |= Q(**{'region_de_usk_exclude': True})
    geodata_exclusions = []
    if geodata_qs:
        geodata_exclusions = list(Geodata.objects.filter(geodata_qs).values_list('addon', flat=True))
    return set((aers + geodata_exclusions))
return ids of webapp objects excluded from a particular region or excluded due to geodata flags .
train
false
8,903
def dump_objects():
    from meliae import scanner
    scanner.dump_all_objects((PROFILING_OUTPUT_FMT % get_filename_fmt()))
dump all objects in memory to a profiling output file using meliae .
train
false
8,904
def set_probes(probes, test=False, commit=True):
    return __salt__['net.load_template']('set_probes', probes=probes, test=test, commit=commit)
configures rpm/sla probes on the device .
train
false
8,905
def make_api_project(project_data):
    from readthedocs.projects.models import Project
    for key in ['users', 'resource_uri', 'absolute_url', 'downloads', 'main_language_project', 'related_projects']:
        if (key in project_data):
            del project_data[key]
    project = Project(**project_data)
    project.save = _new_save
    return project
make mock project instance from api return .
train
false
8,906
def createservicesid(svc):
    uni = ''.join([(c + '\x00') for c in svc])
    sha = hashlib.sha1(uni.upper()).digest()
    dec = list()
    for i in range(5):
        dec.append(struct.unpack('<I', sha[(i * 4):((i * 4) + 4)])[0])
    return ('S-1-5-80-' + '-'.join([str(n) for n in dec]))
calculate the service sid .
train
false
8,907
def ravel(a):
    return a.ravel()
returns a flattened array .
train
false
8,908
@cacheit
def _nT(n, k):
    if (k == 0):
        return (1 if (k == n) else 0)
    return sum((_nT((n - k), j) for j in range((min(k, (n - k)) + 1))))
return the number of partitions of n items into k parts .
train
false
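A quick sanity check of the recurrence (a standalone sketch with sympy's @cacheit memoizer dropped so it runs on its own):

    def _nT(n, k):
        # number of partitions of n items into exactly k parts
        if k == 0:
            return 1 if k == n else 0
        return sum(_nT(n - k, j) for j in range(min(k, n - k) + 1))

    assert _nT(5, 2) == 2   # 5 = 4+1 = 3+2
    assert _nT(6, 3) == 3   # 6 = 4+1+1 = 3+2+1 = 2+2+2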
8,909
def uninstalled(name, password, keychain='/Library/Keychains/System.keychain', keychain_password=None):
    ret = {'name': name, 'result': True, 'comment': '', 'changes': {}}
    certs = __salt__['keychain.list_certs'](keychain)
    if ('.p12' in name):
        # remote p12 bundles are cached locally before reading the friendly name
        if (('http' in name) or ('salt' in name)):
            name = __salt__['cp.cache_file'](name)
        friendly_name = __salt__['keychain.get_friendly_name'](name, password)
    else:
        friendly_name = name
    if (friendly_name in certs):
        out = __salt__['keychain.uninstall'](friendly_name, keychain, keychain_password)
        if ('unable' not in out):
            ret['changes']['uninstalled'] = friendly_name
        else:
            ret['result'] = False
            ret['comment'] += 'Failed to uninstall {0}'.format(friendly_name)
    else:
        ret['comment'] += '{0} already uninstalled.'.format(friendly_name)
    return ret
uninstalls a certificate from the specified keychain , identified by its friendly name .
train
true
8,910
def configure_proxy(proxyname, start=True):
    changes_new = []
    changes_old = []
    status_file = True
    test = __opts__['test']
    # write (or check) the proxy configuration file
    proxyfile = '/etc/salt/proxy'
    (status_file, msg_new, msg_old) = _proxy_conf_file(proxyfile, test)
    changes_new.extend(msg_new)
    changes_old.extend(msg_old)
    status_proc = False
    if start:
        # start the salt-proxy process for this proxy name
        (status_proc, msg_new, msg_old) = _proxy_process(proxyname, test)
        changes_old.extend(msg_old)
        changes_new.extend(msg_new)
    else:
        changes_old.append('Start is False, not starting salt-proxy process')
        log.debug('Process not started')
    return {'result': (status_file and status_proc),
            'changes': {'old': '\n'.join(changes_old), 'new': '\n'.join(changes_new)}}
create the salt proxy file and start the proxy process if required . parameters : name - the name of this state ; proxyname - name to be used for this proxy ; start - boolean indicating if the process should be started .
train
true
8,913
def render_common_pairs(profile=None):
    if (profile is None):
        profile = snapshot_profile()

    def seq():
        for (_, ops, count) in common_pairs(profile):
            (yield ('%s: %s\n' % (count, ops)))
    return ''.join(seq())
renders the most common opcode pairs to a string in order of descending frequency .
train
false
8,914
def test_unset_text(qtbot):
    label = TextBase()
    qtbot.add_widget(label)
    label.setText('foo')
    label.setText('')
    assert (not label._elided_text)
make sure the text is cleared properly .
train
false
8,915
def with_settings(*arg_settings, **kw_settings):
    def outer(func):
        @wraps(func)
        def inner(*args, **kwargs):
            with settings(*arg_settings, **kw_settings):
                return func(*args, **kwargs)
        return _wrap_as_new(func, inner)
    return outer
decorator equivalent of fabric's settings context manager .
train
false
8,916
def maybe_unwrap_results(results):
    return getattr(results, '_results', results)
gets raw results back from wrapped results .
train
false
8,917
def is_base(text):
    return min([(ord(char) in BASE_CHARS) for char in text])
checks whether text should use cjk fonts .
train
false
8,918
def testdoc(*arguments, **options):
    TestDoc().execute(*arguments, **options)
executes testdoc programmatically .
train
false
8,919
def _roll_vectorized(M, roll_indices, axis):
    assert (axis in [0, 1])
    ndim = M.ndim
    assert (ndim == 3)
    ndim_roll = roll_indices.ndim
    assert (ndim_roll == 1)
    sh = M.shape
    (r, c) = sh[(-2):]
    assert (sh[0] == roll_indices.shape[0])
    vec_indices = np.arange(sh[0], dtype=np.int32)
    M_roll = np.empty_like(M)
    if (axis == 0):
        for ir in range(r):
            for ic in range(c):
                M_roll[:, ir, ic] = M[(vec_indices, (((- roll_indices) + ir) % r), ic)]
    elif (axis == 1):
        for ir in range(r):
            for ic in range(c):
                M_roll[:, ir, ic] = M[(vec_indices, ir, (((- roll_indices) + ic) % c))]
    return M_roll
rolls an array of matrices along an axis according to an array of indices *roll_indices* ; *axis* can be either 0 or 1 .
train
false
8,921
def SecurityCheck(func):
    def Wrapper(request, *args, **kwargs):
        'Wrapping function.'
        if (WEBAUTH_MANAGER is None):
            raise RuntimeError('Attempt to initialize before WEBAUTH_MANAGER set.')
        return WEBAUTH_MANAGER.SecurityCheck(func, request, *args, **kwargs)
    return Wrapper
a decorator applied to protected web handlers .
train
true
8,922
def _build_id_tuple(params, spec):
    if (spec is None):
        return (None, None)
    required_class = spec.class_
    required_tag = spec.tag
    tag_type = params.get(u'tag_type', spec.tag_type)
    if (tag_type is not None):
        required_class = 2
    required_class = params.get(u'class_', required_class)
    required_tag = params.get(u'tag', required_tag)
    return (required_class, required_tag)
builds a 2-element tuple used to identify fields by grabbing the class_ and tag from an asn1value class and the params dict being passed to it .
train
false
8,925
@calculator(272696320)
def calculate_perf_counter_counter(previous, current, property_name):
    n0 = previous[property_name]
    n1 = current[property_name]
    d0 = previous['Timestamp_Sys100NS']
    d1 = current['Timestamp_Sys100NS']
    f = current['Frequency_Sys100NS']
    if ((n0 is None) or (n1 is None)):
        return
    return ((n1 - n0) / ((d1 - d0) / f))
calculate the perf_counter_counter counter type ( average number of operations per second over the sample interval ) .
train
true
8,926
def myTakeStep2(x):
    s = 0.5
    x += np.random.uniform((- s), s, np.shape(x))
    return x
redo randomdisplacement in function form without the attribute stepsize to make sure everything still works ok .
train
false
8,927
def tied_rank(x):
    # sort values, remembering each one's original position
    sorted_x = sorted(zip(x, range(len(x))))
    r = [0 for k in x]
    cur_val = sorted_x[0][0]
    last_rank = 0
    for i in range(len(sorted_x)):
        if (cur_val != sorted_x[i][0]):
            # a new value starts: assign the previous run its averaged rank
            cur_val = sorted_x[i][0]
            for j in range(last_rank, i):
                r[sorted_x[j][1]] = (float(((last_rank + 1) + i)) / 2.0)
            last_rank = i
        if (i == (len(sorted_x) - 1)):
            # flush the final run of (possibly tied) values
            for j in range(last_rank, (i + 1)):
                r[sorted_x[j][1]] = (float(((last_rank + i) + 2)) / 2.0)
    return r
computes the tied rank of elements in x .
train
false
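A small worked example of the averaging behaviour, assuming the snippet above is in scope: tied values share the mean of the ranks they occupy.

    # ranks are 1-based; the tied 2s occupy ranks 2 and 3, so each gets 2.5
    assert tied_rank([1, 2, 2, 3]) == [1.0, 2.5, 2.5, 4.0]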
8,929
def get_link_name(connection, link):
    if link:
        return connection.follow_link(link).name
    else:
        return None
this method returns the name of the element which link points to .
train
false
8,931
def nthstr(n):
    assert (n >= 0)
    if ((n % 100) in [11, 12, 13]):
        return ('%sth' % n)
    return ({1: '%sst', 2: '%snd', 3: '%srd'}.get((n % 10), '%sth') % n)
formats an ordinal .
train
false
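For example, the teens are special-cased before the last-digit lookup:

    assert nthstr(1) == '1st'
    assert nthstr(2) == '2nd'
    assert nthstr(11) == '11th'    # 11, 12, 13 take 'th', not 'st'/'nd'/'rd'
    assert nthstr(22) == '22nd'
    assert nthstr(103) == '103rd'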
8,932
def _encode_objectid(name, value, dummy0, dummy1):
    return (('\x07' + name) + value.binary)
encode a bson objectid .
train
false
8,934
def build_nbt():
    os.chdir('pymclevel')
    os.system((sys.executable + ' setup.py build_ext --inplace --force'))
    os.chdir('..')
builds _nbt .
train
false
8,935
def pluginPackagePaths(name):
    package = name.split('.')
    return [os.path.abspath(os.path.join(x, *package))
            for x in sys.path
            if (not os.path.exists(os.path.join(x, *(package + ['__init__.py']))))]
return a list of additional directories which should be searched for modules to be included as part of the named plugin package .
train
true
8,936
def test_docinfo():
    exampledocinfo = env.metadata['metadata']
    expecteddocinfo = {
        'author': u'David Goodger',
        'authors': [u'Me', u'Myself', u'I'],
        'address': u'123 Example Street\nExample, EX Canada\nA1B 2C3',
        'field name': u'This is a generic bibliographic field.',
        'field name 2': u'Generic bibliographic fields may contain multiple body elements.\n\nLike this.',
        'status': u'This is a "work in progress"',
        'version': u'1',
        'copyright': u"This document has been placed in the public domain. You\nmay do with it as you wish. You may copy, modify,\nredistribute, reattribute, sell, buy, rent, lease,\ndestroy, or improve it, quote it at length, excerpt,\nincorporate, collate, fold, staple, or mutilate it, or do\nanything else to it that your or anyone else's heart\ndesires.",
        'contact': u'goodger@python.org',
        'date': u'2006-05-21',
        'organization': u'humankind',
        'revision': u'4564',
    }
    for key in exampledocinfo:
        (yield (assert_equals, exampledocinfo.get(key), expecteddocinfo.get(key)))
    (yield (assert_equals, set(expecteddocinfo.keys()), set(exampledocinfo.keys())))
inspect the docinfo metadata stored in the first node of the document .
train
false
8,937
def raw_post_data(request):
    return HttpResponse(request.raw_post_data)
a view that is requested with get and accesses request.raw_post_data .
train
false
8,938
def p_function_type(p):
    if (p[1] == 'void'):
        p[0] = TType.VOID
    else:
        p[0] = p[1]
function_type : field_type | void .
train
false
8,941
def _compile_template_re(delimiters):
    tag_types = '!>&/#^'
    tag = ('\n (?P<whitespace>[\\ \\t]*)\n %(otag)s \\s*\n (?:\n (?P<change>=) \\s* (?P<delims>.+?) \\s* = |\n (?P<raw>{) \\s* (?P<raw_name>.+?) \\s* } |\n (?P<tag>[%(tag_types)s]?) \\s* (?P<tag_key>[\\s\\S]+?)\n )\n \\s* %(ctag)s\n ' % {'tag_types': tag_types, 'otag': re.escape(delimiters[0]), 'ctag': re.escape(delimiters[1])})
    return re.compile(tag, re.VERBOSE)
return a regular expression object instance .
train
true
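A minimal check of the compiled pattern, assuming mustache-style delimiters:

    tag_re = _compile_template_re(('{{', '}}'))
    m = tag_re.search('Hello {{ name }}!')
    assert m.group('tag_key') == 'name'
    assert m.group('tag') == ''    # empty sigil means plain interpolation
    assert tag_re.search('{{# items }}').group('tag') == '#'    # section opener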
8,942
def describe_domain_brief(domain):
    items = OrderedDict()
    if (domain is None):
        return items
    items['Features'] = (len(domain.attributes) or 'None')
    items['Meta attributes'] = (len(domain.metas) or 'None')
    if domain.has_discrete_class:
        items['Target'] = "Class '{}'".format(domain.class_var.name)
    elif domain.has_continuous_class:
        items['Target'] = "Numeric variable '{}'".format(domain.class_var.name)
    elif domain.class_vars:
        items['Targets'] = len(domain.class_vars)
    else:
        items['Targets'] = False
    return items
return an ordereddict briefly describing the domain : number of features , meta attributes and targets .
train
false
8,944
def dmp_gf_sqf_part(f, K):
    raise NotImplementedError('multivariate polynomials over finite fields')
compute square-free part of f in gf(p)[x] .
train
false
8,948
def clear_flows():
    for c in core.openflow.connections:
        d = of.ofp_flow_mod(command=of.OFPFC_DELETE)
        c.send(d)
clear flows on all switches .
train
false
8,949
def list_networks(kwargs=None, call=None):
    if (call != 'function'):
        raise SaltCloudSystemExit('The list_networks function must be called with -f or --function.')
    return {'Networks': salt.utils.vmware.list_networks(_get_si())}
list virtual networks .
train
true
8,950
def SetIfNotNone(dict, attr_name, value):
    if (value is not None):
        dict[attr_name] = value
if "value" is not none , set dict[attr_name] to it .
train
false
8,951
def dead_items(vfs_top, paths):
    dead_branches = {}
    dead_saves = {}
    for path in paths:
        try:
            n = vfs_top.lresolve(path)
        except vfs.NodeError as e:
            add_error(('unable to resolve %s: %s' % (path, e)))
        else:
            if isinstance(n, vfs.BranchList):
                # removing a whole branch subsumes any of its individual saves
                branchname = n.name
                dead_branches[branchname] = n
                dead_saves.pop(branchname, None)
            elif (isinstance(n, vfs.FakeSymlink) and isinstance(n.parent, vfs.BranchList)):
                if (n.name == 'latest'):
                    add_error("error: cannot delete 'latest' symlink")
                else:
                    branchname = n.parent.name
                    if (branchname not in dead_branches):
                        dead_saves.setdefault(branchname, []).append(n)
            else:
                add_error(("don't know how to remove %r yet" % n.fullname()))
    if saved_errors:
        return (None, None)
    return (dead_branches, dead_saves)
return an optimized set of removals .
train
false
8,953
def make_paginator(request, items, per_page=50):
    try:
        page = int(request.GET.get('page', '1'))
    except ValueError:
        page = 1
    paginator = Paginator(items, per_page)
    try:
        items = paginator.page(page)
    except (InvalidPage, EmptyPage):
        items = paginator.page(paginator.num_pages)
    return items
make paginator .
train
false
8,954
def cxTwoPointCopy(ind1, ind2):
    size = len(ind1)
    cxpoint1 = random.randint(1, size)
    cxpoint2 = random.randint(1, (size - 1))
    if (cxpoint2 >= cxpoint1):
        cxpoint2 += 1
    else:
        (cxpoint1, cxpoint2) = (cxpoint2, cxpoint1)
    (ind1[cxpoint1:cxpoint2], ind2[cxpoint1:cxpoint2]) = (ind2[cxpoint1:cxpoint2].copy(), ind1[cxpoint1:cxpoint2].copy())
    return (ind1, ind2)
execute a two points crossover with copy on the input individuals .
train
false
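The .copy() calls are the point of this variant: with numpy-backed individuals, a plain simultaneous slice swap would overwrite one side before the other is read, because numpy slices are views. A seeded usage sketch:

    import random
    import numpy as np

    random.seed(1)
    ind1, ind2 = np.arange(10), np.arange(10, 20)
    cxTwoPointCopy(ind1, ind2)
    print(ind1, ind2)    # a contiguous middle segment has been exchanged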
8,956
def server_started_with_option(client, cmdline_opt, config_opt):
    command_line = get_command_line(client)
    if ('parsed' in command_line):
        parsed = command_line['parsed']
        if (config_opt in parsed):
            return parsed[config_opt]
    argv = command_line['argv']
    return (cmdline_opt in argv)
check if the server was started with a particular option .
train
false
8,957
def rgba(s, *args):
    if isinstance(s, string_types):
        return get_color_from_hex(s)
    elif isinstance(s, (list, tuple)):
        s = map((lambda x: (x / 255.0)), s)
        if (len(s) == 3):
            return (list(s) + [1])
        return s
    elif isinstance(s, (int, float)):
        s = map((lambda x: (x / 255.0)), ([s] + list(args)))
        if (len(s) == 3):
            return (list(s) + [1])
        return s
    raise Exception('Invalid value (not a string / list / tuple)')
return a kivy color from either a hex string or a list of 0-255 values .
train
false
8,958
def button_action(button, action):
    connect_button(button, action.trigger)
make a button trigger an action .
train
false
8,959
def humanize_key(key):
    return u':'.join([(u'%02x' % ord(c)) for c in key.get_fingerprint()])
returns a human-readable key as a series of hex characters .
train
false
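The formatting idiom is easy to check in isolation (Python 2 style, matching the snippet; in paramiko the fingerprint is the raw digest of the key):

    raw = '\x9f\x86\x01\xab'    # stand-in for a few bytes of a raw digest
    print(u':'.join([(u'%02x' % ord(c)) for c in raw]))
    # 9f:86:01:ab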
8,960
def register_widget(widget):
    WIDGETS[widget.name] = widget
    return widget
registers widget in dictionary .
train
false
8,962
def install_pyenv(name, user=None):
    ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
    if __opts__['test']:
        ret['comment'] = 'pyenv is set to be installed'
        return ret
    return _check_and_install_python(ret, user)
install pyenv if not installed .
train
true
8,964
@composite
def unique_name_strategy(draw):
    return unicode(draw(st.uuids()))
a hypothesis strategy to generate an always unique name .
train
false
8,965
def has_uniform_batch_size(mode):
    return resolve_iterator_class(mode).uniform_batch_size
returns true if the iteration scheme has uniform batch size .
train
false
8,966
def maybe_callable(thing, context=None):
    if callable(thing):
        return thing
    if isinstance(thing, six.string_types):
        thing = getattr(context, thing, None)
        if callable(thing):
            return thing
    return None
if thing is callable .
train
false
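A usage sketch: the string form lets callers pass the name of a method on some context object.

    class Ctx(object):
        def greet(self):
            return 'hi'

    assert maybe_callable('greet', Ctx())() == 'hi'    # resolved off the context
    assert maybe_callable('nope', Ctx()) is None       # missing attribute
    assert maybe_callable(42) is None                  # neither callable nor a name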
8,967
def _list_queues():
    queue_dir = __opts__['sqlite_queue_dir']
    files = os.path.join(queue_dir, '*.db')
    paths = glob.glob(files)
    queues = [os.path.splitext(os.path.basename(item))[0] for item in paths]
    return queues
return a list of sqlite databases in the queue_dir .
train
true
8,969
def same_node(node1, node2):
    return (node1.uuid == node2.uuid)
return whether these two objects both refer to same cluster node .
train
false
8,970
def topic_name_from_path(path, project):
    return _name_from_project_path(path, project, _TOPIC_TEMPLATE)
validate a topic uri path and get the topic name .
train
false
8,971
def total_sum_of_squares(y):
    return sum(((v ** 2) for v in de_mean(y)))
the total squared variation of y_is from their mean .
train
false
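A worked check, with a hedged stand-in for the de_mean helper the snippet assumes (it subtracts the mean from every element):

    def de_mean(y):
        mean = float(sum(y)) / len(y)
        return [(v - mean) for v in y]

    # deviations of [1, 2, 3] from their mean 2 are [-1, 0, 1]; squares sum to 2
    assert total_sum_of_squares([1, 2, 3]) == 2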
8,972
def detail(device='/dev/md0'):
    ret = {}
    ret['members'] = {}
    if (not os.path.exists(device)):
        msg = "Device {0} doesn't exist!"
        raise CommandExecutionError(msg.format(device))
    cmd = ['mdadm', '--detail', device]
    for line in __salt__['cmd.run_stdout'](cmd, python_shell=False).splitlines():
        if line.startswith(device):
            continue
        if (' ' not in line):
            continue
        if (':' not in line):
            # member-device rows have no colon; parse them positionally
            if ('/dev/' in line):
                comps = line.split()
                state = comps[4:(-1)]
                ret['members'][comps[0]] = {
                    'device': comps[(-1)],
                    'major': comps[1],
                    'minor': comps[2],
                    'number': comps[0],
                    'raiddevice': comps[3],
                    'state': ' '.join(state),
                }
            continue
        # "key : value" rows become normalized top-level keys
        comps = line.split(' : ')
        comps[0] = comps[0].lower()
        comps[0] = comps[0].strip()
        comps[0] = comps[0].replace(' ', '_')
        ret[comps[0]] = comps[1].strip()
    return ret
show detail for a specified raid device .
train
true
8,973
def is_float(input_string):
    try:
        float(input_string)
        return True
    except ValueError:
        return False
a check that tests that a given value is a float .
train
false
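The try/except approach accepts anything float() parses, which is broader than a digits-and-dot check:

    assert is_float('3.14') is True
    assert is_float('1e-3') is True    # exponent notation parses
    assert is_float('nan') is True     # so do nan/inf literals
    assert is_float('abc') is False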
8,974
def disable_server(name, backend, socket='/var/run/haproxy.sock'):
    if (backend == '*'):
        backends = show_backends(socket=socket).split('\n')
    else:
        backends = [backend]
    results = {}
    for backend in backends:
        ha_conn = _get_conn(socket)
        ha_cmd = haproxy.cmds.disableServer(server=name, backend=backend)
        ha_conn.sendCmd(ha_cmd)
        results[backend] = list_servers(backend, socket=socket)
    return results
disable server .
train
false
8,975
@addons_reviewer_required
def beta_signed_log(request):
    form = forms.BetaSignedLogForm(request.GET)
    beta_signed_log = ActivityLog.objects.beta_signed_events()
    motd_editable = acl.action_allowed(request, 'AddonReviewerMOTD', 'Edit')
    if form.is_valid():
        if form.cleaned_data['filter']:
            beta_signed_log = beta_signed_log.filter(action=form.cleaned_data['filter'])
    pager = amo.utils.paginate(request, beta_signed_log, 50)
    data = context(request, form=form, pager=pager, motd_editable=motd_editable)
    return render(request, 'editors/beta_signed_log.html', data)
log of all the beta files that got signed .
train
false
8,977
def read_fiducials(fname):
    (fid, tree, _) = fiff_open(fname)
    with fid:
        isotrak = dir_tree_find(tree, FIFF.FIFFB_ISOTRAK)
        isotrak = isotrak[0]
        pts = []
        coord_frame = FIFF.FIFFV_COORD_UNKNOWN
        for k in range(isotrak['nent']):
            kind = isotrak['directory'][k].kind
            pos = isotrak['directory'][k].pos
            if (kind == FIFF.FIFF_DIG_POINT):
                # digitizer point record
                tag = read_tag(fid, pos)
                pts.append(tag.data)
            elif (kind == FIFF.FIFF_MNE_COORD_FRAME):
                tag = read_tag(fid, pos)
                coord_frame = tag.data[0]
    if (coord_frame == FIFF.FIFFV_COORD_UNKNOWN):
        err = ('No coordinate frame was found in the file %r, it is probably not a valid fiducials file.' % fname)
        raise ValueError(err)
    # stamp every point with the coordinate frame found in the file
    for pt in pts:
        pt['coord_frame'] = coord_frame
    return (pts, coord_frame)
[ "def", "read_fiducials", "(", "fname", ")", ":", "(", "fid", ",", "tree", ",", "_", ")", "=", "fiff_open", "(", "fname", ")", "with", "fid", ":", "isotrak", "=", "dir_tree_find", "(", "tree", ",", "FIFF", ".", "FIFFB_ISOTRAK", ")", "isotrak", "=", "isotrak", "[", "0", "]", "pts", "=", "[", "]", "coord_frame", "=", "FIFF", ".", "FIFFV_COORD_UNKNOWN", "for", "k", "in", "range", "(", "isotrak", "[", "'nent'", "]", ")", ":", "kind", "=", "isotrak", "[", "'directory'", "]", "[", "k", "]", ".", "kind", "pos", "=", "isotrak", "[", "'directory'", "]", "[", "k", "]", ".", "pos", "if", "(", "kind", "==", "FIFF", ".", "FIFF_DIG_POINT", ")", ":", "tag", "=", "read_tag", "(", "fid", ",", "pos", ")", "pts", ".", "append", "(", "tag", ".", "data", ")", "elif", "(", "kind", "==", "FIFF", ".", "FIFF_MNE_COORD_FRAME", ")", ":", "tag", "=", "read_tag", "(", "fid", ",", "pos", ")", "coord_frame", "=", "tag", ".", "data", "[", "0", "]", "if", "(", "coord_frame", "==", "FIFF", ".", "FIFFV_COORD_UNKNOWN", ")", ":", "err", "=", "(", "'No coordinate frame was found in the file %r, it is probably not a valid fiducials file.'", "%", "fname", ")", "raise", "ValueError", "(", "err", ")", "for", "pt", "in", "pts", ":", "pt", "[", "'coord_frame'", "]", "=", "coord_frame", "return", "(", "pts", ",", "coord_frame", ")" ]
read fiducials from a fiff file .
train
false
8,978
def _loop_payload(params): payload = {} for (param, value) in six.iteritems(params): if (value is not None): payload[param] = value return payload
[ "def", "_loop_payload", "(", "params", ")", ":", "payload", "=", "{", "}", "for", "(", "param", ",", "value", ")", "in", "six", ".", "iteritems", "(", "params", ")", ":", "if", "(", "value", "is", "not", "None", ")", ":", "payload", "[", "param", "]", "=", "value", "return", "payload" ]
build a payload dict from the given parameters, dropping any whose value is None .
train
true
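the None-filtering in _loop_payload above can also be written as a plain dict comprehension; a minimal sketch without the six dependency, with a made-up call:

    def loop_payload(params):
        # keep only parameters that were actually supplied; falsy values
        # like 0 or '' are kept, since the test is 'is not None'
        return {k: v for k, v in params.items() if v is not None}

    assert loop_payload({"a": 1, "b": None, "c": 0}) == {"a": 1, "c": 0}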
8,980
def era_main(): return FlockerScriptRunner(script=EraScript(), options=EraOptions(), logging=False).main()
[ "def", "era_main", "(", ")", ":", "return", "FlockerScriptRunner", "(", "script", "=", "EraScript", "(", ")", ",", "options", "=", "EraOptions", "(", ")", ",", "logging", "=", "False", ")", ".", "main", "(", ")" ]
entry point for flocker-node-era command-line tool .
train
false
8,981
def get_location_metres(original_location, dNorth, dEast): earth_radius = 6378137.0 dLat = (dNorth / earth_radius) dLon = (dEast / (earth_radius * math.cos(((math.pi * original_location.lat) / 180)))) newlat = (original_location.lat + ((dLat * 180) / math.pi)) newlon = (original_location.lon + ((dLon * 180) / math.pi)) return LocationGlobal(newlat, newlon, original_location.alt)
[ "def", "get_location_metres", "(", "original_location", ",", "dNorth", ",", "dEast", ")", ":", "earth_radius", "=", "6378137.0", "dLat", "=", "(", "dNorth", "/", "earth_radius", ")", "dLon", "=", "(", "dEast", "/", "(", "earth_radius", "*", "math", ".", "cos", "(", "(", "(", "math", ".", "pi", "*", "original_location", ".", "lat", ")", "/", "180", ")", ")", ")", ")", "newlat", "=", "(", "original_location", ".", "lat", "+", "(", "(", "dLat", "*", "180", ")", "/", "math", ".", "pi", ")", ")", "newlon", "=", "(", "original_location", ".", "lon", "+", "(", "(", "dLon", "*", "180", ")", "/", "math", ".", "pi", ")", ")", "return", "LocationGlobal", "(", "newlat", ",", "newlon", ",", "original_location", ".", "alt", ")" ]
returns a locationglobal object containing the latitude/longitude of the point dnorth and deast metres from the specified original_location .
train
true
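the arithmetic in get_location_metres above treats the earth as a sphere: dLat is the northward distance divided by the earth radius, and dLon additionally divides by cos(latitude) because meridians converge toward the poles; a self-contained sketch with made-up coordinates:

    import math

    EARTH_RADIUS = 6378137.0  # metres, the same constant used above

    def offset_latlon(lat, lon, d_north, d_east):
        d_lat = d_north / EARTH_RADIUS
        d_lon = d_east / (EARTH_RADIUS * math.cos(math.pi * lat / 180))
        return lat + d_lat * 180 / math.pi, lon + d_lon * 180 / math.pi

    # about 111 metres north corresponds to roughly 0.001 degrees of latitude
    lat, lon = offset_latlon(52.0, 4.0, 111.0, 0.0)
    assert abs(lat - 52.000997) < 1e-5 and lon == 4.0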
8,982
def islink(p): return _false
[ "def", "islink", "(", "p", ")", ":", "return", "_false" ]
test for symbolic link (this stub always returns false) .
train
false
8,983
def write_timeseries_value(client, project_resource, custom_metric_type, instance_id, metric_kind): now = get_now_rfc3339() timeseries_data = {'metric': {'type': custom_metric_type, 'labels': {'environment': 'STAGING'}}, 'resource': {'type': 'gce_instance', 'labels': {'instance_id': instance_id, 'zone': 'us-central1-f'}}, 'points': [{'interval': {'startTime': now, 'endTime': now}, 'value': {'int64Value': get_custom_data_point()}}]} request = client.projects().timeSeries().create(name=project_resource, body={'timeSeries': [timeseries_data]}) request.execute()
[ "def", "write_timeseries_value", "(", "client", ",", "project_resource", ",", "custom_metric_type", ",", "instance_id", ",", "metric_kind", ")", ":", "now", "=", "get_now_rfc3339", "(", ")", "timeseries_data", "=", "{", "'metric'", ":", "{", "'type'", ":", "custom_metric_type", ",", "'labels'", ":", "{", "'environment'", ":", "'STAGING'", "}", "}", ",", "'resource'", ":", "{", "'type'", ":", "'gce_instance'", ",", "'labels'", ":", "{", "'instance_id'", ":", "instance_id", ",", "'zone'", ":", "'us-central1-f'", "}", "}", ",", "'points'", ":", "[", "{", "'interval'", ":", "{", "'startTime'", ":", "now", ",", "'endTime'", ":", "now", "}", ",", "'value'", ":", "{", "'int64Value'", ":", "get_custom_data_point", "(", ")", "}", "}", "]", "}", "request", "=", "client", ".", "projects", "(", ")", ".", "timeSeries", "(", ")", ".", "create", "(", "name", "=", "project_resource", ",", "body", "=", "{", "'timeSeries'", ":", "[", "timeseries_data", "]", "}", ")", "request", ".", "execute", "(", ")" ]
write the custom metric obtained by get_custom_data_point at a point in time .
train
false
8,985
def messy_split(long_line): new_list = [] old_list = long_line.split('=') for line in old_list: if (len(line) != 0): line += ('=' * ((4 - (len(line) % 4)) % 4)) new_list.append(line) return new_list
[ "def", "messy_split", "(", "long_line", ")", ":", "new_list", "=", "[", "]", "old_list", "=", "long_line", ".", "split", "(", "'='", ")", "for", "line", "in", "old_list", ":", "if", "(", "len", "(", "line", ")", "!=", "0", ")", ":", "line", "+=", "(", "'='", "*", "(", "(", "4", "-", "(", "len", "(", "line", ")", "%", "4", ")", ")", "%", "4", ")", ")", "new_list", ".", "append", "(", "line", ")", "return", "new_list" ]
splitting on '=' gives the right sections but deletes the b64 padding; use modulo math to restore the padding .
train
false
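the modulo expression in messy_split above works because base64 text must be a multiple of 4 characters long: (4 - len % 4) % 4 is the number of '=' characters to append, and the outer modulo keeps it at 0 when the length is already aligned; a small standalone sketch with a made-up string:

    import base64

    def pad_b64(chunk):
        # restore the '=' padding that splitting on '=' stripped off
        return chunk + "=" * ((4 - len(chunk) % 4) % 4)

    chunk = "aGVsbG8"  # base64 of b'hello' with its trailing '=' removed
    assert pad_b64(chunk) == "aGVsbG8="
    assert base64.b64decode(pad_b64(chunk)) == b"hello"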
8,986
def lfsr_sequence(key, fill, n): if (not isinstance(key, list)): raise TypeError('key must be a list') if (not isinstance(fill, list)): raise TypeError('fill must be a list') p = key[0].mod F = FF(p) s = fill k = len(fill) L = [] for i in range(n): s0 = s[:] L.append(s[0]) s = s[1:k] x = sum([int((key[i] * s0[i])) for i in range(k)]) s.append(F(x)) return L
[ "def", "lfsr_sequence", "(", "key", ",", "fill", ",", "n", ")", ":", "if", "(", "not", "isinstance", "(", "key", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'key must be a list'", ")", "if", "(", "not", "isinstance", "(", "fill", ",", "list", ")", ")", ":", "raise", "TypeError", "(", "'fill must be a list'", ")", "p", "=", "key", "[", "0", "]", ".", "mod", "F", "=", "FF", "(", "p", ")", "s", "=", "fill", "k", "=", "len", "(", "fill", ")", "L", "=", "[", "]", "for", "i", "in", "range", "(", "n", ")", ":", "s0", "=", "s", "[", ":", "]", "L", ".", "append", "(", "s", "[", "0", "]", ")", "s", "=", "s", "[", "1", ":", "k", "]", "x", "=", "sum", "(", "[", "int", "(", "(", "key", "[", "i", "]", "*", "s0", "[", "i", "]", ")", ")", "for", "i", "in", "range", "(", "k", ")", "]", ")", "s", ".", "append", "(", "F", "(", "x", ")", ")", "return", "L" ]
this function creates an lfsr sequence .
train
false
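a minimal standalone sketch of the same shift-register step over GF(2), avoiding the FF field class used above: each iteration emits the first register element, then shifts and appends the key/state dot product reduced mod 2; the taps and fill are illustrative:

    def lfsr_bits(key, fill, n):
        state, out = list(fill), []
        for _ in range(n):
            out.append(state[0])
            new_bit = sum(k * s for k, s in zip(key, state)) % 2
            state = state[1:] + [new_bit]
        return out

    # these taps give a maximal-length register: the output repeats with period 15
    seq = lfsr_bits([1, 1, 0, 0], [1, 0, 0, 0], 30)
    assert seq[:15] == seq[15:]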
8,987
def system_path(): ca_path = None paths = [u'/usr/lib/ssl/certs/ca-certificates.crt', u'/etc/ssl/certs/ca-certificates.crt', u'/etc/ssl/certs/ca-bundle.crt', u'/etc/pki/tls/certs/ca-bundle.crt', u'/etc/ssl/ca-bundle.pem', u'/usr/local/share/certs/ca-root-nss.crt', u'/etc/ssl/cert.pem'] if (u'SSL_CERT_FILE' in os.environ): paths.insert(0, os.environ[u'SSL_CERT_FILE']) for path in paths: if (os.path.exists(path) and (os.path.getsize(path) > 0)): ca_path = path break if (not ca_path): raise OSError(pretty_message(u'\n Unable to find a CA certs bundle in common locations - try\n setting the SSL_CERT_FILE environmental variable\n ')) return ca_path
[ "def", "system_path", "(", ")", ":", "ca_path", "=", "None", "paths", "=", "[", "u'/usr/lib/ssl/certs/ca-certificates.crt'", ",", "u'/etc/ssl/certs/ca-certificates.crt'", ",", "u'/etc/ssl/certs/ca-bundle.crt'", ",", "u'/etc/pki/tls/certs/ca-bundle.crt'", ",", "u'/etc/ssl/ca-bundle.pem'", ",", "u'/usr/local/share/certs/ca-root-nss.crt'", ",", "u'/etc/ssl/cert.pem'", "]", "if", "(", "u'SSL_CERT_FILE'", "in", "os", ".", "environ", ")", ":", "paths", ".", "insert", "(", "0", ",", "os", ".", "environ", "[", "u'SSL_CERT_FILE'", "]", ")", "for", "path", "in", "paths", ":", "if", "(", "os", ".", "path", ".", "exists", "(", "path", ")", "and", "(", "os", ".", "path", ".", "getsize", "(", "path", ")", ">", "0", ")", ")", ":", "ca_path", "=", "path", "break", "if", "(", "not", "ca_path", ")", ":", "raise", "OSError", "(", "pretty_message", "(", "u'\\n Unable to find a CA certs bundle in common locations - try\\n setting the SSL_CERT_FILE environmental variable\\n '", ")", ")", "return", "ca_path" ]
tries to find a ca certs bundle in common locations; raises oserror when no valid ca certs bundle is found on the filesystem; returns the full filesystem path to a ca certs bundle file .
train
false
8,988
def show_all_prices(call=None, kwargs=None): if (call == 'action'): raise SaltCloudSystemExit('The show_all_prices function must be called with -f or --function.') if (kwargs is None): kwargs = {} conn = get_conn(service='SoftLayer_Product_Package') if ('code' not in kwargs): return conn.getCategories(id=50) ret = {} for category in conn.getCategories(id=50): if (category['categoryCode'] != kwargs['code']): continue for group in category['groups']: for price in group['prices']: ret[price['id']] = price['item'].copy() del ret[price['id']]['id'] return ret
[ "def", "show_all_prices", "(", "call", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "(", "call", "==", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The show_all_prices function must be called with -f or --function.'", ")", "if", "(", "kwargs", "is", "None", ")", ":", "kwargs", "=", "{", "}", "conn", "=", "get_conn", "(", "service", "=", "'SoftLayer_Product_Package'", ")", "if", "(", "'code'", "not", "in", "kwargs", ")", ":", "return", "conn", ".", "getCategories", "(", "id", "=", "50", ")", "ret", "=", "{", "}", "for", "category", "in", "conn", ".", "getCategories", "(", "id", "=", "50", ")", ":", "if", "(", "category", "[", "'categoryCode'", "]", "!=", "kwargs", "[", "'code'", "]", ")", ":", "continue", "for", "group", "in", "category", "[", "'groups'", "]", ":", "for", "price", "in", "group", "[", "'prices'", "]", ":", "ret", "[", "price", "[", "'id'", "]", "]", "=", "price", "[", "'item'", "]", ".", "copy", "(", ")", "del", "ret", "[", "price", "[", "'id'", "]", "]", "[", "'id'", "]", "return", "ret" ]
return a dict of all prices on the cloud provider .
train
true
8,990
def pnio_get_config(pkt): ether = _get_ethernet(pkt) config = None if ((ether is not None) and ((ether.src, ether.dst) in conf.contribs['PNIO_RTC'])): config = conf.contribs['PNIO_RTC'][(ether.src, ether.dst)] return config
[ "def", "pnio_get_config", "(", "pkt", ")", ":", "ether", "=", "_get_ethernet", "(", "pkt", ")", "config", "=", "None", "if", "(", "(", "ether", "is", "not", "None", ")", "and", "(", "(", "ether", ".", "src", ",", "ether", ".", "dst", ")", "in", "conf", ".", "contribs", "[", "'PNIO_RTC'", "]", ")", ")", ":", "config", "=", "conf", ".", "contribs", "[", "'PNIO_RTC'", "]", "[", "(", "ether", ".", "src", ",", "ether", ".", "dst", ")", "]", "return", "config" ]
retrieve the config for a given communication .
train
false
8,992
def node_region(patched_ast_node): return patched_ast_node.region
[ "def", "node_region", "(", "patched_ast_node", ")", ":", "return", "patched_ast_node", ".", "region" ]
get the region of a patched ast node .
train
false
8,993
def floating_ip_get_all_by_host(context, host): return IMPL.floating_ip_get_all_by_host(context, host)
[ "def", "floating_ip_get_all_by_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "floating_ip_get_all_by_host", "(", "context", ",", "host", ")" ]
get all floating ips by host .
train
false
8,996
def cpu_usage_for_process(results, process): process_results = [r for r in results if ((r['metric']['type'] == 'cputime') and (r['process'] == process))] cpu_values = sum((r['value'] for r in process_results)) wallclock_values = sum((r['wallclock'] for r in process_results)) if (wallclock_values > 0): return (float(cpu_values) / wallclock_values) return None
[ "def", "cpu_usage_for_process", "(", "results", ",", "process", ")", ":", "process_results", "=", "[", "r", "for", "r", "in", "results", "if", "(", "(", "r", "[", "'metric'", "]", "[", "'type'", "]", "==", "'cputime'", ")", "and", "(", "r", "[", "'process'", "]", "==", "process", ")", ")", "]", "cpu_values", "=", "sum", "(", "(", "r", "[", "'value'", "]", "for", "r", "in", "process_results", ")", ")", "wallclock_values", "=", "sum", "(", "(", "r", "[", "'wallclock'", "]", "for", "r", "in", "process_results", ")", ")", "if", "(", "wallclock_values", ">", "0", ")", ":", "return", "(", "float", "(", "cpu_values", ")", "/", "wallclock_values", ")", "return", "None" ]
calculate the cpu percentage for a process running in a particular scenario .
train
false
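the metric in cpu_usage_for_process above is just total cpu time over total wallclock time for the matching samples; a self-contained sketch with made-up benchmark records:

    def cpu_fraction(results, process):
        rows = [r for r in results
                if r["metric"]["type"] == "cputime" and r["process"] == process]
        cpu = sum(r["value"] for r in rows)
        wall = sum(r["wallclock"] for r in rows)
        return cpu / wall if wall > 0 else None

    samples = [
        {"metric": {"type": "cputime"}, "process": "worker",
         "value": 2.0, "wallclock": 10.0},
        {"metric": {"type": "cputime"}, "process": "worker",
         "value": 3.0, "wallclock": 10.0},
    ]
    assert cpu_fraction(samples, "worker") == 0.25  # 5s of cpu across 20s of wallclock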
8,998
def retrieveAcknowledge(): a = TpPd(pd=3) b = MessageType(mesType=29) packet = (a / b) return packet
[ "def", "retrieveAcknowledge", "(", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "29", ")", "packet", "=", "(", "a", "/", "b", ")", "return", "packet" ]
retrieve acknowledge section 9 .
train
true
8,999
def _adjust_start_date_for_beta_testers(user, descriptor, course_key): return adjust_start_date(user, descriptor.days_early_for_beta, descriptor.start, course_key)
[ "def", "_adjust_start_date_for_beta_testers", "(", "user", ",", "descriptor", ",", "course_key", ")", ":", "return", "adjust_start_date", "(", "user", ",", "descriptor", ".", "days_early_for_beta", ",", "descriptor", ".", "start", ",", "course_key", ")" ]
adjust the start date if the user is in a beta test group .
train
false
9,001
def configure_debug_log_handlers(logger): global printed_log_start_message logger.setLevel(logging.DEBUG) logging_to_file = True try: make_sure_path_exists(os.path.dirname(log_filepath), 448) debug_handler = logging.FileHandler(log_filepath) except (OSError, IOError): logging_to_file = False debug_handler = logging.StreamHandler() debug_handler.setLevel(logging.DEBUG) important_handler = logging.StreamHandler() important_handler.setLevel(logging.WARNING) logger.addHandler(debug_handler) logger.addHandler(important_handler) if (not printed_log_start_message): logger.info(u'!-- begin debug log --!') logger.info((u'version: ' + __version__)) if logging_to_file: logger.info((u'logging to: ' + log_filepath)) printed_log_start_message = True formatter = logging.Formatter(u'%(asctime)s - %(name)s (%(module)s:%(lineno)s) [%(levelname)s]: %(message)s') debug_handler.setFormatter(formatter) important_handler.setFormatter(formatter)
[ "def", "configure_debug_log_handlers", "(", "logger", ")", ":", "global", "printed_log_start_message", "logger", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "logging_to_file", "=", "True", "try", ":", "make_sure_path_exists", "(", "os", ".", "path", ".", "dirname", "(", "log_filepath", ")", ",", "448", ")", "debug_handler", "=", "logging", ".", "FileHandler", "(", "log_filepath", ")", "except", "(", "OSError", ",", "IOError", ")", ":", "logging_to_file", "=", "False", "debug_handler", "=", "logging", ".", "StreamHandler", "(", ")", "debug_handler", ".", "setLevel", "(", "logging", ".", "DEBUG", ")", "important_handler", "=", "logging", ".", "StreamHandler", "(", ")", "important_handler", ".", "setLevel", "(", "logging", ".", "WARNING", ")", "logger", ".", "addHandler", "(", "debug_handler", ")", "logger", ".", "addHandler", "(", "important_handler", ")", "if", "(", "not", "printed_log_start_message", ")", ":", "logger", ".", "info", "(", "u'!-- begin debug log --!'", ")", "logger", ".", "info", "(", "(", "u'version: '", "+", "__version__", ")", ")", "if", "logging_to_file", ":", "logger", ".", "info", "(", "(", "u'logging to: '", "+", "log_filepath", ")", ")", "printed_log_start_message", "=", "True", "formatter", "=", "logging", ".", "Formatter", "(", "u'%(asctime)s - %(name)s (%(module)s:%(lineno)s) [%(levelname)s]: %(message)s'", ")", "debug_handler", ".", "setFormatter", "(", "formatter", ")", "important_handler", ".", "setFormatter", "(", "formatter", ")" ]
configure debug log handlers: debug output goes to a log file when possible (stderr otherwise), warnings and above to stderr .
train
false
9,002
def hrm_training_month(row): if hasattr(row, 'hrm_training'): row = row.hrm_training try: date = row.date except AttributeError: date = None if date: return ('%s/%02d' % (date.year, date.month)) else: return current.messages['NONE']
[ "def", "hrm_training_month", "(", "row", ")", ":", "if", "hasattr", "(", "row", ",", "'hrm_training'", ")", ":", "row", "=", "row", ".", "hrm_training", "try", ":", "date", "=", "row", ".", "date", "except", "AttributeError", ":", "date", "=", "None", "if", "date", ":", "return", "(", "'%s/%02d'", "%", "(", "date", ".", "year", ",", "date", ".", "month", ")", ")", "else", ":", "return", "current", ".", "messages", "[", "'NONE'", "]" ]
year/month of the start date of the training event .
train
false
9,004
def load_pycryptodome_raw_lib(name, cdecl): split = name.split('.') (dir_comps, basename) = (split[:(-1)], split[(-1)]) for (ext, mod, typ) in imp.get_suffixes(): if (typ == imp.C_EXTENSION): try: return load_lib(pycryptodome_filename(dir_comps, (basename + ext)), cdecl) except OSError: pass raise OSError(("Cannot load native module '%s'" % name))
[ "def", "load_pycryptodome_raw_lib", "(", "name", ",", "cdecl", ")", ":", "split", "=", "name", ".", "split", "(", "'.'", ")", "(", "dir_comps", ",", "basename", ")", "=", "(", "split", "[", ":", "(", "-", "1", ")", "]", ",", "split", "[", "(", "-", "1", ")", "]", ")", "for", "(", "ext", ",", "mod", ",", "typ", ")", "in", "imp", ".", "get_suffixes", "(", ")", ":", "if", "(", "typ", "==", "imp", ".", "C_EXTENSION", ")", ":", "try", ":", "return", "load_lib", "(", "pycryptodome_filename", "(", "dir_comps", ",", "(", "basename", "+", "ext", ")", ")", ",", "cdecl", ")", "except", "OSError", ":", "pass", "raise", "OSError", "(", "(", "\"Cannot load native module '%s'\"", "%", "name", ")", ")" ]
load a shared library and return a handle to it .
train
false
9,006
def enable_insecure_serializers(choices=[u'pickle', u'yaml', u'msgpack']): for choice in choices: try: registry.enable(choice) except KeyError: pass
[ "def", "enable_insecure_serializers", "(", "choices", "=", "[", "u'pickle'", ",", "u'yaml'", ",", "u'msgpack'", "]", ")", ":", "for", "choice", "in", "choices", ":", "try", ":", "registry", ".", "enable", "(", "choice", ")", "except", "KeyError", ":", "pass" ]
enable serializers that are considered to be unsafe .
train
false
9,007
def get_all_groups(path_prefix='/', region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if (not conn): return None _groups = conn.get_all_groups(path_prefix=path_prefix) groups = _groups.list_groups_response.list_groups_result.groups marker = getattr(_groups.list_groups_response.list_groups_result, 'marker', None) while marker: _groups = conn.get_all_groups(path_prefix=path_prefix, marker=marker) groups = (groups + _groups.list_groups_response.list_groups_result.groups) marker = getattr(_groups.list_groups_response.list_groups_result, 'marker', None) return groups
[ "def", "get_all_groups", "(", "path_prefix", "=", "'/'", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "conn", ")", ":", "return", "None", "_groups", "=", "conn", ".", "get_all_groups", "(", "path_prefix", "=", "path_prefix", ")", "groups", "=", "_groups", ".", "list_groups_response", ".", "list_groups_result", ".", "groups", "marker", "=", "getattr", "(", "_groups", ".", "list_groups_response", ".", "list_groups_result", ",", "'marker'", ",", "None", ")", "while", "marker", ":", "_groups", "=", "conn", ".", "get_all_groups", "(", "path_prefix", "=", "path_prefix", ",", "marker", "=", "marker", ")", "groups", "=", "(", "groups", "+", "_groups", ".", "list_groups_response", ".", "list_groups_result", ".", "groups", ")", "marker", "=", "getattr", "(", "_groups", ".", "list_groups_response", ".", "list_groups_result", ",", "'marker'", ",", "None", ")", "return", "groups" ]
retrieve all iam groups, following pagination markers .
train
true
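the marker loop in get_all_groups above is the usual aws-style pagination pattern: re-issue the call with the marker from the previous page until no marker comes back; a schematic sketch with a hypothetical two-page response:

    def fetch_all(list_page):
        items, marker = [], None
        while True:
            page = list_page(marker=marker)
            items.extend(page["items"])
            marker = page.get("marker")
            if not marker:
                return items

    pages = {None: {"items": [1, 2], "marker": "m1"},
             "m1": {"items": [3]}}
    assert fetch_all(lambda marker: pages[marker]) == [1, 2, 3]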
9,008
def nonthreadsafe(fn): @wraps(fn) def core(*args, **kwargs): with _cuda_compiler_lock: return fn(*args, **kwargs) return core
[ "def", "nonthreadsafe", "(", "fn", ")", ":", "@", "wraps", "(", "fn", ")", "def", "core", "(", "*", "args", ",", "**", "kwargs", ")", ":", "with", "_cuda_compiler_lock", ":", "return", "fn", "(", "*", "args", ",", "**", "kwargs", ")", "return", "core" ]
wraps a function to prevent multiple threads from executing it in parallel, because llvm is not threadsafe .
train
false
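the same lock-guarding decorator written standalone; the guarded function and lock name here are illustrative stand-ins for whatever non-reentrant call needs serializing:

    import threading
    from functools import wraps

    _compiler_lock = threading.Lock()

    def nonthreadsafe(fn):
        # serialize every call to fn through one process-wide lock
        @wraps(fn)
        def core(*args, **kwargs):
            with _compiler_lock:
                return fn(*args, **kwargs)
        return core

    @nonthreadsafe
    def compile_something(source):
        return len(source)  # placeholder for the non-threadsafe work

    assert compile_something("abc") == 3
    assert compile_something.__name__ == "compile_something"  # wraps kept the metadata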
9,009
def get_master_uri(required=True, env=None, argv=None): if (env is None): env = os.environ if (argv is None): argv = sys.argv try: for arg in argv: if arg.startswith('__master:='): val = None try: (_, val) = arg.split(':=') except: pass if (not val): raise ROSEnvException(("__master remapping argument '%s' improperly specified" % arg)) return val return env[ROS_MASTER_URI] except KeyError as e: if required: raise ROSEnvException(('%s has not been configured' % ROS_MASTER_URI))
[ "def", "get_master_uri", "(", "required", "=", "True", ",", "env", "=", "None", ",", "argv", "=", "None", ")", ":", "if", "(", "env", "is", "None", ")", ":", "env", "=", "os", ".", "environ", "if", "(", "argv", "is", "None", ")", ":", "argv", "=", "sys", ".", "argv", "try", ":", "for", "arg", "in", "argv", ":", "if", "arg", ".", "startswith", "(", "'__master:='", ")", ":", "val", "=", "None", "try", ":", "(", "_", ",", "val", ")", "=", "arg", ".", "split", "(", "':='", ")", "except", ":", "pass", "if", "(", "not", "val", ")", ":", "raise", "ROSEnvException", "(", "(", "\"__master remapping argument '%s' improperly specified\"", "%", "arg", ")", ")", "return", "val", "return", "env", "[", "ROS_MASTER_URI", "]", "except", "KeyError", "as", "e", ":", "if", "required", ":", "raise", "ROSEnvException", "(", "(", "'%s has not been configured'", "%", "ROS_MASTER_URI", ")", ")" ]
get the ros_master_uri setting from the command-line args or environment .
train
false
9,010
def add_tagids(source): return _modify_tagids(source)
[ "def", "add_tagids", "(", "source", ")", ":", "return", "_modify_tagids", "(", "source", ")" ]
applies a unique attribute code number to each tag element so it can be identified later in the annotation process .
train
false
9,012
def get_localzone(): global _cache_tz if (_cache_tz is None): _cache_tz = pytz.timezone(get_localzone_name()) return _cache_tz
[ "def", "get_localzone", "(", ")", ":", "global", "_cache_tz", "if", "(", "_cache_tz", "is", "None", ")", ":", "_cache_tz", "=", "pytz", ".", "timezone", "(", "get_localzone_name", "(", ")", ")", "return", "_cache_tz" ]
returns the current underlying local timezone object .
train
false
9,013
def autosummary_toc_visit_html(self, node): raise nodes.SkipNode
[ "def", "autosummary_toc_visit_html", "(", "self", ",", "node", ")", ":", "raise", "nodes", ".", "SkipNode" ]
hide autosummary toctree list in html output .
train
false
9,014
def get_stock_basics(date=None): wdate = (du.last_tddate() if (date is None) else date) wdate = wdate.replace('-', '') if (wdate < '20160809'): return None datepre = ('' if (date is None) else ((wdate[0:4] + wdate[4:6]) + '/')) request = Request((ct.ALL_STOCK_BASICS_FILE % (datepre, ('' if (date is None) else wdate)))) text = urlopen(request, timeout=10).read() text = text.decode('GBK') text = text.replace('--', '') df = pd.read_csv(StringIO(text), dtype={'code': 'object'}) df = df.set_index('code') return df
[ "def", "get_stock_basics", "(", "date", "=", "None", ")", ":", "wdate", "=", "(", "du", ".", "last_tddate", "(", ")", "if", "(", "date", "is", "None", ")", "else", "date", ")", "wdate", "=", "wdate", ".", "replace", "(", "'-'", ",", "''", ")", "if", "(", "wdate", "<", "'20160809'", ")", ":", "return", "None", "datepre", "=", "(", "''", "if", "(", "date", "is", "None", ")", "else", "(", "(", "wdate", "[", "0", ":", "4", "]", "+", "wdate", "[", "4", ":", "6", "]", ")", "+", "'/'", ")", ")", "request", "=", "Request", "(", "(", "ct", ".", "ALL_STOCK_BASICS_FILE", "%", "(", "datepre", ",", "(", "''", "if", "(", "date", "is", "None", ")", "else", "wdate", ")", ")", ")", ")", "text", "=", "urlopen", "(", "request", ",", "timeout", "=", "10", ")", ".", "read", "(", ")", "text", "=", "text", ".", "decode", "(", "'GBK'", ")", "text", "=", "text", ".", "replace", "(", "'--'", ",", "''", ")", "df", "=", "pd", ".", "read_csv", "(", "StringIO", "(", "text", ")", ",", "dtype", "=", "{", "'code'", ":", "'object'", "}", ")", "df", "=", "df", ".", "set_index", "(", "'code'", ")", "return", "df" ]
parameters date: yyyy-mm-dd, defaults to the last trading day; only historical data from 2016-08-09 onward is available . return dataframe indexed by code .
train
false
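a usage sketch for get_stock_basics above; it assumes network access to the tushare data files and that the requested date is a trading day on or after 2016-08-09:

    df = get_stock_basics("2016-08-10")  # basics snapshot for one trading day
    print(df.head())                     # dataframe indexed by stock code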
9,015
@pytest.mark.parametrize((u'in_val', u'in_unit'), [([0.1, 5000.0, 10000.0], u.AA), ([100000.0, 2.0, 1.0], (u.micron ** (-1))), ([2.99792458e+19, 599584916000000.0, 299792458000000.0], u.Hz), ([1.98644568e-14, 3.97289137e-19, 1.98644568e-19], u.J)]) def test_spectral4(in_val, in_unit): out_units = [(u.micron ** (-1)), (u.radian / u.micron)] answers = [[100000.0, 2.0, 1.0], [628318.531, 12.5663706, 6.28318531]] for (out_unit, ans) in zip(out_units, answers): a = in_unit.to(out_unit, in_val, u.spectral()) assert_allclose(a, ans) b = out_unit.to(in_unit, ans, u.spectral()) assert_allclose(b, in_val)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "(", "u'in_val'", ",", "u'in_unit'", ")", ",", "[", "(", "[", "0.1", ",", "5000.0", ",", "10000.0", "]", ",", "u", ".", "AA", ")", ",", "(", "[", "100000.0", ",", "2.0", ",", "1.0", "]", ",", "(", "u", ".", "micron", "**", "(", "-", "1", ")", ")", ")", ",", "(", "[", "2.99792458e+19", ",", "599584916000000.0", ",", "299792458000000.0", "]", ",", "u", ".", "Hz", ")", ",", "(", "[", "1.98644568e-14", ",", "3.97289137e-19", ",", "1.98644568e-19", "]", ",", "u", ".", "J", ")", "]", ")", "def", "test_spectral4", "(", "in_val", ",", "in_unit", ")", ":", "out_units", "=", "[", "(", "u", ".", "micron", "**", "(", "-", "1", ")", ")", ",", "(", "u", ".", "radian", "/", "u", ".", "micron", ")", "]", "answers", "=", "[", "[", "100000.0", ",", "2.0", ",", "1.0", "]", ",", "[", "628318.531", ",", "12.5663706", ",", "6.28318531", "]", "]", "for", "(", "out_unit", ",", "ans", ")", "in", "zip", "(", "out_units", ",", "answers", ")", ":", "a", "=", "in_unit", ".", "to", "(", "out_unit", ",", "in_val", ",", "u", ".", "spectral", "(", ")", ")", "assert_allclose", "(", "a", ",", "ans", ")", "b", "=", "out_unit", ".", "to", "(", "in_unit", ",", "ans", ",", "u", ".", "spectral", "(", ")", ")", "assert_allclose", "(", "b", ",", "in_val", ")" ]
wave number conversion w .
train
false
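u.spectral() is the astropy equivalency that links wavelength, frequency, wavenumber, and photon energy; a small usage sketch mirroring one value pair from the test above (5000 angstrom <-> c/lambda, about 5.99585e14 Hz):

    from astropy import units as u
    from numpy.testing import assert_allclose

    assert_allclose(u.AA.to(u.Hz, 5000.0, u.spectral()), 5.99584916e14)
    assert_allclose(u.Hz.to(u.AA, 5.99584916e14, u.spectral()), 5000.0)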
9,017
def atomic_group_add_labels(id, labels): label_objs = models.Label.smart_get_bulk(labels) models.AtomicGroup.smart_get(id).label_set.add(*label_objs)
[ "def", "atomic_group_add_labels", "(", "id", ",", "labels", ")", ":", "label_objs", "=", "models", ".", "Label", ".", "smart_get_bulk", "(", "labels", ")", "models", ".", "AtomicGroup", ".", "smart_get", "(", "id", ")", ".", "label_set", ".", "add", "(", "*", "label_objs", ")" ]
add labels to atomic group .
train
false
9,018
def get_pyos_inputhook(): return ctypes.c_void_p.in_dll(ctypes.pythonapi, 'PyOS_InputHook')
[ "def", "get_pyos_inputhook", "(", ")", ":", "return", "ctypes", ".", "c_void_p", ".", "in_dll", "(", "ctypes", ".", "pythonapi", ",", "'PyOS_InputHook'", ")" ]
return the current pyos_inputhook as a ctypes.c_void_p .
train
false
9,019
def GenApiConfig(service_class_names, generator=None, hostname=None): service_map = {} generator = (generator or api_config.ApiConfigGenerator()) for service_class_name in service_class_names: (module_name, base_service_class_name) = service_class_name.rsplit('.', 1) module = __import__(module_name, fromlist=base_service_class_name) service = getattr(module, base_service_class_name) if (not (isinstance(service, type) and issubclass(service, remote.Service))): raise TypeError(('%s is not a ProtoRPC service' % service_class_name)) hostname = (service.api_info.hostname or hostname) service_map[service_class_name] = generator.pretty_print_config_to_json(service, hostname=hostname) return service_map
[ "def", "GenApiConfig", "(", "service_class_names", ",", "generator", "=", "None", ",", "hostname", "=", "None", ")", ":", "service_map", "=", "{", "}", "generator", "=", "(", "generator", "or", "api_config", ".", "ApiConfigGenerator", "(", ")", ")", "for", "service_class_name", "in", "service_class_names", ":", "(", "module_name", ",", "base_service_class_name", ")", "=", "service_class_name", ".", "rsplit", "(", "'.'", ",", "1", ")", "module", "=", "__import__", "(", "module_name", ",", "fromlist", "=", "base_service_class_name", ")", "service", "=", "getattr", "(", "module", ",", "base_service_class_name", ")", "if", "(", "not", "(", "isinstance", "(", "service", ",", "type", ")", "and", "issubclass", "(", "service", ",", "remote", ".", "Service", ")", ")", ")", ":", "raise", "TypeError", "(", "(", "'%s is not a ProtoRPC service'", "%", "service_class_name", ")", ")", "hostname", "=", "(", "service", ".", "api_info", ".", "hostname", "or", "hostname", ")", "service_map", "[", "service_class_name", "]", "=", "generator", ".", "pretty_print_config_to_json", "(", "service", ",", "hostname", "=", "hostname", ")", "return", "service_map" ]
write an api configuration for endpoints-annotated protorpc services .
train
false
9,024
def get_logger_name_for_module(module): module_file = module.__file__ base_dir = os.path.dirname(os.path.abspath(module_file)) module_name = os.path.basename(module_file) module_name = module_name.replace('.pyc', '').replace('.py', '') split = base_dir.split(os.path.sep) split = [component for component in split if component] start_index = 0 for (index, component) in enumerate(reversed(split)): if component.startswith('st2'): start_index = ((len(split) - 1) - index) break split = split[start_index:] name = (('.'.join(split) + '.') + module_name) return name
[ "def", "get_logger_name_for_module", "(", "module", ")", ":", "module_file", "=", "module", ".", "__file__", "base_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "module_file", ")", ")", "module_name", "=", "os", ".", "path", ".", "basename", "(", "module_file", ")", "module_name", "=", "module_name", ".", "replace", "(", "'.pyc'", ",", "''", ")", ".", "replace", "(", "'.py'", ",", "''", ")", "split", "=", "base_dir", ".", "split", "(", "os", ".", "path", ".", "sep", ")", "split", "=", "[", "component", "for", "component", "in", "split", "if", "component", "]", "start_index", "=", "0", "for", "(", "index", ",", "component", ")", "in", "enumerate", "(", "reversed", "(", "split", ")", ")", ":", "if", "component", ".", "startswith", "(", "'st2'", ")", ":", "start_index", "=", "(", "(", "len", "(", "split", ")", "-", "1", ")", "-", "index", ")", "break", "split", "=", "split", "[", "start_index", ":", "]", "name", "=", "(", "(", "'.'", ".", "join", "(", "split", ")", "+", "'.'", ")", "+", "module_name", ")", "return", "name" ]
retrieve the fully qualified logger name for the current module; returns a str .
train
false
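the name in get_logger_name_for_module above is derived purely from the module's file path: components from the last 'st2'-prefixed directory onward are joined with dots and the extension is dropped; a path-only sketch assuming a posix path (the sample path is made up):

    import os

    def logger_name_from_path(module_file):
        base_dir = os.path.dirname(module_file)
        module_name = os.path.basename(module_file).replace(".pyc", "").replace(".py", "")
        parts = [p for p in base_dir.split(os.path.sep) if p]
        start = 0
        for i, part in enumerate(reversed(parts)):
            if part.startswith("st2"):
                start = len(parts) - 1 - i  # last 'st2*' component wins
                break
        return ".".join(parts[start:] + [module_name])

    name = logger_name_from_path("/opt/stackstorm/st2common/services/access.py")
    assert name == "st2common.services.access"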
9,025
def get_ami_metadata(): res = {} base_url = 'http://169.254.169.254/latest/meta-data' instance_id = run(('curl %s/instance-id' % base_url)) assert re.match('i-[0-9a-f]{8}', instance_id) res['instance-id'] = instance_id return res
[ "def", "get_ami_metadata", "(", ")", ":", "res", "=", "{", "}", "base_url", "=", "'http://169.254.169.254/latest/meta-data'", "instance_id", "=", "run", "(", "(", "'curl %s/instance-id'", "%", "base_url", ")", ")", "assert", "re", ".", "match", "(", "'i-[0-9a-f]{8}'", ",", "instance_id", ")", "res", "[", "'instance-id'", "]", "=", "instance_id", "return", "res" ]
fetch ami metadata for the local instance .
train
false
9,026
def running_under_virtualenv(): return hasattr(sys, 'real_prefix')
[ "def", "running_under_virtualenv", "(", ")", ":", "return", "hasattr", "(", "sys", ",", "'real_prefix'", ")" ]
return true if we're running inside a virtualenv .
train
false
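sys.real_prefix is set by the classic virtualenv tool only; pep 405 venvs instead leave sys.base_prefix different from sys.prefix, so a check covering both styles looks like this (a common sketch, not the function above):

    import sys

    def in_virtualenv():
        # classic virtualenv sets real_prefix; stdlib venv changes prefix
        return hasattr(sys, "real_prefix") or (
            getattr(sys, "base_prefix", sys.prefix) != sys.prefix
        )

    print("inside a virtualenv:", in_virtualenv())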
9,027
def _build_element_wise_ufunc_wrapper(cres, signature): ctx = cres.target_context library = cres.library fname = cres.fndesc.llvm_func_name env = None if cres.objectmode: env = cres.environment assert (env is not None) ll_intp = cres.target_context.get_value_type(types.intp) ll_pyobj = cres.target_context.get_value_type(types.pyobject) envptr = lc.Constant.int(ll_intp, id(env)).inttoptr(ll_pyobj) else: envptr = None ptr = build_ufunc_wrapper(library, ctx, fname, signature, cres.objectmode, envptr, env) dtypenums = [as_dtype(a).num for a in signature.args] dtypenums.append(as_dtype(signature.return_type).num) return (dtypenums, ptr, env)
[ "def", "_build_element_wise_ufunc_wrapper", "(", "cres", ",", "signature", ")", ":", "ctx", "=", "cres", ".", "target_context", "library", "=", "cres", ".", "library", "fname", "=", "cres", ".", "fndesc", ".", "llvm_func_name", "env", "=", "None", "if", "cres", ".", "objectmode", ":", "env", "=", "cres", ".", "environment", "assert", "(", "env", "is", "not", "None", ")", "ll_intp", "=", "cres", ".", "target_context", ".", "get_value_type", "(", "types", ".", "intp", ")", "ll_pyobj", "=", "cres", ".", "target_context", ".", "get_value_type", "(", "types", ".", "pyobject", ")", "envptr", "=", "lc", ".", "Constant", ".", "int", "(", "ll_intp", ",", "id", "(", "env", ")", ")", ".", "inttoptr", "(", "ll_pyobj", ")", "else", ":", "envptr", "=", "None", "ptr", "=", "build_ufunc_wrapper", "(", "library", ",", "ctx", ",", "fname", ",", "signature", ",", "cres", ".", "objectmode", ",", "envptr", ",", "env", ")", "dtypenums", "=", "[", "as_dtype", "(", "a", ")", ".", "num", "for", "a", "in", "signature", ".", "args", "]", "dtypenums", ".", "append", "(", "as_dtype", "(", "signature", ".", "return_type", ")", ".", "num", ")", "return", "(", "dtypenums", ",", "ptr", ",", "env", ")" ]
build a wrapper for the ufunc loop entry point given by the compilation result object .
train
false