id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
29,574
def _api_del_config(name, output, kwargs): if del_from_section(kwargs): return report(output) else: return report(output, _MSG_NOT_IMPLEMENTED)
[ "def", "_api_del_config", "(", "name", ",", "output", ",", "kwargs", ")", ":", "if", "del_from_section", "(", "kwargs", ")", ":", "return", "report", "(", "output", ")", "else", ":", "return", "report", "(", "output", ",", "_MSG_NOT_IMPLEMENTED", ")" ]
api: accepts output .
train
false
29,576
def _describe_node(node): return {'id': node.id, 'name': node.name, 'provider': node.driver.name, 'creation_time': _format_time(_get_node_creation_time(node))}
[ "def", "_describe_node", "(", "node", ")", ":", "return", "{", "'id'", ":", "node", ".", "id", ",", "'name'", ":", "node", ".", "name", ",", "'provider'", ":", "node", ".", "driver", ".", "name", ",", "'creation_time'", ":", "_format_time", "(", "_get_...
create a dictionary of node details .
train
false
29,577
def div_ceil(n, d): return int(((n // d) + ((n % d) != 0)))
[ "def", "div_ceil", "(", "n", ",", "d", ")", ":", "return", "int", "(", "(", "(", "n", "//", "d", ")", "+", "(", "(", "n", "%", "d", ")", "!=", "0", ")", ")", ")" ]
the smallest integer k such that k*d >= n .
train
false
29,578
def test_octarine_http(dcos_api_session, timeout=30): test_uuid = uuid.uuid4().hex octarine_id = uuid.uuid4().hex proxy = ('"http://127.0.0.1:$(/opt/mesosphere/bin/octarine ' + '--client --port {})"'.format(octarine_id)) check_command = 'curl --fail --proxy {} marathon.mesos.mydcos.directory'.format(proxy) app_definition = {'id': '/integration-test-app-octarine-http-{}'.format(test_uuid), 'cpus': 0.1, 'mem': 128, 'ports': [0], 'cmd': '/opt/mesosphere/bin/octarine {}'.format(octarine_id), 'disk': 0, 'instances': 1, 'healthChecks': [{'protocol': 'COMMAND', 'command': {'value': check_command}, 'gracePeriodSeconds': 5, 'intervalSeconds': 10, 'timeoutSeconds': 10, 'maxConsecutiveFailures': 3}]} dcos_api_session.marathon.deploy_and_cleanup(app_definition)
[ "def", "test_octarine_http", "(", "dcos_api_session", ",", "timeout", "=", "30", ")", ":", "test_uuid", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "octarine_id", "=", "uuid", ".", "uuid4", "(", ")", ".", "hex", "proxy", "=", "(", "'\"http://127.0.0...
test if we are able to send traffic through octarine .
train
false
29,579
def supports_carddav(url): response = requests.request('OPTIONS', url, headers={'User-Agent': USER_AGENT, 'Depth': '1'}) response.raise_for_status() if ('addressbook' not in response.headers.get('DAV', '')): raise Exception('URL is not a CardDAV resource')
[ "def", "supports_carddav", "(", "url", ")", ":", "response", "=", "requests", ".", "request", "(", "'OPTIONS'", ",", "url", ",", "headers", "=", "{", "'User-Agent'", ":", "USER_AGENT", ",", "'Depth'", ":", "'1'", "}", ")", "response", ".", "raise_for_statu...
basic verification that the endpoint supports carddav .
train
false
29,580
def RegenerateAppendFlag(flag, values, predicate, env_name, options): flags = [] if (options.use_environment and env_name): for flag_value in ShlexEnv(env_name): value = FormatOpt(flag, predicate(flag_value)) if (value in flags): flags.remove(value) flags.append(value) if values: for flag_value in values: flags.append(FormatOpt(flag, predicate(flag_value))) return flags
[ "def", "RegenerateAppendFlag", "(", "flag", ",", "values", ",", "predicate", ",", "env_name", ",", "options", ")", ":", "flags", "=", "[", "]", "if", "(", "options", ".", "use_environment", "and", "env_name", ")", ":", "for", "flag_value", "in", "ShlexEnv"...
regenerate a list of command line flags .
train
false
29,581
def list_master_symlinks(saltenv='base', prefix=''): return _client().symlink_list(saltenv, prefix)
[ "def", "list_master_symlinks", "(", "saltenv", "=", "'base'", ",", "prefix", "=", "''", ")", ":", "return", "_client", "(", ")", ".", "symlink_list", "(", "saltenv", ",", "prefix", ")" ]
list all of the symlinks stored on the master cli example: .
train
false
29,582
def strlist_intersection(a, b): temp = cidict() for elt in a: temp[elt] = elt result = [temp[elt] for elt in b if temp.has_key(elt)] return result
[ "def", "strlist_intersection", "(", "a", ",", "b", ")", ":", "temp", "=", "cidict", "(", ")", "for", "elt", "in", "a", ":", "temp", "[", "elt", "]", "=", "elt", "result", "=", "[", "temp", "[", "elt", "]", "for", "elt", "in", "b", "if", "temp",...
return intersection of two lists of case-insensitive strings a .
train
false
29,584
def get_calculator(counter_type): try: return _counter_type_calculators[counter_type] except KeyError: raise UndefinedCalculator
[ "def", "get_calculator", "(", "counter_type", ")", ":", "try", ":", "return", "_counter_type_calculators", "[", "counter_type", "]", "except", "KeyError", ":", "raise", "UndefinedCalculator" ]
return the calculator associated with the counter_type when it exists .
train
false
29,585
def conv1d_mc0(input, filters, image_shape=None, filter_shape=None, border_mode='valid', subsample=(1,), filter_flip=True): if (image_shape is None): image_shape_mc0 = None else: image_shape_mc0 = (image_shape[0], image_shape[1], 1, image_shape[2]) if (filter_shape is None): filter_shape_mc0 = None else: filter_shape_mc0 = (filter_shape[0], filter_shape[1], 1, filter_shape[2]) if isinstance(border_mode, tuple): (border_mode,) = border_mode if isinstance(border_mode, int): border_mode = (0, border_mode) input_mc0 = input.dimshuffle(0, 1, 'x', 2) filters_mc0 = filters.dimshuffle(0, 1, 'x', 2) conved = T.nnet.conv2d(input_mc0, filters_mc0, image_shape_mc0, filter_shape_mc0, subsample=(1, subsample[0]), border_mode=border_mode, filter_flip=filter_flip) return conved[:, :, 0, :]
[ "def", "conv1d_mc0", "(", "input", ",", "filters", ",", "image_shape", "=", "None", ",", "filter_shape", "=", "None", ",", "border_mode", "=", "'valid'", ",", "subsample", "=", "(", "1", ",", ")", ",", "filter_flip", "=", "True", ")", ":", "if", "(", ...
using conv2d with width == 1 .
train
false
29,586
def _wiki_request(params): global RATE_LIMIT_LAST_CALL global USER_AGENT params[u'format'] = u'json' if (not (u'action' in params)): params[u'action'] = u'query' headers = {u'User-Agent': USER_AGENT} if (RATE_LIMIT and RATE_LIMIT_LAST_CALL and ((RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT) > datetime.now())): wait_time = ((RATE_LIMIT_LAST_CALL + RATE_LIMIT_MIN_WAIT) - datetime.now()) time.sleep(int(wait_time.total_seconds())) r = requests.get(API_URL, params=params, headers=headers) if RATE_LIMIT: RATE_LIMIT_LAST_CALL = datetime.now() return r.json()
[ "def", "_wiki_request", "(", "params", ")", ":", "global", "RATE_LIMIT_LAST_CALL", "global", "USER_AGENT", "params", "[", "u'format'", "]", "=", "u'json'", "if", "(", "not", "(", "u'action'", "in", "params", ")", ")", ":", "params", "[", "u'action'", "]", ...
make a request to the wikipedia api using the given search parameters .
train
false
29,588
def fix_win_console(encoding): if (isinstance(sys.stdout, WinUnicodeOutputBase) or isinstance(sys.stderr, WinUnicodeOutputBase)): return False try: sys.stdout = win_get_unicode_stream(sys.stdout, 1, (-11), encoding) sys.stderr = win_get_unicode_stream(sys.stderr, 2, (-12), encoding) except Exception as e: complain(('exception %r while fixing up sys.stdout and sys.stderr' % e)) return True
[ "def", "fix_win_console", "(", "encoding", ")", ":", "if", "(", "isinstance", "(", "sys", ".", "stdout", ",", "WinUnicodeOutputBase", ")", "or", "isinstance", "(", "sys", ".", "stderr", ",", "WinUnicodeOutputBase", ")", ")", ":", "return", "False", "try", ...
makes unicode console output work independently of the current code page .
train
false
29,589
def getNewRepository(): return ExportRepository()
[ "def", "getNewRepository", "(", ")", ":", "return", "ExportRepository", "(", ")" ]
get new repository .
train
false
29,590
def test_permission_file(): import tempfile from conda.common.compat import text_type with tempfile.NamedTemporaryFile(mode='r') as f: if (not isinstance(f.name, text_type)): return with FileLock(f.name) as lock: path = basename(lock.lock_file_path) assert (not exists(join(f.name, path)))
[ "def", "test_permission_file", "(", ")", ":", "import", "tempfile", "from", "conda", ".", "common", ".", "compat", "import", "text_type", "with", "tempfile", ".", "NamedTemporaryFile", "(", "mode", "=", "'r'", ")", "as", "f", ":", "if", "(", "not", "isinst...
test when lock cannot be created due to permission make sure no exception raised .
train
false
29,591
def floating_ip_get_all(context): return IMPL.floating_ip_get_all(context)
[ "def", "floating_ip_get_all", "(", "context", ")", ":", "return", "IMPL", ".", "floating_ip_get_all", "(", "context", ")" ]
get all floating ips .
train
false
29,592
def test_no_output_but_filters(): jl = bundle_to_joblist(Bundle(Bundle('s1', output='foo'), Bundle('s2', output='bar', filters=[js]), filters=[css])) assert (jl['foo'][0][0] == [css]) assert (jl['foo'][0][1] == ['s1']) assert (jl['bar'][0][0] == [js, css]) assert (jl['bar'][0][1] == ['s2'])
[ "def", "test_no_output_but_filters", "(", ")", ":", "jl", "=", "bundle_to_joblist", "(", "Bundle", "(", "Bundle", "(", "'s1'", ",", "output", "=", "'foo'", ")", ",", "Bundle", "(", "'s2'", ",", "output", "=", "'bar'", ",", "filters", "=", "[", "js", "]...
if a container specifies filters .
train
false
29,594
def getSolidMatchingPlugins(elementNode): xmlProcessor = elementNode.getXMLProcessor() matchingPlugins = evaluate.getMatchingPlugins(elementNode, xmlProcessor.manipulationMatrixDictionary) return (matchingPlugins + evaluate.getMatchingPlugins(elementNode, xmlProcessor.manipulationShapeDictionary))
[ "def", "getSolidMatchingPlugins", "(", "elementNode", ")", ":", "xmlProcessor", "=", "elementNode", ".", "getXMLProcessor", "(", ")", "matchingPlugins", "=", "evaluate", ".", "getMatchingPlugins", "(", "elementNode", ",", "xmlProcessor", ".", "manipulationMatrixDictiona...
get solid plugins in the manipulation matrix .
train
false
29,596
def get_color_from_hex(s): if s.startswith('#'): return get_color_from_hex(s[1:]) value = [(int(x, 16) / 255.0) for x in split('([0-9a-f]{2})', s.lower()) if (x != '')] if (len(value) == 3): value.append(1) return value
[ "def", "get_color_from_hex", "(", "s", ")", ":", "if", "s", ".", "startswith", "(", "'#'", ")", ":", "return", "get_color_from_hex", "(", "s", "[", "1", ":", "]", ")", "value", "=", "[", "(", "int", "(", "x", ",", "16", ")", "/", "255.0", ")", ...
transform a hex string color to a kivy :class:~kivy .
train
false
29,597
def looking_at_call(s): position = (s.start_line, s.start_col) result = (looking_at_expr(s) == u'(') if (not result): (s.start_line, s.start_col) = position return result
[ "def", "looking_at_call", "(", "s", ")", ":", "position", "=", "(", "s", ".", "start_line", ",", "s", ".", "start_col", ")", "result", "=", "(", "looking_at_expr", "(", "s", ")", "==", "u'('", ")", "if", "(", "not", "result", ")", ":", "(", "s", ...
see if were looking at a .
train
false
29,599
def create_operation(arg1, operator, arg2): if (operator not in OPERATORS): raise OperatorCreationError('unknown operator "{0}"'.format(operator)) opfunc = OPERATORS[operator] if (opfunc in (is_null, is_not_null)): return opfunc(arg1) if (arg2 is None): message = 'To compare a value to NULL, use the unary is_null/is_not_null operators.' raise OperatorCreationError(message) if (arg2 is NO_ARGUMENT): msg = 'expected an argument for this operator but none was given' raise OperatorCreationError(msg) return opfunc(arg1, arg2)
[ "def", "create_operation", "(", "arg1", ",", "operator", ",", "arg2", ")", ":", "if", "(", "operator", "not", "in", "OPERATORS", ")", ":", "raise", "OperatorCreationError", "(", "'unknown operator \"{0}\"'", ".", "format", "(", "operator", ")", ")", "opfunc", ...
creates a sqlalchemy expression for the given operation .
train
false
29,600
@jit(nopython=True) def _generate_sample_paths(P_cdfs, init_states, random_values, out): (num_reps, ts_length) = out.shape for i in range(num_reps): out[(i, 0)] = init_states[i] for t in range((ts_length - 1)): out[(i, (t + 1))] = searchsorted(P_cdfs[out[(i, t)]], random_values[(i, t)])
[ "@", "jit", "(", "nopython", "=", "True", ")", "def", "_generate_sample_paths", "(", "P_cdfs", ",", "init_states", ",", "random_values", ",", "out", ")", ":", "(", "num_reps", ",", "ts_length", ")", "=", "out", ".", "shape", "for", "i", "in", "range", ...
generate num_reps sample paths of length ts_length .
train
true
29,601
def dup_pquo(f, g, K): return dup_pdiv(f, g, K)[0]
[ "def", "dup_pquo", "(", "f", ",", "g", ",", "K", ")", ":", "return", "dup_pdiv", "(", "f", ",", "g", ",", "K", ")", "[", "0", "]" ]
polynomial exact pseudo-quotient in k[x] .
train
false
29,604
def IS_CHARACTER_JUNK(ch, ws=' DCTB '): return (ch in ws)
[ "def", "IS_CHARACTER_JUNK", "(", "ch", ",", "ws", "=", "' DCTB '", ")", ":", "return", "(", "ch", "in", "ws", ")" ]
return 1 for ignorable character: iff ch is a space or tab .
train
false
29,605
def _parse_hostname(): contents = _read_file(_DEB_HOSTNAME_FILE) if contents: return contents[0].split('\n')[0] else: return ''
[ "def", "_parse_hostname", "(", ")", ":", "contents", "=", "_read_file", "(", "_DEB_HOSTNAME_FILE", ")", "if", "contents", ":", "return", "contents", "[", "0", "]", ".", "split", "(", "'\\n'", ")", "[", "0", "]", "else", ":", "return", "''" ]
parse /etc/hostname and return hostname .
train
false
29,606
def test_dummy_user_service_current_user(): user = XBlockUser(full_name='tester') user_service = SingleUserService(user) current_user = user_service.get_current_user() assert_equals(current_user, user) assert_equals(current_user.full_name, 'tester') assert_is_instance(current_user.emails, collections.Iterable) assert_false(isinstance(current_user.emails, basestring)) assert_is_instance(current_user.opt_attrs, collections.Mapping)
[ "def", "test_dummy_user_service_current_user", "(", ")", ":", "user", "=", "XBlockUser", "(", "full_name", "=", "'tester'", ")", "user_service", "=", "SingleUserService", "(", "user", ")", "current_user", "=", "user_service", ".", "get_current_user", "(", ")", "as...
tests that get_current_user() works on a dummy user service .
train
false
29,607
def get_output_dir(imdb, net=None): outdir = osp.abspath(osp.join(__C.ROOT_DIR, 'output', __C.EXP_DIR, imdb.name)) if (net is not None): outdir = osp.join(outdir, net.name) if (not os.path.exists(outdir)): os.makedirs(outdir) return outdir
[ "def", "get_output_dir", "(", "imdb", ",", "net", "=", "None", ")", ":", "outdir", "=", "osp", ".", "abspath", "(", "osp", ".", "join", "(", "__C", ".", "ROOT_DIR", ",", "'output'", ",", "__C", ".", "EXP_DIR", ",", "imdb", ".", "name", ")", ")", ...
return the directory where experimental artifacts are placed .
train
false
29,609
def get_agent_types_by_host(context, host): agent_types = [] core_plugin = directory.get_plugin() if utils.is_extension_supported(core_plugin, 'agent'): agents = core_plugin.get_agents(context.elevated(), filters={'host': [host]}) agent_types = [a['agent_type'] for a in agents] return agent_types
[ "def", "get_agent_types_by_host", "(", "context", ",", "host", ")", ":", "agent_types", "=", "[", "]", "core_plugin", "=", "directory", ".", "get_plugin", "(", ")", "if", "utils", ".", "is_extension_supported", "(", "core_plugin", ",", "'agent'", ")", ":", "...
return the agent types registered on the host .
train
false
29,610
@contextlib.contextmanager def indent_log(num=2): _log_state.indentation += num try: (yield) finally: _log_state.indentation -= num
[ "@", "contextlib", ".", "contextmanager", "def", "indent_log", "(", "num", "=", "2", ")", ":", "_log_state", ".", "indentation", "+=", "num", "try", ":", "(", "yield", ")", "finally", ":", "_log_state", ".", "indentation", "-=", "num" ]
a context manager which will cause the log output to be indented for any log messages emitted inside it .
train
false
29,611
@register.tag(name='include_raw') def do_include_raw(parser, token): bits = token.split_contents() if (len(bits) != 2): raise template.TemplateSyntaxError(('%r tag takes one argument: the name of the template to be included' % bits[0])) template_name = bits[1] if ((template_name[0] in ('"', "'")) and (template_name[(-1)] == template_name[0])): template_name = template_name[1:(-1)] (source, __) = get_template_source(template_name) return template.base.TextNode(source)
[ "@", "register", ".", "tag", "(", "name", "=", "'include_raw'", ")", "def", "do_include_raw", "(", "parser", ",", "token", ")", ":", "bits", "=", "token", ".", "split_contents", "(", ")", "if", "(", "len", "(", "bits", ")", "!=", "2", ")", ":", "ra...
performs a template include without parsing the context .
train
false
29,612
def test_quantity_pickelability(): q1 = (np.arange(10) * u.m) q2 = pickle.loads(pickle.dumps(q1)) assert np.all((q1.value == q2.value)) assert q1.unit.is_equivalent(q2.unit) assert (q1.unit == q2.unit)
[ "def", "test_quantity_pickelability", "(", ")", ":", "q1", "=", "(", "np", ".", "arange", "(", "10", ")", "*", "u", ".", "m", ")", "q2", "=", "pickle", ".", "loads", "(", "pickle", ".", "dumps", "(", "q1", ")", ")", "assert", "np", ".", "all", ...
testing pickleability of quantity .
train
false
29,613
def log_mask_zero(a): a = np.asarray(a) with np.errstate(divide='ignore'): a_log = np.log(a) a_log[(a <= 0)] = 0.0 return a_log
[ "def", "log_mask_zero", "(", "a", ")", ":", "a", "=", "np", ".", "asarray", "(", "a", ")", "with", "np", ".", "errstate", "(", "divide", "=", "'ignore'", ")", ":", "a_log", "=", "np", ".", "log", "(", "a", ")", "a_log", "[", "(", "a", "<=", "...
computes the log of input probabilities masking divide by zero in log .
train
true
29,614
def hipchat(registry, xml_parent, data): hipchat = XML.SubElement(xml_parent, 'jenkins.plugins.hipchat.HipChatNotifier') XML.SubElement(hipchat, 'token').text = str(data.get('token', '')) if ('rooms' in data): XML.SubElement(hipchat, 'room').text = str(','.join(data['rooms'])) mapping = [('notify-start', 'startNotification', False), ('notify-success', 'notifySuccess', False), ('notify-aborted', 'notifyAborted', False), ('notify-not-built', 'notifyNotBuilt', False), ('notify-unstable', 'notifyUnstable', False), ('notify-failure', 'notifyFailure', False), ('notify-back-to-normal', 'notifyBackToNormal', False)] helpers.convert_mapping_to_xml(hipchat, data, mapping, fail_required=True) if ('start-message' in data): XML.SubElement(hipchat, 'startJobMessage').text = str(data['start-message']) if ('complete-message' in data): XML.SubElement(hipchat, 'completeJobMessage').text = str(data['complete-message'])
[ "def", "hipchat", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "hipchat", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'jenkins.plugins.hipchat.HipChatNotifier'", ")", "XML", ".", "SubElement", "(", "hipchat", ",", "'token'", ")", ".",...
yaml: hipchat publisher that sends hipchat notifications on job events requires the jenkins :jenkins-wiki:hipchat plugin <hipchat+plugin> version >=1 .
train
false
29,615
def test_stackedblocks_without_params(): sb = StackedBlocks([Block(), Block()]) assert (sb._params is None)
[ "def", "test_stackedblocks_without_params", "(", ")", ":", "sb", "=", "StackedBlocks", "(", "[", "Block", "(", ")", ",", "Block", "(", ")", "]", ")", "assert", "(", "sb", ".", "_params", "is", "None", ")" ]
test stackedblocks when not all layers have trainable params .
train
false
29,616
def _onenormest_matrix_power(A, p, t=2, itmax=5, compute_v=False, compute_w=False, structure=None): return scipy.sparse.linalg.onenormest(MatrixPowerOperator(A, p, structure=structure))
[ "def", "_onenormest_matrix_power", "(", "A", ",", "p", ",", "t", "=", "2", ",", "itmax", "=", "5", ",", "compute_v", "=", "False", ",", "compute_w", "=", "False", ",", "structure", "=", "None", ")", ":", "return", "scipy", ".", "sparse", ".", "linalg...
efficiently estimate the 1-norm of a^p .
train
false
29,617
def variant(name, user=None): if (not user): user = _world.current_user() return _get_featurestate(name).variant(user)
[ "def", "variant", "(", "name", ",", "user", "=", "None", ")", ":", "if", "(", "not", "user", ")", ":", "user", "=", "_world", ".", "current_user", "(", ")", "return", "_get_featurestate", "(", "name", ")", ".", "variant", "(", "user", ")" ]
return which variant of an experiment a user is part of .
train
false
29,619
def check_for_openmp(): tmpdir = tempfile.mkdtemp() curdir = os.getcwd() exit_code = 1 if (os.name == 'nt'): return False try: os.chdir(tmpdir) compiler = os.getenv('CC', 'cc') filename = 'test.c' source = open(filename, 'wt', 1) source.write('\n #include <omp.h>\n #include <stdio.h>\n int main() {\n #pragma omp parallel\n printf("Hello from thread %d, nthreads %d",\n omp_get_thread_num(), omp_get_num_threads());\n }\n ') with open(os.devnull, 'w') as fnull: exit_code = subprocess.call([compiler, '-fopenmp', filename], stdout=fnull, stderr=fnull) source.close() finally: os.chdir(curdir) shutil.rmtree(tmpdir) return (exit_code == 0)
[ "def", "check_for_openmp", "(", ")", ":", "tmpdir", "=", "tempfile", ".", "mkdtemp", "(", ")", "curdir", "=", "os", ".", "getcwd", "(", ")", "exit_code", "=", "1", "if", "(", "os", ".", "name", "==", "'nt'", ")", ":", "return", "False", "try", ":",...
check for openmp support .
train
false
29,620
def dnn_gradinput(kerns, topgrad, img_shp, border_mode='valid', subsample=(1, 1), conv_mode='conv'): kerns = gpu_contiguous(kerns) topgrad = gpu_contiguous(topgrad) img_shp = theano.tensor.as_tensor_variable(img_shp) desc = GpuDnnConvDesc(border_mode=border_mode, subsample=subsample, conv_mode=conv_mode)(img_shp, kerns.shape) out = gpu_alloc_empty(*img_shp) return GpuDnnConvGradI()(kerns, topgrad, out, desc)
[ "def", "dnn_gradinput", "(", "kerns", ",", "topgrad", ",", "img_shp", ",", "border_mode", "=", "'valid'", ",", "subsample", "=", "(", "1", ",", "1", ")", ",", "conv_mode", "=", "'conv'", ")", ":", "kerns", "=", "gpu_contiguous", "(", "kerns", ")", "top...
gpu convolution gradient with respect to input using cudnn from nvidia .
train
false
29,621
def _utc_timestamp(datetime_obj): return datetime_obj.strftime(UTC_TIMESTAMP_FORMAT)
[ "def", "_utc_timestamp", "(", "datetime_obj", ")", ":", "return", "datetime_obj", ".", "strftime", "(", "UTC_TIMESTAMP_FORMAT", ")" ]
return string of datetime_obj in the utc timestamp format .
train
false
29,623
@pytest.mark.django_db def test_category_links_plugin_show_all(rf): category = get_default_category() category.status = CategoryStatus.VISIBLE category.shops.add(get_default_shop()) category.save() context = get_context(rf) plugin = CategoryLinksPlugin({'show_all_categories': False}) assert context['request'].customer.is_anonymous assert (context['request'].shop in category.shops.all()) assert (not plugin.get_context_data(context)['categories']) plugin = CategoryLinksPlugin({'show_all_categories': True}) assert (category in plugin.get_context_data(context)['categories'])
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_category_links_plugin_show_all", "(", "rf", ")", ":", "category", "=", "get_default_category", "(", ")", "category", ".", "status", "=", "CategoryStatus", ".", "VISIBLE", "category", ".", "shops", ".", ...
test that show_all_categories forces plugin to return all visible categories .
train
false
29,624
def getProfileString(): p = [] alt = [] global settingsList for set in settingsList: if set.isProfile(): if (set.getName() in tempOverride): p.append(((set.getName() + '=') + tempOverride[set.getName()])) else: p.append(((set.getName() + '=') + set.getValue().encode('utf-8'))) elif set.isAlteration(): if (set.getName() in tempOverride): alt.append(((set.getName() + '=') + tempOverride[set.getName()])) else: alt.append(((set.getName() + '=') + set.getValue().encode('utf-8'))) ret = (('\x08'.join(p) + '\x0c') + '\x08'.join(alt)) ret = base64.b64encode(zlib.compress(ret, 9)) return ret
[ "def", "getProfileString", "(", ")", ":", "p", "=", "[", "]", "alt", "=", "[", "]", "global", "settingsList", "for", "set", "in", "settingsList", ":", "if", "set", ".", "isProfile", "(", ")", ":", "if", "(", "set", ".", "getName", "(", ")", "in", ...
get an encoded string which contains all profile settings .
train
false
29,626
def class_mapper(class_, configure=True): mapper = _inspect_mapped_class(class_, configure=configure) if (mapper is None): if (not isinstance(class_, type)): raise sa_exc.ArgumentError(("Class object expected, got '%r'." % (class_,))) raise exc.UnmappedClassError(class_) else: return mapper
[ "def", "class_mapper", "(", "class_", ",", "configure", "=", "True", ")", ":", "mapper", "=", "_inspect_mapped_class", "(", "class_", ",", "configure", "=", "configure", ")", "if", "(", "mapper", "is", "None", ")", ":", "if", "(", "not", "isinstance", "(...
given a class .
train
false
29,628
def get_replacements(): replacements = [] for (pattern, repl) in config['replace'].get(dict).items(): repl = (repl or '') try: replacements.append((re.compile(pattern), repl)) except re.error: raise UserError(u'malformed regular expression in replace: {0}'.format(pattern)) return replacements
[ "def", "get_replacements", "(", ")", ":", "replacements", "=", "[", "]", "for", "(", "pattern", ",", "repl", ")", "in", "config", "[", "'replace'", "]", ".", "get", "(", "dict", ")", ".", "items", "(", ")", ":", "repl", "=", "(", "repl", "or", "'...
confit validation function that reads regex/string pairs .
train
false
29,629
def datetime2epoch(dt): if (dt is not None): return calendar.timegm(dt.utctimetuple())
[ "def", "datetime2epoch", "(", "dt", ")", ":", "if", "(", "dt", "is", "not", "None", ")", ":", "return", "calendar", ".", "timegm", "(", "dt", ".", "utctimetuple", "(", ")", ")" ]
convert a non-naive datetime object to a unix epoch timestamp .
train
false
29,630
@doctest_depends_on(exe=('latex', 'dvipng'), modules=('pyglet',)) def preview_diagram(diagram, masked=None, diagram_format='', groups=None, output='png', viewer=None, euler=True, **hints): from sympy.printing import preview latex_output = xypic_draw_diagram(diagram, masked, diagram_format, groups, **hints) preview(latex_output, output, viewer, euler, ('xypic',))
[ "@", "doctest_depends_on", "(", "exe", "=", "(", "'latex'", ",", "'dvipng'", ")", ",", "modules", "=", "(", "'pyglet'", ",", ")", ")", "def", "preview_diagram", "(", "diagram", ",", "masked", "=", "None", ",", "diagram_format", "=", "''", ",", "groups", ...
combines the functionality of xypic_draw_diagram and sympy .
train
false
29,631
def dir_hash(dir_name): if (not os.path.isdir(dir_name)): raise TypeError('{} is not a directory.'.format(dir_name)) md5 = hashlib.md5() for (root, dirs, files) in os.walk(dir_name, topdown=True): if (not re.search('/\\.', root)): for f in files: if ((not f.startswith('.')) and (not re.search('/\\.', f))): file_name = os.path.join(root, f) md5.update(hashlib.md5(file_name).hexdigest()) data = open(file_name, 'rb').read() md5.update(hashlib.md5(data).hexdigest()) return md5.hexdigest()
[ "def", "dir_hash", "(", "dir_name", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dir_name", ")", ")", ":", "raise", "TypeError", "(", "'{} is not a directory.'", ".", "format", "(", "dir_name", ")", ")", "md5", "=", "hashlib", "."...
return a hash for the files in a directory tree .
train
false
29,632
def hello6(): response.flash = 'Hello World in a flash!' return dict(message=T('Hello World'))
[ "def", "hello6", "(", ")", ":", "response", ".", "flash", "=", "'Hello World in a flash!'", "return", "dict", "(", "message", "=", "T", "(", "'Hello World'", ")", ")" ]
page rendered with a flash .
train
false
29,633
def guard_quota_size(): BPSMeter.do.change_quota()
[ "def", "guard_quota_size", "(", ")", ":", "BPSMeter", ".", "do", ".", "change_quota", "(", ")" ]
callback for change of quota_size .
train
false
29,636
def add_special_arithmetic_methods(cls, arith_method=None, comp_method=None, bool_method=None, use_numexpr=True, force=False, select=None, exclude=None, have_divmod=False): new_methods = _create_methods(arith_method, comp_method, bool_method, use_numexpr, default_axis=None, special=True, have_divmod=have_divmod) def _wrap_inplace_method(method): '\n return an inplace wrapper for this method\n ' def f(self, other): result = method(self, other) self._update_inplace(result.reindex_like(self, copy=False)._data, verify_is_copy=False) return self return f new_methods.update(dict(__iadd__=_wrap_inplace_method(new_methods['__add__']), __isub__=_wrap_inplace_method(new_methods['__sub__']), __imul__=_wrap_inplace_method(new_methods['__mul__']), __itruediv__=_wrap_inplace_method(new_methods['__truediv__']), __ipow__=_wrap_inplace_method(new_methods['__pow__']))) if (not compat.PY3): new_methods['__idiv__'] = new_methods['__div__'] add_methods(cls, new_methods=new_methods, force=force, select=select, exclude=exclude)
[ "def", "add_special_arithmetic_methods", "(", "cls", ",", "arith_method", "=", "None", ",", "comp_method", "=", "None", ",", "bool_method", "=", "None", ",", "use_numexpr", "=", "True", ",", "force", "=", "False", ",", "select", "=", "None", ",", "exclude", ...
adds the full suite of special arithmetic methods to the class .
train
false
29,637
def _reGroupDict(d, newgr): r = {} newgrks = newgr.keys() for (k, v) in d.items(): if (k in newgrks): r.setdefault(newgr[k][0], {})[newgr[k][1]] = v else: r[k] = v return r
[ "def", "_reGroupDict", "(", "d", ",", "newgr", ")", ":", "r", "=", "{", "}", "newgrks", "=", "newgr", ".", "keys", "(", ")", "for", "(", "k", ",", "v", ")", "in", "d", ".", "items", "(", ")", ":", "if", "(", "k", "in", "newgrks", ")", ":", ...
regroup keys in the d dictionary in subdictionaries .
train
false
29,638
def mask_matches(nick, mask): nick = strings.lower(nick) mask = strings.lower(mask) mask = mask.replace('\\', '\\\\') for ch in '.$|[](){}+': mask = mask.replace(ch, ('\\' + ch)) mask = mask.replace('?', '.') mask = mask.replace('*', '.*') r = re.compile(mask, re.IGNORECASE) return r.match(nick)
[ "def", "mask_matches", "(", "nick", ",", "mask", ")", ":", "nick", "=", "strings", ".", "lower", "(", "nick", ")", "mask", "=", "strings", ".", "lower", "(", "mask", ")", "mask", "=", "mask", ".", "replace", "(", "'\\\\'", ",", "'\\\\\\\\'", ")", "...
check if a nick matches a mask .
train
false
29,639
def pollute(names, objects): from inspect import currentframe frame = currentframe().f_back.f_back try: for (name, obj) in zip(names, objects): frame.f_globals[name] = obj finally: del frame
[ "def", "pollute", "(", "names", ",", "objects", ")", ":", "from", "inspect", "import", "currentframe", "frame", "=", "currentframe", "(", ")", ".", "f_back", ".", "f_back", "try", ":", "for", "(", "name", ",", "obj", ")", "in", "zip", "(", "names", "...
pollute the global namespace with symbols -> objects mapping .
train
false
29,640
def linkIntfs(node1, node2): link = Link(node1, node2) return (link.intf1, link.intf2)
[ "def", "linkIntfs", "(", "node1", ",", "node2", ")", ":", "link", "=", "Link", "(", "node1", ",", "node2", ")", "return", "(", "link", ".", "intf1", ",", "link", ".", "intf2", ")" ]
create link from node1 to node2 and return intfs .
train
false
29,643
def text3d(ax, xyz, s, zdir='z', size=None, angle=0, usetex=False, **kwargs): (x, y, z) = xyz if (zdir == 'y'): (xy1, z1) = ((x, z), y) elif (zdir == 'y'): (xy1, z1) = ((y, z), x) else: (xy1, z1) = ((x, y), z) text_path = TextPath((0, 0), s, size=size, usetex=usetex) trans = Affine2D().rotate(angle).translate(xy1[0], xy1[1]) p1 = PathPatch(trans.transform_path(text_path), **kwargs) ax.add_patch(p1) art3d.pathpatch_2d_to_3d(p1, z=z1, zdir=zdir)
[ "def", "text3d", "(", "ax", ",", "xyz", ",", "s", ",", "zdir", "=", "'z'", ",", "size", "=", "None", ",", "angle", "=", "0", ",", "usetex", "=", "False", ",", "**", "kwargs", ")", ":", "(", "x", ",", "y", ",", "z", ")", "=", "xyz", "if", ...
plots the string s on the axes ax .
train
false
29,644
@pytest.mark.network def test_should_not_install_always_from_cache(script): script.pip('install', 'INITools==0.2', expect_error=True) script.pip('uninstall', '-y', 'INITools') result = script.pip('install', 'INITools==0.1', expect_error=True) assert (((script.site_packages / 'INITools-0.2-py%s.egg-info') % pyversion) not in result.files_created) assert (((script.site_packages / 'INITools-0.1-py%s.egg-info') % pyversion) in result.files_created)
[ "@", "pytest", ".", "mark", ".", "network", "def", "test_should_not_install_always_from_cache", "(", "script", ")", ":", "script", ".", "pip", "(", "'install'", ",", "'INITools==0.2'", ",", "expect_error", "=", "True", ")", "script", ".", "pip", "(", "'uninsta...
if there is an old cached package .
train
false
29,645
def unicoder(p, force=False): if isinstance(p, unicode): return p if isinstance(p, str): if (gUTF or force): try: return p.decode('utf-8') except: return p.decode(codepage, 'replace') return p.decode(codepage, 'replace') else: return unicode(str(p))
[ "def", "unicoder", "(", "p", ",", "force", "=", "False", ")", ":", "if", "isinstance", "(", "p", ",", "unicode", ")", ":", "return", "p", "if", "isinstance", "(", "p", ",", "str", ")", ":", "if", "(", "gUTF", "or", "force", ")", ":", "try", ":"...
make sure a unicode string is returned when force is true .
train
false
29,647
def itertext(elem): if elem.text: (yield elem.text) for e in elem: for s in itertext(e): (yield s) if e.tail: (yield e.tail)
[ "def", "itertext", "(", "elem", ")", ":", "if", "elem", ".", "text", ":", "(", "yield", "elem", ".", "text", ")", "for", "e", "in", "elem", ":", "for", "s", "in", "itertext", "(", "e", ")", ":", "(", "yield", "s", ")", "if", "e", ".", "tail",...
loop through all children and return text only .
train
false
29,650
def installation(request): return render(request, 'ecosystem/installation.html', {'page': 'installation', 'category': 'publish'})
[ "def", "installation", "(", "request", ")", ":", "return", "render", "(", "request", ",", "'ecosystem/installation.html'", ",", "{", "'page'", ":", "'installation'", ",", "'category'", ":", "'publish'", "}", ")" ]
landing page for installation .
train
false
29,651
def testDbConnection(host, port, user, passwd): try: conn = pymysql.connect(host=host, port=port, user=user, passwd=passwd) cursor = conn.cursor() cursor.execute('CREATE DATABASE IF NOT EXISTS nupic_db_test') conn.select_db('nupic_db_test') cursor.execute('CREATE TABLE db_test (teststring VARCHAR(255), someint INT)') cursor.execute("INSERT INTO db_test VALUES ('testing123', 123)") cursor.execute('DROP TABLE IF EXISTS db_test') cursor.execute('DROP DATABASE IF EXISTS nupic_db_test') return True except pymysql.err.OperationalError: return False
[ "def", "testDbConnection", "(", "host", ",", "port", ",", "user", ",", "passwd", ")", ":", "try", ":", "conn", "=", "pymysql", ".", "connect", "(", "host", "=", "host", ",", "port", "=", "port", ",", "user", "=", "user", ",", "passwd", "=", "passwd...
determine if the specified host .
train
false
29,652
def testAuthentication(host=None, username=None, password=None, apikey=None): params = {} params['mode'] = 'queue' params['output'] = 'json' params['ma_username'] = username params['ma_password'] = password params['apikey'] = apikey url = ((host + 'api?') + urllib.urlencode(params)) logger.log((u'SABnzbd test URL: ' + url), logger.DEBUG) (result, f) = _sabURLOpenSimple(url) if (not result): return (False, f) (result, sabText) = _checkSabResponse(f) if (not result): return (False, sabText) return (True, 'Success')
[ "def", "testAuthentication", "(", "host", "=", "None", ",", "username", "=", "None", ",", "password", "=", "None", ",", "apikey", "=", "None", ")", ":", "params", "=", "{", "}", "params", "[", "'mode'", "]", "=", "'queue'", "params", "[", "'output'", ...
sends a simple api request to sab to determine if the given connection information is correct .
train
false
29,654
def fbthrift_library(name, srcs=[], deps=[], optimize=[], deprecated=False, **kwargs): fbthrift_library_target = FBThriftLibrary(name, srcs, deps, optimize, deprecated, blade.blade, kwargs) blade.blade.register_target(fbthrift_library_target)
[ "def", "fbthrift_library", "(", "name", ",", "srcs", "=", "[", "]", ",", "deps", "=", "[", "]", ",", "optimize", "=", "[", "]", ",", "deprecated", "=", "False", ",", "**", "kwargs", ")", ":", "fbthrift_library_target", "=", "FBThriftLibrary", "(", "nam...
fbthrift_library target .
train
false
29,655
def cumfreq(a, numbins=10, defaultreallimits=None, weights=None): (h, l, b, e) = _histogram(a, numbins, defaultreallimits, weights=weights) cumhist = np.cumsum((h * 1), axis=0) return CumfreqResult(cumhist, l, b, e)
[ "def", "cumfreq", "(", "a", ",", "numbins", "=", "10", ",", "defaultreallimits", "=", "None", ",", "weights", "=", "None", ")", ":", "(", "h", ",", "l", ",", "b", ",", "e", ")", "=", "_histogram", "(", "a", ",", "numbins", ",", "defaultreallimits",...
returns a cumulative frequency histogram .
train
false
29,656
def _stop_timers(canvas): for attr in dir(canvas): try: attr_obj = getattr(canvas, attr) except NotImplementedError: attr_obj = None if isinstance(attr_obj, Timer): attr_obj.stop()
[ "def", "_stop_timers", "(", "canvas", ")", ":", "for", "attr", "in", "dir", "(", "canvas", ")", ":", "try", ":", "attr_obj", "=", "getattr", "(", "canvas", ",", "attr", ")", "except", "NotImplementedError", ":", "attr_obj", "=", "None", "if", "isinstance...
stop all timers in a canvas .
train
true
29,657
def create_instance(options): project = get_project(options) print 'Creating instance {project}/{zone}/{instance}'.format(project=project, zone=get_zone(options), instance=options.instance) print ' with --machine_type={type} and --disk_size={disk_size}...'.format(type=options.machine_type, disk_size=options.disk_size) google_dev_dir = os.path.join(os.path.dirname(__file__), '../google/dev') dev_dir = os.path.dirname(__file__) project_dir = os.path.join(dev_dir, '..') install_dir = '{dir}/../install'.format(dir=dev_dir) startup_command = ['/opt/spinnaker/install/install_spinnaker.sh --dependencies_only', '/opt/spinnaker/install/install_development.sh'] (fd, temp_startup) = tempfile.mkstemp() os.write(fd, ';'.join(startup_command)) os.close(fd) metadata_files = ['startup-script={google_dev_dir}/google_install_loader.py,sh_bootstrap_dev={dev_dir}/bootstrap_dev.sh,sh_install_spinnaker={project_dir}/InstallSpinnaker.sh,sh_install_development={dev_dir}/install_development.sh,startup_command={temp_startup}'.format(google_dev_dir=google_dev_dir, dev_dir=dev_dir, project_dir=project_dir, temp_startup=temp_startup)] metadata = ','.join(['startup_loader_files=sh_install_spinnaker+sh_install_development+sh_bootstrap_dev']) command = ['gcloud', 'compute', 'instances', 'create', options.instance, '--project', get_project(options), '--zone', get_zone(options), '--machine-type', options.machine_type, '--image', 'ubuntu-14-04', '--scopes', 'compute-rw,storage-rw', '--boot-disk-size={size}'.format(size=options.disk_size), '--boot-disk-type={type}'.format(type=options.disk_type), '--metadata', metadata, '--metadata-from-file={files}'.format(files=','.join(metadata_files))] if options.address: command.extend(['--address', options.address]) check_run_quick(' '.join(command), echo=False)
[ "def", "create_instance", "(", "options", ")", ":", "project", "=", "get_project", "(", "options", ")", "print", "'Creating instance {project}/{zone}/{instance}'", ".", "format", "(", "project", "=", "project", ",", "zone", "=", "get_zone", "(", "options", ")", ...
the generic script can be customised by using parameters .
train
false
29,658
def toBytes(url): if _is_unicode(url): try: url = url.encode('ASCII') except UnicodeError: raise UnicodeError((('URL ' + repr(url)) + ' contains non-ASCII characters')) return url
[ "def", "toBytes", "(", "url", ")", ":", "if", "_is_unicode", "(", "url", ")", ":", "try", ":", "url", "=", "url", ".", "encode", "(", "'ASCII'", ")", "except", "UnicodeError", ":", "raise", "UnicodeError", "(", "(", "(", "'URL '", "+", "repr", "(", ...
tobytes --> url .
train
false
29,659
def is_bare_console(): if (platform.system() == 'Windows'): try: import ctypes GetConsoleProcessList = ctypes.windll.kernel32.GetConsoleProcessList num = GetConsoleProcessList(ctypes.byref(ctypes.c_int(0)), ctypes.c_int(1)) if (num == 1): return True except Exception: pass return False
[ "def", "is_bare_console", "(", ")", ":", "if", "(", "platform", ".", "system", "(", ")", "==", "'Windows'", ")", ":", "try", ":", "import", "ctypes", "GetConsoleProcessList", "=", "ctypes", ".", "windll", ".", "kernel32", ".", "GetConsoleProcessList", "num",...
returns true if overviewer is running in a bare console in windows .
train
false
29,660
def alpha_for_triangle(points, m, n): alpha = zeros((m, n)) for i in range(min(points[0]), max(points[0])): for j in range(min(points[1]), max(points[1])): x = linalg.solve(points, [i, j, 1]) if (min(x) > 0): alpha[(i, j)] = 1 return alpha
[ "def", "alpha_for_triangle", "(", "points", ",", "m", ",", "n", ")", ":", "alpha", "=", "zeros", "(", "(", "m", ",", "n", ")", ")", "for", "i", "in", "range", "(", "min", "(", "points", "[", "0", "]", ")", ",", "max", "(", "points", "[", "0",...
creates alpha map of size for a triangle with corners defined by points .
train
false
29,661
def get_integration_service_events(): events = {} for service in frappe.get_all(u'Integration Service', filters={u'enabled': 1}, fields=[u'name']): controller = get_integration_controller(service.name) if hasattr(controller, u'scheduler_events'): for (key, handlers) in controller.scheduler_events.items(): events.setdefault(key, []).extend(handlers) return events
[ "def", "get_integration_service_events", "(", ")", ":", "events", "=", "{", "}", "for", "service", "in", "frappe", ".", "get_all", "(", "u'Integration Service'", ",", "filters", "=", "{", "u'enabled'", ":", "1", "}", ",", "fields", "=", "[", "u'name'", "]"...
get scheduler events for enabled integrations .
train
false
29,662
def restore_ccx_collection(field_value, ccx_id=None): if (ccx_id is None): return field_value if isinstance(field_value, list): field_value = [restore_ccx(fv, ccx_id) for fv in field_value] elif isinstance(field_value, dict): for (key, val) in field_value.iteritems(): field_value[key] = restore_ccx(val, ccx_id) else: field_value = restore_ccx(field_value, ccx_id) return field_value
[ "def", "restore_ccx_collection", "(", "field_value", ",", "ccx_id", "=", "None", ")", ":", "if", "(", "ccx_id", "is", "None", ")", ":", "return", "field_value", "if", "isinstance", "(", "field_value", ",", "list", ")", ":", "field_value", "=", "[", "restor...
restore references to a ccx to collections of incoming values returns the original collection with all values converted to a ccx-aware state .
train
false
29,666
def _validate_variable_names(variables): variable_names = [v.name for v in variables] name_counts = Counter(variable_names) if (None in name_counts): none_names = [v for v in variables if (v.name is None)] raise ValueError('Variables must have names: {}'.format(none_names)) if any(((v > 1) for v in name_counts.values())): raise ValueError('Variables should have unique names. Duplicates: {}'.format(', '.join((k for (k, v) in name_counts.items() if (v > 1)))))
[ "def", "_validate_variable_names", "(", "variables", ")", ":", "variable_names", "=", "[", "v", ".", "name", "for", "v", "in", "variables", "]", "name_counts", "=", "Counter", "(", "variable_names", ")", "if", "(", "None", "in", "name_counts", ")", ":", "n...
check for missing and duplicate variable names .
train
false
29,668
def repo_absent(name, profile='github', **kwargs): ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''} try: target = __salt__['github.get_repo_info'](name, profile=profile, **kwargs) except CommandExecutionError: target = None if (not target): ret['comment'] = 'Repo {0} does not exist'.format(name) ret['result'] = True return ret else: if __opts__['test']: ret['comment'] = 'Repo {0} will be deleted'.format(name) ret['result'] = None return ret result = __salt__['github.remove_repo'](name, profile=profile, **kwargs) if result: ret['comment'] = 'Deleted repo {0}'.format(name) ret['changes'].setdefault('old', 'Repo {0} exists'.format(name)) ret['changes'].setdefault('new', 'Repo {0} deleted'.format(name)) ret['result'] = True else: ret['comment'] = 'Failed to delete repo {0}. Ensure the delete_repo scope is enabled if using OAuth.'.format(name) ret['result'] = False return ret
[ "def", "repo_absent", "(", "name", ",", "profile", "=", "'github'", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", ",", "'comment'", ":", "''", "}", "try", ":", ...
ensure a repo is absent .
train
true
29,669
def show_ver(): try: ret = sendline('show ver') except TerminalException as e: log.error(e) return 'Failed to "show ver"' return ret
[ "def", "show_ver", "(", ")", ":", "try", ":", "ret", "=", "sendline", "(", "'show ver'", ")", "except", "TerminalException", "as", "e", ":", "log", ".", "error", "(", "e", ")", "return", "'Failed to \"show ver\"'", "return", "ret" ]
shortcut to run show ver on switch .
train
false
29,670
def builders_from(registry, xml_parent, data): pbs = XML.SubElement(xml_parent, 'hudson.plugins.templateproject.ProxyBuilder') XML.SubElement(pbs, 'projectName').text = data
[ "def", "builders_from", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "pbs", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.templateproject.ProxyBuilder'", ")", "XML", ".", "SubElement", "(", "pbs", ",", "'projectName'", ")"...
yaml: builders-from use builders from another project .
train
false
29,672
def encoded_url(url): return bytes(url.toEncoded()).decode('ascii')
[ "def", "encoded_url", "(", "url", ")", ":", "return", "bytes", "(", "url", ".", "toEncoded", "(", ")", ")", ".", "decode", "(", "'ascii'", ")" ]
return the fully encoded url as string .
train
false
29,674
def _compute_cost_div_m(m, p, norm_info): return int(np.ceil((norm_info.alpha(p) / _theta[m])))
[ "def", "_compute_cost_div_m", "(", "m", ",", "p", ",", "norm_info", ")", ":", "return", "int", "(", "np", ".", "ceil", "(", "(", "norm_info", ".", "alpha", "(", "p", ")", "/", "_theta", "[", "m", "]", ")", ")", ")" ]
a helper function for computing bounds .
train
false
29,675
def _wait_for_handles(handles, timeout=(-1)): arrtype = (HANDLE * len(handles)) handle_array = arrtype(*handles) ret = windll.kernel32.WaitForMultipleObjects(len(handle_array), handle_array, BOOL(False), DWORD(timeout)) if (ret == WAIT_TIMEOUT): return None else: h = handle_array[ret] return h
[ "def", "_wait_for_handles", "(", "handles", ",", "timeout", "=", "(", "-", "1", ")", ")", ":", "arrtype", "=", "(", "HANDLE", "*", "len", "(", "handles", ")", ")", "handle_array", "=", "arrtype", "(", "*", "handles", ")", "ret", "=", "windll", ".", ...
waits for multiple handles .
train
true
29,677
def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): while True: chunk = file.read(size) if (not chunk): break (yield chunk)
[ "def", "read_chunks", "(", "file", ",", "size", "=", "io", ".", "DEFAULT_BUFFER_SIZE", ")", ":", "while", "True", ":", "chunk", "=", "file", ".", "read", "(", "size", ")", "if", "(", "not", "chunk", ")", ":", "break", "(", "yield", "chunk", ")" ]
yield pieces of data from a file-like object until eof .
train
true
29,679
def get_task(name): host = __salt__['config.option']('kapacitor.host', 'localhost') port = __salt__['config.option']('kapacitor.port', 9092) if (version() < '0.13'): url = 'http://{0}:{1}/task?name={2}'.format(host, port, name) else: url = 'http://{0}:{1}/kapacitor/v1/tasks/{2}?skip-format=true'.format(host, port, name) response = salt.utils.http.query(url, status=True) if (response['status'] == 404): return None data = json.loads(response['body']) if (version() < '0.13'): return {'script': data['TICKscript'], 'type': data['Type'], 'dbrps': data['DBRPs'], 'enabled': data['Enabled']} return {'script': data['script'], 'type': data['type'], 'dbrps': data['dbrps'], 'enabled': (data['status'] == 'enabled')}
[ "def", "get_task", "(", "name", ")", ":", "host", "=", "__salt__", "[", "'config.option'", "]", "(", "'kapacitor.host'", ",", "'localhost'", ")", "port", "=", "__salt__", "[", "'config.option'", "]", "(", "'kapacitor.port'", ",", "9092", ")", "if", "(", "v...
get a dict of data on a task .
train
false
29,680
@event.listens_for(ServiceRouterBinding.resource_type, 'set', retval=True) def validate_resource_type(target, value, oldvalue, initiator): maxlen = ServiceRouterBinding.resource_type.property.columns[0].type.length if (len(value) > maxlen): raise AttributeException(resource_type=value, maxlen=maxlen) return value
[ "@", "event", ".", "listens_for", "(", "ServiceRouterBinding", ".", "resource_type", ",", "'set'", ",", "retval", "=", "True", ")", "def", "validate_resource_type", "(", "target", ",", "value", ",", "oldvalue", ",", "initiator", ")", ":", "maxlen", "=", "Ser...
make sure the resource type fit the resource_type column .
train
false
29,681
@open_file(0, mode='rb') def read_edgelist(path, comments='#', delimiter=None, create_using=None, nodetype=None, data=True, edgetype=None, encoding='utf-8'): lines = (line.decode(encoding) for line in path) return parse_edgelist(lines, comments=comments, delimiter=delimiter, create_using=create_using, nodetype=nodetype, data=data)
[ "@", "open_file", "(", "0", ",", "mode", "=", "'rb'", ")", "def", "read_edgelist", "(", "path", ",", "comments", "=", "'#'", ",", "delimiter", "=", "None", ",", "create_using", "=", "None", ",", "nodetype", "=", "None", ",", "data", "=", "True", ",",...
read a bipartite graph from a list of edges .
train
false
29,682
def test_mutable_json_type(db, config, default_account, folder): from inbox.models.backends.imap import ImapFolderSyncStatus sync_status = ImapFolderSyncStatus(account_id=default_account.id, folder=folder) db.session.add(sync_status) db.session.commit() original_metrics = sync_status.metrics metrics = dict(download_uid_count=10, queue_checked_at=datetime.utcnow()) sync_status.update_metrics(metrics) updated_metrics = sync_status.metrics metrics.update(original_metrics) assert ((updated_metrics != original_metrics) and (updated_metrics == metrics)), 'metrics not updated correctly' new_metrics = dict(delete_uid_count=50, download_uid_count=100, queue_checked_at=datetime.utcnow()) sync_status.update_metrics(new_metrics) latest_metrics = sync_status.metrics metrics.update(new_metrics) assert (latest_metrics == metrics), 'metrics not re-updated correctly'
[ "def", "test_mutable_json_type", "(", "db", ",", "config", ",", "default_account", ",", "folder", ")", ":", "from", "inbox", ".", "models", ".", "backends", ".", "imap", "import", "ImapFolderSyncStatus", "sync_status", "=", "ImapFolderSyncStatus", "(", "account_id...
test that foldersync .
train
false
29,683
def assert_all_changes(start_state, end_state, expected_changes): __tracebackhide__ = True start_files = start_state end_files = end_state if isinstance(start_state, TestPipResult): start_files = start_state.files_before if isinstance(end_state, TestPipResult): end_files = end_state.files_after diff = diff_states(start_files, end_files, ignore=expected_changes) if (list(diff.values()) != [{}, {}, {}]): raise TestFailure(('Unexpected changes:\n' + '\n'.join([((k + ': ') + ', '.join(v.keys())) for (k, v) in diff.items()]))) return diff
[ "def", "assert_all_changes", "(", "start_state", ",", "end_state", ",", "expected_changes", ")", ":", "__tracebackhide__", "=", "True", "start_files", "=", "start_state", "end_files", "=", "end_state", "if", "isinstance", "(", "start_state", ",", "TestPipResult", ")...
fails if anything changed that isnt listed in the expected_changes .
train
false
29,684
def clean_app_id(app_id): if app_id.startswith('s~'): return app_id[2:] return app_id
[ "def", "clean_app_id", "(", "app_id", ")", ":", "if", "app_id", ".", "startswith", "(", "'s~'", ")", ":", "return", "app_id", "[", "2", ":", "]", "return", "app_id" ]
google app engine uses a special prepended string to signal that it is an hrd application .
train
false
29,687
def ls_tree(repo, tree_ish=None, outstream=sys.stdout, recursive=False, name_only=False): def list_tree(store, treeid, base): for (name, mode, sha) in store[treeid].iteritems(): if base: name = posixpath.join(base, name) if name_only: outstream.write((name + '\n')) else: outstream.write(pretty_format_tree_entry(name, mode, sha)) if stat.S_ISDIR(mode): list_tree(store, sha, name) if (tree_ish is None): tree_ish = 'HEAD' with open_repo_closing(repo) as r: c = r[tree_ish] treeid = c.tree list_tree(r.object_store, treeid, '')
[ "def", "ls_tree", "(", "repo", ",", "tree_ish", "=", "None", ",", "outstream", "=", "sys", ".", "stdout", ",", "recursive", "=", "False", ",", "name_only", "=", "False", ")", ":", "def", "list_tree", "(", "store", ",", "treeid", ",", "base", ")", ":"...
list contents of a tree .
train
false
29,688
def _get_display_range(image): ip = _get_image_properties(image) (immin, immax) = (np.min(image), np.max(image)) if ip.signed: magnitude = max(abs(immin), abs(immax)) (lo, hi) = ((- magnitude), magnitude) cmap = _diverging_colormap elif any(ip): _raise_warnings(ip) (lo, hi) = (immin, immax) cmap = _nonstandard_colormap else: lo = 0 imtype = image.dtype.type hi = dtypes.dtype_range[imtype][1] cmap = _default_colormap return (lo, hi, cmap)
[ "def", "_get_display_range", "(", "image", ")", ":", "ip", "=", "_get_image_properties", "(", "image", ")", "(", "immin", ",", "immax", ")", "=", "(", "np", ".", "min", "(", "image", ")", ",", "np", ".", "max", "(", "image", ")", ")", "if", "ip", ...
return the display range for a given set of image properties .
train
false
29,689
def fit_fr(self, data, *args, **kwds): (loc0, scale0) = lmap(kwds.get, ['loc', 'scale'], [0.0, 1.0]) Narg = len(args) if ((Narg == 0) and hasattr(self, '_fitstart')): x0 = self._fitstart(data) elif (Narg > self.numargs): raise ValueError('Too many input arguments.') else: args += ((1.0,) * (self.numargs - Narg)) x0 = (args + (loc0, scale0)) if ('frozen' in kwds): frmask = np.array(kwds['frozen']) if (len(frmask) != (self.numargs + 2)): raise ValueError('Incorrect number of frozen arguments.') else: x0 = np.array(x0)[np.isnan(frmask)] else: frmask = None return optimize.fmin(self.nnlf_fr, x0, args=(np.ravel(data), frmask), disp=0)
[ "def", "fit_fr", "(", "self", ",", "data", ",", "*", "args", ",", "**", "kwds", ")", ":", "(", "loc0", ",", "scale0", ")", "=", "lmap", "(", "kwds", ".", "get", ",", "[", "'loc'", ",", "'scale'", "]", ",", "[", "0.0", ",", "1.0", "]", ")", ...
estimate distribution parameters by mle taking some parameters as fixed parameters data : array .
train
false
29,692
def split_jaspar_id(id): id_split = id.split('.') base_id = None version = None if (len(id_split) == 2): base_id = id_split[0] version = id_split[1] else: base_id = id return (base_id, version)
[ "def", "split_jaspar_id", "(", "id", ")", ":", "id_split", "=", "id", ".", "split", "(", "'.'", ")", "base_id", "=", "None", "version", "=", "None", "if", "(", "len", "(", "id_split", ")", "==", "2", ")", ":", "base_id", "=", "id_split", "[", "0", ...
utility function to split a jaspar matrix id into its components .
train
false
29,693
def fix_pyext(mod_path): if (mod_path[(-4):] in ['.pyo', 'pyc']): mod_path = mod_path[:(-1)] return mod_path
[ "def", "fix_pyext", "(", "mod_path", ")", ":", "if", "(", "mod_path", "[", "(", "-", "4", ")", ":", "]", "in", "[", "'.pyo'", ",", "'pyc'", "]", ")", ":", "mod_path", "=", "mod_path", "[", ":", "(", "-", "1", ")", "]", "return", "mod_path" ]
fix a module filename path extension to always end with the modules source file .
train
false
29,694
def AddCrossEntropy(batch_size, n): cross_entropies = [] def _Pass(): return tf.constant(0, dtype=tf.float32, shape=[1]) for beam_id in range(batch_size): beam_gold_slot = tf.reshape(tf.slice(n['gold_slot'], [beam_id], [1]), [1]) def _ComputeCrossEntropy(): 'Adds ops to compute cross entropy of the gold path in a beam.' idx = tf.cast(tf.reshape(tf.where(tf.equal(n['beam_ids'], beam_id)), [(-1)]), tf.int32) beam_scores = tf.reshape(tf.gather(n['all_path_scores'], idx), [1, (-1)]) num = tf.shape(idx) return tf.nn.softmax_cross_entropy_with_logits(beam_scores, tf.expand_dims(tf.sparse_to_dense(beam_gold_slot, num, [1.0], 0.0), 0)) cross_entropies.append(cf.cond((beam_gold_slot[0] >= 0), _ComputeCrossEntropy, _Pass)) return {'cross_entropy': tf.div(tf.add_n(cross_entropies), batch_size)}
[ "def", "AddCrossEntropy", "(", "batch_size", ",", "n", ")", ":", "cross_entropies", "=", "[", "]", "def", "_Pass", "(", ")", ":", "return", "tf", ".", "constant", "(", "0", ",", "dtype", "=", "tf", ".", "float32", ",", "shape", "=", "[", "1", "]", ...
adds a cross entropy cost function .
train
false
29,695
def _test_output(ret, action, params): if (action == 'list'): ret['comment'] += 'The list action will just list an entity and will make no changes.\n' elif ((action == 'create') or (action == 'add')): ret['comment'] += 'The create action will attempt to create an entity if it does not already exist.\n' elif (action == 'delete'): ret['comment'] += 'The delete action will attempt to delete an existing entity if it exists.\n' elif (action == 'manage'): ret['comment'] += 'The manage action will create a new entity if it does not exist. If it does exist, it will be enforcedto the desired state.\n' elif (action == 'modify'): ret['comment'] += 'The modify action will attempt to modify an existing entity only if it exists.\n' ret['comment'] += 'An iControl REST Request will be made using the parameters:\n' ret['comment'] += json.dumps(params, indent=4) ret['changes'] = {} ret['result'] = None return ret
[ "def", "_test_output", "(", "ret", ",", "action", ",", "params", ")", ":", "if", "(", "action", "==", "'list'", ")", ":", "ret", "[", "'comment'", "]", "+=", "'The list action will just list an entity and will make no changes.\\n'", "elif", "(", "(", "action", "...
for testing just output what the state will attempt to do without actually doing it .
train
false
29,696
def test_init_variations(): dt_tiny_sec = (dt_tiny.jd2 * 86400.0) t1 = (Time(100000000000.0, format='cxcsec') + dt_tiny) t2 = Time(100000000000.0, dt_tiny_sec, format='cxcsec') t3 = Time(dt_tiny_sec, 100000000000.0, format='cxcsec') assert (t1.jd1 == t2.jd1) assert (t1.jd2 == t3.jd2) assert (t1.jd1 == t2.jd1) assert (t1.jd2 == t3.jd2)
[ "def", "test_init_variations", "(", ")", ":", "dt_tiny_sec", "=", "(", "dt_tiny", ".", "jd2", "*", "86400.0", ")", "t1", "=", "(", "Time", "(", "100000000000.0", ",", "format", "=", "'cxcsec'", ")", "+", "dt_tiny", ")", "t2", "=", "Time", "(", "1000000...
check that 3 ways of specifying a time + small offset are equivalent .
train
false
29,698
def bzr_wc_default_target(): test = 'bzr_wc_default_target' puts(magenta(('Executing test: %s' % test))) from fabtools.files import is_dir from fabtools import require assert (not is_dir(DIR)) require.bazaar.working_copy(REMOTE_URL) assert_wc_exists(DIR)
[ "def", "bzr_wc_default_target", "(", ")", ":", "test", "=", "'bzr_wc_default_target'", "puts", "(", "magenta", "(", "(", "'Executing test: %s'", "%", "test", ")", ")", ")", "from", "fabtools", ".", "files", "import", "is_dir", "from", "fabtools", "import", "re...
test creating a working copy at a default target location .
train
false
29,699
def get_perms(user_or_group, obj): check = ObjectPermissionChecker(user_or_group) return check.get_perms(obj)
[ "def", "get_perms", "(", "user_or_group", ",", "obj", ")", ":", "check", "=", "ObjectPermissionChecker", "(", "user_or_group", ")", "return", "check", ".", "get_perms", "(", "obj", ")" ]
returns permissions for given user/group and object pair .
train
false
29,700
def send_post_signup_email(user_id): for (key, content) in SIGNUP_EMAIL_CONTENT.value.iteritems(): if (content == SIGNUP_EMAIL_CONTENT.default_value[key]): log_new_error('Please ensure that the value for the admin config property SIGNUP_EMAIL_CONTENT is set, before allowing post-signup emails to be sent.') return user_settings = user_services.get_user_settings(user_id) email_subject = SIGNUP_EMAIL_CONTENT.value['subject'] email_body = ('Hi %s,<br><br>%s<br><br>%s' % (user_settings.username, SIGNUP_EMAIL_CONTENT.value['html_body'], EMAIL_FOOTER.value)) _send_email(user_id, feconf.SYSTEM_COMMITTER_ID, feconf.EMAIL_INTENT_SIGNUP, email_subject, email_body, feconf.NOREPLY_EMAIL_ADDRESS)
[ "def", "send_post_signup_email", "(", "user_id", ")", ":", "for", "(", "key", ",", "content", ")", "in", "SIGNUP_EMAIL_CONTENT", ".", "value", ".", "iteritems", "(", ")", ":", "if", "(", "content", "==", "SIGNUP_EMAIL_CONTENT", ".", "default_value", "[", "ke...
sends a post-signup email to the given user .
train
false
29,702
def force_header(header, value): def _decorator(func): '\n Decorates the given function.\n ' @wraps(func) def _inner(*args, **kwargs): '\n Alters the response.\n ' response = func(*args, **kwargs) force_header_for_response(response, header, value) return response return _inner return _decorator
[ "def", "force_header", "(", "header", ",", "value", ")", ":", "def", "_decorator", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "_inner", "(", "*", "args", ",", "**", "kwargs", ")", ":", "response", "=", "func", "(", "*", "args", ...
decorator that forces a header in the response to have a specific value .
train
false
29,709
def extractZip(archive, targetDir): try: if (not os.path.exists(targetDir)): os.mkdir(targetDir) zip_file = zipfile.ZipFile(archive, u'r', allowZip64=True) for member in zip_file.namelist(): filename = os.path.basename(member) if (not filename): continue source = zip_file.open(member) target = io.open(os.path.join(targetDir, filename), u'wb') shutil.copyfileobj(source, target) source.close() target.close() zip_file.close() return True except Exception as e: sickrage.srCore.srLogger.error((u'Zip extraction error: %r ' % repr(e))) return False
[ "def", "extractZip", "(", "archive", ",", "targetDir", ")", ":", "try", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "targetDir", ")", ")", ":", "os", ".", "mkdir", "(", "targetDir", ")", "zip_file", "=", "zipfile", ".", "ZipFile", ...
unzip a file to a directory .
train
false
29,711
def p_init_declarator_2(t): pass
[ "def", "p_init_declarator_2", "(", "t", ")", ":", "pass" ]
init_declarator : declarator equals initializer .
train
false
29,712
def once(func): lock = threading.Lock() def new_func(*args, **kwargs): if new_func.called: return with lock: if new_func.called: return rv = func(*args, **kwargs) new_func.called = True return rv new_func = update_wrapper(new_func, func) new_func.called = False return new_func
[ "def", "once", "(", "func", ")", ":", "lock", "=", "threading", ".", "Lock", "(", ")", "def", "new_func", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "new_func", ".", "called", ":", "return", "with", "lock", ":", "if", "new_func", ".", ...
runs a thing once and once only .
train
true
29,714
def _eps_cast(dtyp): dt = dtyp if (dt == np.longdouble): dt = np.float64 elif (dt == np.clongdouble): dt = np.complex128 return np.finfo(dt).eps
[ "def", "_eps_cast", "(", "dtyp", ")", ":", "dt", "=", "dtyp", "if", "(", "dt", "==", "np", ".", "longdouble", ")", ":", "dt", "=", "np", ".", "float64", "elif", "(", "dt", "==", "np", ".", "clongdouble", ")", ":", "dt", "=", "np", ".", "complex...
get the epsilon for dtype .
train
false
29,715
def output_storage_cli_info(name, version): LOG.info(_LI(' DCTB %(name)-35s%(version)s'), {'name': (name + ' version: '), 'version': version})
[ "def", "output_storage_cli_info", "(", "name", ",", "version", ")", ":", "LOG", ".", "info", "(", "_LI", "(", "' DCTB %(name)-35s%(version)s'", ")", ",", "{", "'name'", ":", "(", "name", "+", "' version: '", ")", ",", "'version'", ":", "version", "}", ")" ...
output storage cli info to the log file .
train
false
29,716
def get_makefile_filename(): if python_build: return os.path.join(project_base, 'Makefile') lib_dir = get_python_lib(plat_specific=1, standard_lib=1) return os.path.join(lib_dir, 'config', 'Makefile')
[ "def", "get_makefile_filename", "(", ")", ":", "if", "python_build", ":", "return", "os", ".", "path", ".", "join", "(", "project_base", ",", "'Makefile'", ")", "lib_dir", "=", "get_python_lib", "(", "plat_specific", "=", "1", ",", "standard_lib", "=", "1", ...
return the path of the makefile .
train
false