Columns:
  id_within_dataset      int64    values 1 to 55.5k
  snippet                string   lengths 19 to 14.2k
  tokens                 list     lengths 6 to 1.63k
  nl                     string   lengths 6 to 352
  split_within_dataset   string   1 value
  is_duplicated          bool     2 classes
12,449
def _is_inline_definition(arg): return (isinstance(arg, dict) and (len(arg) == 1) and isinstance(next(six.itervalues(arg)), list))
[ "def", "_is_inline_definition", "(", "arg", ")", ":", "return", "(", "isinstance", "(", "arg", ",", "dict", ")", "and", "(", "len", "(", "arg", ")", "==", "1", ")", "and", "isinstance", "(", "next", "(", "six", ".", "itervalues", "(", "arg", ")", ")", ",", "list", ")", ")" ]
returns true if arg is a dict with a single key whose value is a list .
train
true
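A reindented, runnable form of the snippet above, with illustrative calls (the inputs are invented for this example; six is the compatibility library the snippet already imports):

import six

def _is_inline_definition(arg):
    # True only for a single-key dict whose sole value is a list.
    return (isinstance(arg, dict)
            and len(arg) == 1
            and isinstance(next(six.itervalues(arg)), list))

print(_is_inline_definition({'steps': [1, 2]}))     # True
print(_is_inline_definition({'a': [1], 'b': [2]}))  # False: two keys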
12,450
def getRaisedHeightGrid(heightGrid, start): raisedHeightGrid = [] remainingHeight = (1.0 - start) for row in heightGrid: raisedRow = [] raisedHeightGrid.append(raisedRow) for element in row: raisedElement = ((remainingHeight * element) + start) raisedRow.append(raisedElement) return raisedHeightGrid
[ "def", "getRaisedHeightGrid", "(", "heightGrid", ",", "start", ")", ":", "raisedHeightGrid", "=", "[", "]", "remainingHeight", "=", "(", "1.0", "-", "start", ")", "for", "row", "in", "heightGrid", ":", "raisedRow", "=", "[", "]", "raisedHeightGrid", ".", "append", "(", "raisedRow", ")", "for", "element", "in", "row", ":", "raisedElement", "=", "(", "(", "remainingHeight", "*", "element", ")", "+", "start", ")", "raisedRow", ".", "append", "(", "raisedElement", ")", "return", "raisedHeightGrid" ]
get heightgrid raised above start .
train
false
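A reindented, runnable form of the snippet above; the sample grid and printed result are illustrative only. Each element in [0, 1] is rescaled linearly into [start, 1]:

def getRaisedHeightGrid(heightGrid, start):
    raisedHeightGrid = []
    remainingHeight = 1.0 - start
    for row in heightGrid:
        raisedRow = []
        raisedHeightGrid.append(raisedRow)
        for element in row:
            # 0.0 maps to start, 1.0 stays at 1.0.
            raisedRow.append(remainingHeight * element + start)
    return raisedHeightGrid

print(getRaisedHeightGrid([[0.0, 0.5, 1.0]], 0.5))  # [[0.5, 0.75, 1.0]]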
12,451
def write_tree_diff(f, store, old_tree, new_tree, diff_binary=False): changes = store.tree_changes(old_tree, new_tree) for ((oldpath, newpath), (oldmode, newmode), (oldsha, newsha)) in changes: write_object_diff(f, store, (oldpath, oldmode, oldsha), (newpath, newmode, newsha), diff_binary=diff_binary)
[ "def", "write_tree_diff", "(", "f", ",", "store", ",", "old_tree", ",", "new_tree", ",", "diff_binary", "=", "False", ")", ":", "changes", "=", "store", ".", "tree_changes", "(", "old_tree", ",", "new_tree", ")", "for", "(", "(", "oldpath", ",", "newpath", ")", ",", "(", "oldmode", ",", "newmode", ")", ",", "(", "oldsha", ",", "newsha", ")", ")", "in", "changes", ":", "write_object_diff", "(", "f", ",", "store", ",", "(", "oldpath", ",", "oldmode", ",", "oldsha", ")", ",", "(", "newpath", ",", "newmode", ",", "newsha", ")", ",", "diff_binary", "=", "diff_binary", ")" ]
write tree diff .
train
false
12,452
def Any(s): result = CodeRanges(chars_to_ranges(s)) result.str = ('Any(%s)' % repr(s)) return result
[ "def", "Any", "(", "s", ")", ":", "result", "=", "CodeRanges", "(", "chars_to_ranges", "(", "s", ")", ")", "result", ".", "str", "=", "(", "'Any(%s)'", "%", "repr", "(", "s", ")", ")", "return", "result" ]
returns an re which matches any character in the string s .
train
false
12,453
@hug.startup() def add_data(api): data.append("It's working")
[ "@", "hug", ".", "startup", "(", ")", "def", "add_data", "(", "api", ")", ":", "data", ".", "append", "(", "\"It's working\"", ")" ]
adds initial data to the api on startup .
train
false
12,454
def play_simple(paths): p = GstPlayer() p.run() for path in paths: p.play_file(path) p.block()
[ "def", "play_simple", "(", "paths", ")", ":", "p", "=", "GstPlayer", "(", ")", "p", ".", "run", "(", ")", "for", "path", "in", "paths", ":", "p", ".", "play_file", "(", "path", ")", "p", ".", "block", "(", ")" ]
play the files in paths in a straightforward way .
train
false
12,455
def quota_class_destroy_all_by_name(context, class_name): return IMPL.quota_class_destroy_all_by_name(context, class_name)
[ "def", "quota_class_destroy_all_by_name", "(", "context", ",", "class_name", ")", ":", "return", "IMPL", ".", "quota_class_destroy_all_by_name", "(", "context", ",", "class_name", ")" ]
destroy all quotas associated with a given quota class .
train
false
12,456
def rescale_intensity(image, in_range='image', out_range='dtype'): dtype = image.dtype.type (imin, imax) = intensity_range(image, in_range) (omin, omax) = intensity_range(image, out_range, clip_negative=(imin >= 0)) image = np.clip(image, imin, imax) image = ((image - imin) / float((imax - imin))) return dtype(((image * (omax - omin)) + omin))
[ "def", "rescale_intensity", "(", "image", ",", "in_range", "=", "'image'", ",", "out_range", "=", "'dtype'", ")", ":", "dtype", "=", "image", ".", "dtype", ".", "type", "(", "imin", ",", "imax", ")", "=", "intensity_range", "(", "image", ",", "in_range", ")", "(", "omin", ",", "omax", ")", "=", "intensity_range", "(", "image", ",", "out_range", ",", "clip_negative", "=", "(", "imin", ">=", "0", ")", ")", "image", "=", "np", ".", "clip", "(", "image", ",", "imin", ",", "imax", ")", "image", "=", "(", "(", "image", "-", "imin", ")", "/", "float", "(", "(", "imax", "-", "imin", ")", ")", ")", "return", "dtype", "(", "(", "(", "image", "*", "(", "omax", "-", "omin", ")", ")", "+", "omin", ")", ")" ]
return image after stretching or shrinking its intensity levels .
train
false
12,457
def get_distribution_version(): if (platform.system() == 'Linux'): try: distribution_version = platform.linux_distribution()[1] if ((not distribution_version) and os.path.isfile('/etc/system-release')): distribution_version = platform.linux_distribution(supported_dists=['system'])[1] except: distribution_version = platform.dist()[1] else: distribution_version = None return distribution_version
[ "def", "get_distribution_version", "(", ")", ":", "if", "(", "platform", ".", "system", "(", ")", "==", "'Linux'", ")", ":", "try", ":", "distribution_version", "=", "platform", ".", "linux_distribution", "(", ")", "[", "1", "]", "if", "(", "(", "not", "distribution_version", ")", "and", "os", ".", "path", ".", "isfile", "(", "'/etc/system-release'", ")", ")", ":", "distribution_version", "=", "platform", ".", "linux_distribution", "(", "supported_dists", "=", "[", "'system'", "]", ")", "[", "1", "]", "except", ":", "distribution_version", "=", "platform", ".", "dist", "(", ")", "[", "1", "]", "else", ":", "distribution_version", "=", "None", "return", "distribution_version" ]
return the distribution version .
train
false
12,458
def is_production_filename(filename): return (('test' not in filename) and ('docs' not in filename))
[ "def", "is_production_filename", "(", "filename", ")", ":", "return", "(", "(", "'test'", "not", "in", "filename", ")", "and", "(", "'docs'", "not", "in", "filename", ")", ")" ]
checks if the file contains production code .
train
false
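A reindented sketch with invented filenames; note these are plain substring tests, so any path containing 'test' or 'docs' anywhere is excluded:

def is_production_filename(filename):
    return 'test' not in filename and 'docs' not in filename

print(is_production_filename('pkg/core.py'))       # True
print(is_production_filename('pkg/test_core.py'))  # False
print(is_production_filename('docs/conf.py'))      # False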
12,459
def list_to_filename(filelist): if (len(filelist) > 1): return filelist else: return filelist[0]
[ "def", "list_to_filename", "(", "filelist", ")", ":", "if", "(", "len", "(", "filelist", ")", ">", "1", ")", ":", "return", "filelist", "else", ":", "return", "filelist", "[", "0", "]" ]
returns a list if filelist is a list of length greater than 1 , otherwise returns the single filename .
train
false
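A reindented sketch with invented inputs, showing the asymmetric return type (a multi-element list comes back unchanged, a single-element list is unwrapped):

def list_to_filename(filelist):
    if len(filelist) > 1:
        return filelist
    return filelist[0]

print(list_to_filename(['a.nii']))           # 'a.nii'
print(list_to_filename(['a.nii', 'b.nii']))  # ['a.nii', 'b.nii']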
12,460
def execute_return_result(cmd): ret = _run_all(cmd) if ((ret['retcode'] != 0) or ('not supported' in ret['stdout'].lower())): msg = 'Command Failed: {0}\n'.format(cmd) msg += 'Return Code: {0}\n'.format(ret['retcode']) msg += 'Output: {0}\n'.format(ret['stdout']) msg += 'Error: {0}\n'.format(ret['stderr']) raise CommandExecutionError(msg) return ret['stdout']
[ "def", "execute_return_result", "(", "cmd", ")", ":", "ret", "=", "_run_all", "(", "cmd", ")", "if", "(", "(", "ret", "[", "'retcode'", "]", "!=", "0", ")", "or", "(", "'not supported'", "in", "ret", "[", "'stdout'", "]", ".", "lower", "(", ")", ")", ")", ":", "msg", "=", "'Command Failed: {0}\\n'", ".", "format", "(", "cmd", ")", "msg", "+=", "'Return Code: {0}\\n'", ".", "format", "(", "ret", "[", "'retcode'", "]", ")", "msg", "+=", "'Output: {0}\\n'", ".", "format", "(", "ret", "[", "'stdout'", "]", ")", "msg", "+=", "'Error: {0}\\n'", ".", "format", "(", "ret", "[", "'stderr'", "]", ")", "raise", "CommandExecutionError", "(", "msg", ")", "return", "ret", "[", "'stdout'", "]" ]
executes the passed command .
train
true
12,461
def process_input(input, logfile, log_timestamp_format=None, alert_hooks=()): while True: line = input.readline() if (len(line) == 0): write_logline(logfile, TERM_MSG, log_timestamp_format) break if (line == '\n'): continue write_logline(logfile, line, log_timestamp_format) for (regex, callback) in alert_hooks: match = re.match(regex, line.strip()) if match: callback(*match.groups())
[ "def", "process_input", "(", "input", ",", "logfile", ",", "log_timestamp_format", "=", "None", ",", "alert_hooks", "=", "(", ")", ")", ":", "while", "True", ":", "line", "=", "input", ".", "readline", "(", ")", "if", "(", "len", "(", "line", ")", "==", "0", ")", ":", "write_logline", "(", "logfile", ",", "TERM_MSG", ",", "log_timestamp_format", ")", "break", "if", "(", "line", "==", "'\\n'", ")", ":", "continue", "write_logline", "(", "logfile", ",", "line", ",", "log_timestamp_format", ")", "for", "(", "regex", ",", "callback", ")", "in", "alert_hooks", ":", "match", "=", "re", ".", "match", "(", "regex", ",", "line", ".", "strip", "(", ")", ")", "if", "match", ":", "callback", "(", "*", "match", ".", "groups", "(", ")", ")" ]
continuously read lines from input stream and : write them to log , skip blank lines , and check each line against the alert hook regexes , firing the matching callbacks .
train
false
12,462
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
12,463
def test_interesting_type_implementations(): global called clr.AddReference('IronPythonTest') import IronPythonTest.interop.net.type.clrtype as IPT from IronPython.Runtime.Types import PythonType for x in [IPT.Sanity, IPT.SanityGeneric[int], IPT.SanityGenericConstructor[PythonType], IPT.SanityDerived, IPT.SanityUniqueConstructor, IPT.SanityNoIPythonObject]: called = False class MyType(type, ): def __clrtype__(self): global called called = True return x class X(object, ): __metaclass__ = MyType AreEqual(called, True) if (x != IPT.SanityUniqueConstructor): temp = X() if (x == IPT.SanityNoIPythonObject): AreEqual(type(temp), x) else: AreEqual(type(temp), X)
[ "def", "test_interesting_type_implementations", "(", ")", ":", "global", "called", "clr", ".", "AddReference", "(", "'IronPythonTest'", ")", "import", "IronPythonTest", ".", "interop", ".", "net", ".", "type", ".", "clrtype", "as", "IPT", "from", "IronPython", ".", "Runtime", ".", "Types", "import", "PythonType", "for", "x", "in", "[", "IPT", ".", "Sanity", ",", "IPT", ".", "SanityGeneric", "[", "int", "]", ",", "IPT", ".", "SanityGenericConstructor", "[", "PythonType", "]", ",", "IPT", ".", "SanityDerived", ",", "IPT", ".", "SanityUniqueConstructor", ",", "IPT", ".", "SanityNoIPythonObject", "]", ":", "called", "=", "False", "class", "MyType", "(", "type", ",", ")", ":", "def", "__clrtype__", "(", "self", ")", ":", "global", "called", "called", "=", "True", "return", "x", "class", "X", "(", "object", ",", ")", ":", "__metaclass__", "=", "MyType", "AreEqual", "(", "called", ",", "True", ")", "if", "(", "x", "!=", "IPT", ".", "SanityUniqueConstructor", ")", ":", "temp", "=", "X", "(", ")", "if", "(", "x", "==", "IPT", ".", "SanityNoIPythonObject", ")", ":", "AreEqual", "(", "type", "(", "temp", ")", ",", "x", ")", "else", ":", "AreEqual", "(", "type", "(", "temp", ")", ",", "X", ")" ]
test types that have been fully implemented in csharp .
train
false
12,464
def get_params_from_doc(cmd, style=u'--', help_flag=None, trap_error=True): res = CommandLine((u'which %s' % cmd.split(u' ')[0]), terminal_output=u'allatonce').run() cmd_path = res.runtime.stdout.strip() if (cmd_path == u''): raise Exception((u'Command %s not found' % cmd.split(u' ')[0])) if help_flag: cmd = u' '.join((cmd, help_flag)) doc = grab_doc(cmd, trap_error) return _parse_doc(doc, style)
[ "def", "get_params_from_doc", "(", "cmd", ",", "style", "=", "u'--'", ",", "help_flag", "=", "None", ",", "trap_error", "=", "True", ")", ":", "res", "=", "CommandLine", "(", "(", "u'which %s'", "%", "cmd", ".", "split", "(", "u' '", ")", "[", "0", "]", ")", ",", "terminal_output", "=", "u'allatonce'", ")", ".", "run", "(", ")", "cmd_path", "=", "res", ".", "runtime", ".", "stdout", ".", "strip", "(", ")", "if", "(", "cmd_path", "==", "u''", ")", ":", "raise", "Exception", "(", "(", "u'Command %s not found'", "%", "cmd", ".", "split", "(", "u' '", ")", "[", "0", "]", ")", ")", "if", "help_flag", ":", "cmd", "=", "u' '", ".", "join", "(", "(", "cmd", ",", "help_flag", ")", ")", "doc", "=", "grab_doc", "(", "cmd", ",", "trap_error", ")", "return", "_parse_doc", "(", "doc", ",", "style", ")" ]
auto-generate option map from command line help . parameters : cmd (string) , the command whose documentation we are fetching ; style (string , default '--') , the help command style .
train
false
12,465
def attach_volume(name=None, kwargs=None, instance_id=None, call=None): if (call != 'action'): raise SaltCloudSystemExit('The attach_volume action must be called with -a or --action.') if (not kwargs): kwargs = {} if ('instance_id' in kwargs): instance_id = kwargs['instance_id'] if (name and (not instance_id)): instance_id = _get_node(name)['instanceId'] if ((not name) and (not instance_id)): log.error('Either a name or an instance_id is required.') return False if ('volume_id' not in kwargs): log.error('A volume_id is required.') return False if ('device' not in kwargs): log.error('A device is required (ex. /dev/sdb1).') return False params = {'Action': 'AttachVolume', 'VolumeId': kwargs['volume_id'], 'InstanceId': instance_id, 'Device': kwargs['device']} log.debug(params) vm_ = get_configured_provider() data = salt.utils.cloud.wait_for_ip(__attach_vol_to_instance, update_args=(params, kwargs, instance_id), timeout=config.get_cloud_config_value('wait_for_ip_timeout', vm_, __opts__, default=(10 * 60)), interval=config.get_cloud_config_value('wait_for_ip_interval', vm_, __opts__, default=10), interval_multiplier=config.get_cloud_config_value('wait_for_ip_interval_multiplier', vm_, __opts__, default=1)) return data
[ "def", "attach_volume", "(", "name", "=", "None", ",", "kwargs", "=", "None", ",", "instance_id", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'action'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The attach_volume action must be called with -a or --action.'", ")", "if", "(", "not", "kwargs", ")", ":", "kwargs", "=", "{", "}", "if", "(", "'instance_id'", "in", "kwargs", ")", ":", "instance_id", "=", "kwargs", "[", "'instance_id'", "]", "if", "(", "name", "and", "(", "not", "instance_id", ")", ")", ":", "instance_id", "=", "_get_node", "(", "name", ")", "[", "'instanceId'", "]", "if", "(", "(", "not", "name", ")", "and", "(", "not", "instance_id", ")", ")", ":", "log", ".", "error", "(", "'Either a name or an instance_id is required.'", ")", "return", "False", "if", "(", "'volume_id'", "not", "in", "kwargs", ")", ":", "log", ".", "error", "(", "'A volume_id is required.'", ")", "return", "False", "if", "(", "'device'", "not", "in", "kwargs", ")", ":", "log", ".", "error", "(", "'A device is required (ex. /dev/sdb1).'", ")", "return", "False", "params", "=", "{", "'Action'", ":", "'AttachVolume'", ",", "'VolumeId'", ":", "kwargs", "[", "'volume_id'", "]", ",", "'InstanceId'", ":", "instance_id", ",", "'Device'", ":", "kwargs", "[", "'device'", "]", "}", "log", ".", "debug", "(", "params", ")", "vm_", "=", "get_configured_provider", "(", ")", "data", "=", "salt", ".", "utils", ".", "cloud", ".", "wait_for_ip", "(", "__attach_vol_to_instance", ",", "update_args", "=", "(", "params", ",", "kwargs", ",", "instance_id", ")", ",", "timeout", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_timeout'", ",", "vm_", ",", "__opts__", ",", "default", "=", "(", "10", "*", "60", ")", ")", ",", "interval", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_interval'", ",", "vm_", ",", "__opts__", ",", "default", "=", "10", ")", ",", "interval_multiplier", "=", "config", ".", "get_cloud_config_value", "(", "'wait_for_ip_interval_multiplier'", ",", "vm_", ",", "__opts__", ",", "default", "=", "1", ")", ")", "return", "data" ]
attaches a volume .
train
true
12,468
def alltt_escape(s): xlat = {'{': '\\{', '}': '\\}', '\\': '\\textbackslash{}'} return re.sub('[\\\\{}]', (lambda mo: xlat[mo.group()]), s)
[ "def", "alltt_escape", "(", "s", ")", ":", "xlat", "=", "{", "'{'", ":", "'\\\\{'", ",", "'}'", ":", "'\\\\}'", ",", "'\\\\'", ":", "'\\\\textbackslash{}'", "}", "return", "re", ".", "sub", "(", "'[\\\\\\\\{}]'", ",", "(", "lambda", "mo", ":", "xlat", "[", "mo", ".", "group", "(", ")", "]", ")", ",", "s", ")" ]
replace backslash and braces with their escaped equivalents .
train
false
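A reindented sketch with an invented input; because re.sub is given a function, the replacement strings are inserted literally with no further backslash processing:

import re

def alltt_escape(s):
    xlat = {'{': '\\{', '}': '\\}', '\\': '\\textbackslash{}'}
    # The character class matches one backslash or either brace.
    return re.sub(r'[\\{}]', lambda mo: xlat[mo.group()], s)

print(alltt_escape('a\\b{c}'))  # a\textbackslash{}b\{c\}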
12,469
def clamp(val, lower, upper): assert (lower <= upper) return min(max(val, lower), upper)
[ "def", "clamp", "(", "val", ",", "lower", ",", "upper", ")", ":", "assert", "(", "lower", "<=", "upper", ")", "return", "min", "(", "max", "(", "val", ",", "lower", ")", ",", "upper", ")" ]
clamp val to the inclusive range [lower , upper] .
train
false
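A reindented sketch with invented values:

def clamp(val, lower, upper):
    assert lower <= upper
    return min(max(val, lower), upper)

print(clamp(-2, 0, 3))  # 0: raised to the lower bound
print(clamp(1, 0, 3))   # 1: already in range
print(clamp(5, 0, 3))   # 3: capped at the upper bound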
12,470
def save(obj, f, pickle_module=pickle, pickle_protocol=DEFAULT_PROTOCOL): new_fd = False if isinstance(f, str): new_fd = True f = open(f, 'wb') try: return _save(obj, f, pickle_module, pickle_protocol) finally: if new_fd: f.close()
[ "def", "save", "(", "obj", ",", "f", ",", "pickle_module", "=", "pickle", ",", "pickle_protocol", "=", "DEFAULT_PROTOCOL", ")", ":", "new_fd", "=", "False", "if", "isinstance", "(", "f", ",", "str", ")", ":", "new_fd", "=", "True", "f", "=", "open", "(", "f", ",", "'wb'", ")", "try", ":", "return", "_save", "(", "obj", ",", "f", ",", "pickle_module", ",", "pickle_protocol", ")", "finally", ":", "if", "new_fd", ":", "f", ".", "close", "(", ")" ]
save an object to a file ; if f is a path string the file is opened and closed automatically .
train
false
12,471
def parse(json_string): try: json_data = json.loads(json_string) except: raise SchemaParseException(('Error parsing JSON: %s' % json_string)) names = Names() return make_avsc_object(json_data, names)
[ "def", "parse", "(", "json_string", ")", ":", "try", ":", "json_data", "=", "json", ".", "loads", "(", "json_string", ")", "except", ":", "raise", "SchemaParseException", "(", "(", "'Error parsing JSON: %s'", "%", "json_string", ")", ")", "names", "=", "Names", "(", ")", "return", "make_avsc_object", "(", "json_data", ",", "names", ")" ]
constructs a schema object from the given json string .
train
false
12,472
@treeio_login_required @handle_response_format def source_view(request, source_id, response_format='html'): source = get_object_or_404(SaleSource, pk=source_id) if ((not request.user.profile.has_permission(source)) and (not request.user.profile.is_admin('treeio.sales'))): return user_denied(request, message="You don't have access to this Sale Status") query = Q(source=source) if request.GET: query = (query & _get_filter_query(request.GET)) orders = Object.filter_by_request(request, SaleOrder.objects.filter(query)) all_products = Object.filter_by_request(request, Product.objects.filter(parent__isnull=True)) all_sources = Object.filter_by_request(request, SaleSource.objects) return render_to_response('sales/source_view', {'source': source, 'sources': all_sources, 'products': all_products, 'orders': orders}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "treeio_login_required", "@", "handle_response_format", "def", "source_view", "(", "request", ",", "source_id", ",", "response_format", "=", "'html'", ")", ":", "source", "=", "get_object_or_404", "(", "SaleSource", ",", "pk", "=", "source_id", ")", "if", "(", "(", "not", "request", ".", "user", ".", "profile", ".", "has_permission", "(", "source", ")", ")", "and", "(", "not", "request", ".", "user", ".", "profile", ".", "is_admin", "(", "'treeio.sales'", ")", ")", ")", ":", "return", "user_denied", "(", "request", ",", "message", "=", "\"You don't have access to this Sale Status\"", ")", "query", "=", "Q", "(", "source", "=", "source", ")", "if", "request", ".", "GET", ":", "query", "=", "(", "query", "&", "_get_filter_query", "(", "request", ".", "GET", ")", ")", "orders", "=", "Object", ".", "filter_by_request", "(", "request", ",", "SaleOrder", ".", "objects", ".", "filter", "(", "query", ")", ")", "all_products", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Product", ".", "objects", ".", "filter", "(", "parent__isnull", "=", "True", ")", ")", "all_sources", "=", "Object", ".", "filter_by_request", "(", "request", ",", "SaleSource", ".", "objects", ")", "return", "render_to_response", "(", "'sales/source_view'", ",", "{", "'source'", ":", "source", ",", "'sources'", ":", "all_sources", ",", "'products'", ":", "all_products", ",", "'orders'", ":", "orders", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
orders filtered by source .
train
false
12,476
def _patch_object(target, attribute, new=DEFAULT, spec=None, create=False, spec_set=None, autospec=None, new_callable=None, **kwargs): return _patch((lambda : target), attribute, new, spec, create, spec_set, autospec, new_callable, kwargs)
[ "def", "_patch_object", "(", "target", ",", "attribute", ",", "new", "=", "DEFAULT", ",", "spec", "=", "None", ",", "create", "=", "False", ",", "spec_set", "=", "None", ",", "autospec", "=", "None", ",", "new_callable", "=", "None", ",", "**", "kwargs", ")", ":", "return", "_patch", "(", "(", "lambda", ":", "target", ")", ",", "attribute", ",", "new", ",", "spec", ",", "create", ",", "spec_set", ",", "autospec", ",", "new_callable", ",", "kwargs", ")" ]
patch the named member on an object with a mock object .
train
false
12,477
def make_hybi07_frame(buf, opcode=1): if (len(buf) > 65535): length = ('\x7f%s' % pack('>Q', len(buf))) elif (len(buf) > 125): length = ('~%s' % pack('>H', len(buf))) else: length = chr(len(buf)) header = chr((128 | opcode)) frame = ('%s%s%s' % (header, length, buf)) return frame
[ "def", "make_hybi07_frame", "(", "buf", ",", "opcode", "=", "1", ")", ":", "if", "(", "len", "(", "buf", ")", ">", "65535", ")", ":", "length", "=", "(", "'\\x7f%s'", "%", "pack", "(", "'>Q'", ",", "len", "(", "buf", ")", ")", ")", "elif", "(", "len", "(", "buf", ")", ">", "125", ")", ":", "length", "=", "(", "'~%s'", "%", "pack", "(", "'>H'", ",", "len", "(", "buf", ")", ")", ")", "else", ":", "length", "=", "chr", "(", "len", "(", "buf", ")", ")", "header", "=", "chr", "(", "(", "128", "|", "opcode", ")", ")", "frame", "=", "(", "'%s%s%s'", "%", "(", "header", ",", "length", ",", "buf", ")", ")", "return", "frame" ]
make a hybi-07 frame .
train
false
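The snippet above is Python 2 (chr and str concatenation of binary data). A hedged Python 3 port of the same length-encoding rules, with an invented payload, might look like:

from struct import pack

def make_hybi07_frame(buf, opcode=1):
    # Payload length per hybi-07: one byte if <= 125,
    # 0x7e plus 2 bytes if <= 65535, else 0x7f plus 8 bytes.
    if len(buf) > 65535:
        length = b'\x7f' + pack('>Q', len(buf))
    elif len(buf) > 125:
        length = b'~' + pack('>H', len(buf))
    else:
        length = bytes([len(buf)])
    header = bytes([0x80 | opcode])  # FIN bit set
    return header + length + buf

print(make_hybi07_frame(b'hi'))  # b'\x81\x02hi'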
12,478
def get_per_sample_average_diversities(rarefaction_data, depth=None): if (depth is None): depth = array(rarefaction_data[3])[:, 0].max() rare_mat = array([row for row in rarefaction_data[3] if (row[0] == depth)]) rare_mat = rare_mat.mean(0)[2:] sids = rarefaction_data[0][3:] return dict(izip(sids, rare_mat))
[ "def", "get_per_sample_average_diversities", "(", "rarefaction_data", ",", "depth", "=", "None", ")", ":", "if", "(", "depth", "is", "None", ")", ":", "depth", "=", "array", "(", "rarefaction_data", "[", "3", "]", ")", "[", ":", ",", "0", "]", ".", "max", "(", ")", "rare_mat", "=", "array", "(", "[", "row", "for", "row", "in", "rarefaction_data", "[", "3", "]", "if", "(", "row", "[", "0", "]", "==", "depth", ")", "]", ")", "rare_mat", "=", "rare_mat", ".", "mean", "(", "0", ")", "[", "2", ":", "]", "sids", "=", "rarefaction_data", "[", "0", "]", "[", "3", ":", "]", "return", "dict", "(", "izip", "(", "sids", ",", "rare_mat", ")", ")" ]
compute per-sample average diversities from the rarefaction data at the given depth .
train
false
12,481
@cleanup def test__EventCollection__get_positions(): (_, coll, props) = generate_EventCollection_plot() np.testing.assert_array_equal(props[u'positions'], coll.get_positions())
[ "@", "cleanup", "def", "test__EventCollection__get_positions", "(", ")", ":", "(", "_", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "np", ".", "testing", ".", "assert_array_equal", "(", "props", "[", "u'positions'", "]", ",", "coll", ".", "get_positions", "(", ")", ")" ]
check to make sure the default positions match the input positions .
train
false
12,482
def cr_uid_context(method): method._api = 'cr_uid_context' return method
[ "def", "cr_uid_context", "(", "method", ")", ":", "method", ".", "_api", "=", "'cr_uid_context'", "return", "method" ]
decorate a traditional-style method that takes cr , uid , context as parameters .
train
false
12,483
def get_projection_matrix(array_type=c_float, glGetMethod=glGetFloatv): m = (array_type * 16)() glGetMethod(GL_PROJECTION_MATRIX, m) return m
[ "def", "get_projection_matrix", "(", "array_type", "=", "c_float", ",", "glGetMethod", "=", "glGetFloatv", ")", ":", "m", "=", "(", "array_type", "*", "16", ")", "(", ")", "glGetMethod", "(", "GL_PROJECTION_MATRIX", ",", "m", ")", "return", "m" ]
returns the current projection matrix .
train
false
12,484
def email_change_email(user): from r2.lib.pages import EmailChangeEmail return _system_email(user.email, EmailChangeEmail(user=user).render(style='email'), Email.Kind.EMAIL_CHANGE)
[ "def", "email_change_email", "(", "user", ")", ":", "from", "r2", ".", "lib", ".", "pages", "import", "EmailChangeEmail", "return", "_system_email", "(", "user", ".", "email", ",", "EmailChangeEmail", "(", "user", "=", "user", ")", ".", "render", "(", "style", "=", "'email'", ")", ",", "Email", ".", "Kind", ".", "EMAIL_CHANGE", ")" ]
queues a system email for a email change notification .
train
false
12,485
def get_complete_version(version=None): if (version is None): from django import VERSION as version else: assert (len(version) == 5) assert (version[3] in ('alpha', 'beta', 'rc', 'final')) return version
[ "def", "get_complete_version", "(", "version", "=", "None", ")", ":", "if", "(", "version", "is", "None", ")", ":", "from", "django", "import", "VERSION", "as", "version", "else", ":", "assert", "(", "len", "(", "version", ")", "==", "5", ")", "assert", "(", "version", "[", "3", "]", "in", "(", "'alpha'", ",", "'beta'", ",", "'rc'", ",", "'final'", ")", ")", "return", "version" ]
returns a tuple of the django version .
train
false
12,486
def libvlc_audio_set_delay(p_mi, i_delay): f = (_Cfunctions.get('libvlc_audio_set_delay', None) or _Cfunction('libvlc_audio_set_delay', ((1,), (1,)), None, ctypes.c_int, MediaPlayer, ctypes.c_int64)) return f(p_mi, i_delay)
[ "def", "libvlc_audio_set_delay", "(", "p_mi", ",", "i_delay", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_set_delay'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_set_delay'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "ctypes", ".", "c_int", ",", "MediaPlayer", ",", "ctypes", ".", "c_int64", ")", ")", "return", "f", "(", "p_mi", ",", "i_delay", ")" ]
set current audio delay .
train
true
12,488
def _get_cmap_norms(): cmap = get_cmap(u'RdBu', lut=5) clevs = [(-5.0), (-2.5), (-0.5), 0.5, 1.5, 3.5] norms = dict() norms[u'neither'] = BoundaryNorm(clevs, (len(clevs) - 1)) norms[u'min'] = BoundaryNorm(([(-10)] + clevs[1:]), (len(clevs) - 1)) norms[u'max'] = BoundaryNorm((clevs[:(-1)] + [10]), (len(clevs) - 1)) norms[u'both'] = BoundaryNorm((([(-10)] + clevs[1:(-1)]) + [10]), (len(clevs) - 1)) return (cmap, norms)
[ "def", "_get_cmap_norms", "(", ")", ":", "cmap", "=", "get_cmap", "(", "u'RdBu'", ",", "lut", "=", "5", ")", "clevs", "=", "[", "(", "-", "5.0", ")", ",", "(", "-", "2.5", ")", ",", "(", "-", "0.5", ")", ",", "0.5", ",", "1.5", ",", "3.5", "]", "norms", "=", "dict", "(", ")", "norms", "[", "u'neither'", "]", "=", "BoundaryNorm", "(", "clevs", ",", "(", "len", "(", "clevs", ")", "-", "1", ")", ")", "norms", "[", "u'min'", "]", "=", "BoundaryNorm", "(", "(", "[", "(", "-", "10", ")", "]", "+", "clevs", "[", "1", ":", "]", ")", ",", "(", "len", "(", "clevs", ")", "-", "1", ")", ")", "norms", "[", "u'max'", "]", "=", "BoundaryNorm", "(", "(", "clevs", "[", ":", "(", "-", "1", ")", "]", "+", "[", "10", "]", ")", ",", "(", "len", "(", "clevs", ")", "-", "1", ")", ")", "norms", "[", "u'both'", "]", "=", "BoundaryNorm", "(", "(", "(", "[", "(", "-", "10", ")", "]", "+", "clevs", "[", "1", ":", "(", "-", "1", ")", "]", ")", "+", "[", "10", "]", ")", ",", "(", "len", "(", "clevs", ")", "-", "1", ")", ")", "return", "(", "cmap", ",", "norms", ")" ]
define a colormap and appropriate norms for each of the four possible settings of the extend keyword .
train
false
12,489
def next_url(request): next = request.GET.get(u'next', request.POST.get(u'next', u'')) host = request.get_host() return (next if (next and is_safe_url(next, host=host)) else None)
[ "def", "next_url", "(", "request", ")", ":", "next", "=", "request", ".", "GET", ".", "get", "(", "u'next'", ",", "request", ".", "POST", ".", "get", "(", "u'next'", ",", "u''", ")", ")", "host", "=", "request", ".", "get_host", "(", ")", "return", "(", "next", "if", "(", "next", "and", "is_safe_url", "(", "next", ",", "host", "=", "host", ")", ")", "else", "None", ")" ]
returns url to redirect to from the next param in the request .
train
true
12,490
def remove_shim_context(event): if ('context' in event): context = event['context'] context_fields_to_remove = set(CONTEXT_FIELDS_TO_INCLUDE) context_fields_to_remove.add('client_id') for field in context_fields_to_remove: if (field in context): del context[field]
[ "def", "remove_shim_context", "(", "event", ")", ":", "if", "(", "'context'", "in", "event", ")", ":", "context", "=", "event", "[", "'context'", "]", "context_fields_to_remove", "=", "set", "(", "CONTEXT_FIELDS_TO_INCLUDE", ")", "context_fields_to_remove", ".", "add", "(", "'client_id'", ")", "for", "field", "in", "context_fields_to_remove", ":", "if", "(", "field", "in", "context", ")", ":", "del", "context", "[", "field", "]" ]
remove obsolete fields from event context .
train
false
12,491
def SplitPatch(data): patches = [] filename = None diff = [] for line in data.splitlines(True): new_filename = None if line.startswith('Index:'): (unused, new_filename) = line.split(':', 1) new_filename = new_filename.strip() elif line.startswith('Property changes on:'): (unused, temp_filename) = line.split(':', 1) temp_filename = to_slash(temp_filename.strip()) if (temp_filename != filename): new_filename = temp_filename if new_filename: if (filename and diff): patches.append((filename, ''.join(diff))) filename = new_filename diff = [line] continue if (diff is not None): diff.append(line) if (filename and diff): patches.append((filename, ''.join(diff))) return patches
[ "def", "SplitPatch", "(", "data", ")", ":", "patches", "=", "[", "]", "filename", "=", "None", "diff", "=", "[", "]", "for", "line", "in", "data", ".", "splitlines", "(", "True", ")", ":", "new_filename", "=", "None", "if", "line", ".", "startswith", "(", "'Index:'", ")", ":", "(", "unused", ",", "new_filename", ")", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "new_filename", "=", "new_filename", ".", "strip", "(", ")", "elif", "line", ".", "startswith", "(", "'Property changes on:'", ")", ":", "(", "unused", ",", "temp_filename", ")", "=", "line", ".", "split", "(", "':'", ",", "1", ")", "temp_filename", "=", "to_slash", "(", "temp_filename", ".", "strip", "(", ")", ")", "if", "(", "temp_filename", "!=", "filename", ")", ":", "new_filename", "=", "temp_filename", "if", "new_filename", ":", "if", "(", "filename", "and", "diff", ")", ":", "patches", ".", "append", "(", "(", "filename", ",", "''", ".", "join", "(", "diff", ")", ")", ")", "filename", "=", "new_filename", "diff", "=", "[", "line", "]", "continue", "if", "(", "diff", "is", "not", "None", ")", ":", "diff", ".", "append", "(", "line", ")", "if", "(", "filename", "and", "diff", ")", ":", "patches", ".", "append", "(", "(", "filename", ",", "''", ".", "join", "(", "diff", ")", ")", ")", "return", "patches" ]
splits a patch into separate pieces for each file .
train
false
12,492
def strip_default(arg): i = arg.find('=') if (i == (-1)): return arg t = arg[:i].strip() return t
[ "def", "strip_default", "(", "arg", ")", ":", "i", "=", "arg", ".", "find", "(", "'='", ")", "if", "(", "i", "==", "(", "-", "1", ")", ")", ":", "return", "arg", "t", "=", "arg", "[", ":", "i", "]", ".", "strip", "(", ")", "return", "t" ]
return the argname from an arg = default string .
train
false
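A reindented sketch with invented argument strings:

def strip_default(arg):
    i = arg.find('=')
    if i == -1:
        return arg
    return arg[:i].strip()

print(strip_default('timeout = 30'))  # 'timeout'
print(strip_default('verbose'))       # 'verbose': no default present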
12,494
def _is_num_param(names, values, to_float=False): fun = ((to_float and float) or int) out_params = [] for (name, val) in zip(names, values): if (val is None): out_params.append(val) elif isinstance(val, (int, long, float, string_type)): try: out_params.append(fun(val)) except ValueError as e: raise VdtParamError(name, val) else: raise VdtParamError(name, val) return out_params
[ "def", "_is_num_param", "(", "names", ",", "values", ",", "to_float", "=", "False", ")", ":", "fun", "=", "(", "(", "to_float", "and", "float", ")", "or", "int", ")", "out_params", "=", "[", "]", "for", "(", "name", ",", "val", ")", "in", "zip", "(", "names", ",", "values", ")", ":", "if", "(", "val", "is", "None", ")", ":", "out_params", ".", "append", "(", "val", ")", "elif", "isinstance", "(", "val", ",", "(", "int", ",", "long", ",", "float", ",", "string_type", ")", ")", ":", "try", ":", "out_params", ".", "append", "(", "fun", "(", "val", ")", ")", "except", "ValueError", "as", "e", ":", "raise", "VdtParamError", "(", "name", ",", "val", ")", "else", ":", "raise", "VdtParamError", "(", "name", ",", "val", ")", "return", "out_params" ]
return numbers from inputs or raise vdtparamerror .
train
true
12,495
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
12,497
def _lazy_colorama_init(): pass
[ "def", "_lazy_colorama_init", "(", ")", ":", "pass" ]
lazily init colorama if necessary .
train
false
12,498
def _find_volume(name): docker_volumes = __salt__['dockerng.volumes']()['Volumes'] if docker_volumes: volumes = [v for v in docker_volumes if (v['Name'] == name)] if volumes: return volumes[0] return None
[ "def", "_find_volume", "(", "name", ")", ":", "docker_volumes", "=", "__salt__", "[", "'dockerng.volumes'", "]", "(", ")", "[", "'Volumes'", "]", "if", "docker_volumes", ":", "volumes", "=", "[", "v", "for", "v", "in", "docker_volumes", "if", "(", "v", "[", "'Name'", "]", "==", "name", ")", "]", "if", "volumes", ":", "return", "volumes", "[", "0", "]", "return", "None" ]
find volume by name on minion .
train
true
12,501
@logic.validate(logic.schema.default_activity_list_schema) def user_activity_list(context, data_dict): _check_access('user_show', context, data_dict) model = context['model'] user_ref = data_dict.get('id') user = model.User.get(user_ref) if (user is None): raise logic.NotFound offset = data_dict.get('offset', 0) limit = int(data_dict.get('limit', config.get('ckan.activity_list_limit', 31))) _activity_objects = model.activity.user_activity_list(user.id, limit=limit, offset=offset) activity_objects = _filter_activity_by_user(_activity_objects, _activity_stream_get_filtered_users()) return model_dictize.activity_list_dictize(activity_objects, context)
[ "@", "logic", ".", "validate", "(", "logic", ".", "schema", ".", "default_activity_list_schema", ")", "def", "user_activity_list", "(", "context", ",", "data_dict", ")", ":", "_check_access", "(", "'user_show'", ",", "context", ",", "data_dict", ")", "model", "=", "context", "[", "'model'", "]", "user_ref", "=", "data_dict", ".", "get", "(", "'id'", ")", "user", "=", "model", ".", "User", ".", "get", "(", "user_ref", ")", "if", "(", "user", "is", "None", ")", ":", "raise", "logic", ".", "NotFound", "offset", "=", "data_dict", ".", "get", "(", "'offset'", ",", "0", ")", "limit", "=", "int", "(", "data_dict", ".", "get", "(", "'limit'", ",", "config", ".", "get", "(", "'ckan.activity_list_limit'", ",", "31", ")", ")", ")", "_activity_objects", "=", "model", ".", "activity", ".", "user_activity_list", "(", "user", ".", "id", ",", "limit", "=", "limit", ",", "offset", "=", "offset", ")", "activity_objects", "=", "_filter_activity_by_user", "(", "_activity_objects", ",", "_activity_stream_get_filtered_users", "(", ")", ")", "return", "model_dictize", ".", "activity_list_dictize", "(", "activity_objects", ",", "context", ")" ]
return a user's public activity stream .
train
false
12,502
def document_iters_to_multipart_byteranges(ranges_iter, boundary): divider = (('--' + boundary) + '\r\n') terminator = (('--' + boundary) + '--') for range_spec in ranges_iter: start_byte = range_spec['start_byte'] end_byte = range_spec['end_byte'] entity_length = range_spec.get('entity_length', '*') content_type = range_spec['content_type'] part_iter = range_spec['part_iter'] part_header = ''.join((divider, 'Content-Type: ', str(content_type), '\r\n', 'Content-Range: ', ('bytes %d-%d/%s\r\n' % (start_byte, end_byte, entity_length)), '\r\n')) (yield part_header) for chunk in part_iter: (yield chunk) (yield '\r\n') (yield terminator)
[ "def", "document_iters_to_multipart_byteranges", "(", "ranges_iter", ",", "boundary", ")", ":", "divider", "=", "(", "(", "'--'", "+", "boundary", ")", "+", "'\\r\\n'", ")", "terminator", "=", "(", "(", "'--'", "+", "boundary", ")", "+", "'--'", ")", "for", "range_spec", "in", "ranges_iter", ":", "start_byte", "=", "range_spec", "[", "'start_byte'", "]", "end_byte", "=", "range_spec", "[", "'end_byte'", "]", "entity_length", "=", "range_spec", ".", "get", "(", "'entity_length'", ",", "'*'", ")", "content_type", "=", "range_spec", "[", "'content_type'", "]", "part_iter", "=", "range_spec", "[", "'part_iter'", "]", "part_header", "=", "''", ".", "join", "(", "(", "divider", ",", "'Content-Type: '", ",", "str", "(", "content_type", ")", ",", "'\\r\\n'", ",", "'Content-Range: '", ",", "(", "'bytes %d-%d/%s\\r\\n'", "%", "(", "start_byte", ",", "end_byte", ",", "entity_length", ")", ")", ",", "'\\r\\n'", ")", ")", "(", "yield", "part_header", ")", "for", "chunk", "in", "part_iter", ":", "(", "yield", "chunk", ")", "(", "yield", "'\\r\\n'", ")", "(", "yield", "terminator", ")" ]
takes an iterator of range iters and yields a multipart/byteranges mime document suitable for sending as the body of a multi-range 206 response .
train
false
12,505
def _footer_openedx_link(): title = _('Powered by Open edX') return {'url': settings.FOOTER_OPENEDX_URL, 'title': title, 'image': settings.FOOTER_OPENEDX_LOGO_IMAGE}
[ "def", "_footer_openedx_link", "(", ")", ":", "title", "=", "_", "(", "'Powered by Open edX'", ")", "return", "{", "'url'", ":", "settings", ".", "FOOTER_OPENEDX_URL", ",", "'title'", ":", "title", ",", "'image'", ":", "settings", ".", "FOOTER_OPENEDX_LOGO_IMAGE", "}" ]
return the image link for "powered by openedx" .
train
false
12,506
def in_preview_mode(): hostname = get_current_request_hostname() preview_lms_base = settings.FEATURES.get('PREVIEW_LMS_BASE', None) return bool((preview_lms_base and hostname and (hostname.split(':')[0] == preview_lms_base.split(':')[0])))
[ "def", "in_preview_mode", "(", ")", ":", "hostname", "=", "get_current_request_hostname", "(", ")", "preview_lms_base", "=", "settings", ".", "FEATURES", ".", "get", "(", "'PREVIEW_LMS_BASE'", ",", "None", ")", "return", "bool", "(", "(", "preview_lms_base", "and", "hostname", "and", "(", "hostname", ".", "split", "(", "':'", ")", "[", "0", "]", "==", "preview_lms_base", ".", "split", "(", "':'", ")", "[", "0", "]", ")", ")", ")" ]
returns whether the user is in preview mode or not .
train
false
12,508
def validate_dataframe(array): from numbers import Number for vector in array: if isinstance(vector[0], Number): if (not all((isinstance(item, Number) for item in vector))): raise exceptions.PlotlyError('Error in dataframe. Make sure all entries of each column are either numbers or strings.') elif isinstance(vector[0], str): if (not all((isinstance(item, str) for item in vector))): raise exceptions.PlotlyError('Error in dataframe. Make sure all entries of each column are either numbers or strings.')
[ "def", "validate_dataframe", "(", "array", ")", ":", "from", "numbers", "import", "Number", "for", "vector", "in", "array", ":", "if", "isinstance", "(", "vector", "[", "0", "]", ",", "Number", ")", ":", "if", "(", "not", "all", "(", "(", "isinstance", "(", "item", ",", "Number", ")", "for", "item", "in", "vector", ")", ")", ")", ":", "raise", "exceptions", ".", "PlotlyError", "(", "'Error in dataframe. Make sure all entries of each column are either numbers or strings.'", ")", "elif", "isinstance", "(", "vector", "[", "0", "]", ",", "str", ")", ":", "if", "(", "not", "all", "(", "(", "isinstance", "(", "item", ",", "str", ")", "for", "item", "in", "vector", ")", ")", ")", ":", "raise", "exceptions", ".", "PlotlyError", "(", "'Error in dataframe. Make sure all entries of each column are either numbers or strings.'", ")" ]
validates that each dataframe column contains either all numbers or all strings ; raises a plotlyerror if any two items in a column differ in type .
train
false
12,509
def generate_short_url(long_url): if ((settings.BITLY_LOGIN is None) or (settings.BITLY_API_KEY is None)): return '' keys = {'format': 'json', 'longUrl': long_url, 'login': settings.BITLY_LOGIN, 'apiKey': settings.BITLY_API_KEY} params = urlencode(keys) resp = requests.post(settings.BITLY_API_URL, params).json() if (resp['status_code'] == 200): short_url = resp.get('data', {}).get('url', '') return short_url elif (resp['status_code'] == 401): raise BitlyUnauthorizedException("Unauthorized access to bitly's API") elif (resp['status_code'] == 403): raise BitlyRateLimitException("Rate limit exceeded while using bitly's API.") else: raise BitlyException("Error code: {0} recieved from bitly's API.".format(resp['status_code']))
[ "def", "generate_short_url", "(", "long_url", ")", ":", "if", "(", "(", "settings", ".", "BITLY_LOGIN", "is", "None", ")", "or", "(", "settings", ".", "BITLY_API_KEY", "is", "None", ")", ")", ":", "return", "''", "keys", "=", "{", "'format'", ":", "'json'", ",", "'longUrl'", ":", "long_url", ",", "'login'", ":", "settings", ".", "BITLY_LOGIN", ",", "'apiKey'", ":", "settings", ".", "BITLY_API_KEY", "}", "params", "=", "urlencode", "(", "keys", ")", "resp", "=", "requests", ".", "post", "(", "settings", ".", "BITLY_API_URL", ",", "params", ")", ".", "json", "(", ")", "if", "(", "resp", "[", "'status_code'", "]", "==", "200", ")", ":", "short_url", "=", "resp", ".", "get", "(", "'data'", ",", "{", "}", ")", ".", "get", "(", "'url'", ",", "''", ")", "return", "short_url", "elif", "(", "resp", "[", "'status_code'", "]", "==", "401", ")", ":", "raise", "BitlyUnauthorizedException", "(", "\"Unauthorized access to bitly's API\"", ")", "elif", "(", "resp", "[", "'status_code'", "]", "==", "403", ")", ":", "raise", "BitlyRateLimitException", "(", "\"Rate limit exceeded while using bitly's API.\"", ")", "else", ":", "raise", "BitlyException", "(", "\"Error code: {0} recieved from bitly's API.\"", ".", "format", "(", "resp", "[", "'status_code'", "]", ")", ")" ]
return a shortened url for a given long_url via bitly's api .
train
false
12,510
def systemInformationType13(): a = L2PseudoLength(l2pLength=0) b = TpPd(pd=6) c = MessageType(mesType=0) d = Si13RestOctets() packet = (((a / b) / c) / d) return packet
[ "def", "systemInformationType13", "(", ")", ":", "a", "=", "L2PseudoLength", "(", "l2pLength", "=", "0", ")", "b", "=", "TpPd", "(", "pd", "=", "6", ")", "c", "=", "MessageType", "(", "mesType", "=", "0", ")", "d", "=", "Si13RestOctets", "(", ")", "packet", "=", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "return", "packet" ]
system information type 13 section 9 .
train
true
12,511
def slash_url_suffix(url, suffix): return ((url.endswith('/') and ('%s/' % suffix)) or ('/%s' % suffix))
[ "def", "slash_url_suffix", "(", "url", ",", "suffix", ")", ":", "return", "(", "(", "url", ".", "endswith", "(", "'/'", ")", "and", "(", "'%s/'", "%", "suffix", ")", ")", "or", "(", "'/%s'", "%", "suffix", ")", ")" ]
adds a slash either to the beginning or the end of a suffix .
train
false
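A reindented sketch with invented URLs:

def slash_url_suffix(url, suffix):
    return (url.endswith('/') and '%s/' % suffix) or '/%s' % suffix

print(slash_url_suffix('http://host/base/', 'edit'))  # 'edit/'
print(slash_url_suffix('http://host/base', 'edit'))   # '/edit'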
12,512
def writeContentsFile(documentDirectoryPath, hypertextFiles): output = cStringIO.StringIO() output.write('<html>\n <head>\n <title>Contents</title>\n </head>\n <body>\n') navigationLine = getNavigationLine('Contents', 'Previous', getNextLinkText(hypertextFiles, 0)) output.write(navigationLine) for hypertextFile in hypertextFiles: writeContentsLine(hypertextFile, output) output.write(navigationLine) output.write(' </body>\n</html>\n') filePath = os.path.join(documentDirectoryPath, 'contents.html') archive.writeFileText(filePath, output.getvalue())
[ "def", "writeContentsFile", "(", "documentDirectoryPath", ",", "hypertextFiles", ")", ":", "output", "=", "cStringIO", ".", "StringIO", "(", ")", "output", ".", "write", "(", "'<html>\\n <head>\\n <title>Contents</title>\\n </head>\\n <body>\\n'", ")", "navigationLine", "=", "getNavigationLine", "(", "'Contents'", ",", "'Previous'", ",", "getNextLinkText", "(", "hypertextFiles", ",", "0", ")", ")", "output", ".", "write", "(", "navigationLine", ")", "for", "hypertextFile", "in", "hypertextFiles", ":", "writeContentsLine", "(", "hypertextFile", ",", "output", ")", "output", ".", "write", "(", "navigationLine", ")", "output", ".", "write", "(", "' </body>\\n</html>\\n'", ")", "filePath", "=", "os", ".", "path", ".", "join", "(", "documentDirectoryPath", ",", "'contents.html'", ")", "archive", ".", "writeFileText", "(", "filePath", ",", "output", ".", "getvalue", "(", ")", ")" ]
write the contents file .
train
false
12,514
def aggregate_host_get_by_metadata_key(context, key): return IMPL.aggregate_host_get_by_metadata_key(context, key)
[ "def", "aggregate_host_get_by_metadata_key", "(", "context", ",", "key", ")", ":", "return", "IMPL", ".", "aggregate_host_get_by_metadata_key", "(", "context", ",", "key", ")" ]
get hosts with a specific metadata key for all aggregates .
train
false
12,516
def PyToPB(numobj): numpb = PhoneNumberPB() if (numobj.country_code is not None): numpb.country_code = numobj.country_code if (numobj.national_number is not None): numpb.national_number = numobj.national_number if (numobj.extension is not None): numpb.extension = numobj.extension if (numobj.italian_leading_zero is not None): numpb.italian_leading_zero = numobj.italian_leading_zero if (numobj.number_of_leading_zeros is not None): numpb.number_of_leading_zeros = numobj.number_of_leading_zeros if (numobj.raw_input is not None): numpb.raw_input = numobj.raw_input if (numobj.country_code_source is not None): numpb.country_code_source = numobj.country_code_source if (numobj.preferred_domestic_carrier_code is not None): numpb.preferred_domestic_carrier_code = numobj.preferred_domestic_carrier_code return numpb
[ "def", "PyToPB", "(", "numobj", ")", ":", "numpb", "=", "PhoneNumberPB", "(", ")", "if", "(", "numobj", ".", "country_code", "is", "not", "None", ")", ":", "numpb", ".", "country_code", "=", "numobj", ".", "country_code", "if", "(", "numobj", ".", "national_number", "is", "not", "None", ")", ":", "numpb", ".", "national_number", "=", "numobj", ".", "national_number", "if", "(", "numobj", ".", "extension", "is", "not", "None", ")", ":", "numpb", ".", "extension", "=", "numobj", ".", "extension", "if", "(", "numobj", ".", "italian_leading_zero", "is", "not", "None", ")", ":", "numpb", ".", "italian_leading_zero", "=", "numobj", ".", "italian_leading_zero", "if", "(", "numobj", ".", "number_of_leading_zeros", "is", "not", "None", ")", ":", "numpb", ".", "number_of_leading_zeros", "=", "numobj", ".", "number_of_leading_zeros", "if", "(", "numobj", ".", "raw_input", "is", "not", "None", ")", ":", "numpb", ".", "raw_input", "=", "numobj", ".", "raw_input", "if", "(", "numobj", ".", "country_code_source", "is", "not", "None", ")", ":", "numpb", ".", "country_code_source", "=", "numobj", ".", "country_code_source", "if", "(", "numobj", ".", "preferred_domestic_carrier_code", "is", "not", "None", ")", ":", "numpb", ".", "preferred_domestic_carrier_code", "=", "numobj", ".", "preferred_domestic_carrier_code", "return", "numpb" ]
convert a python phonenumber object to its protocol buffer equivalent .
train
true
12,517
@app.route('/account/<subscription_id>/resourcegroups/<resource_group_name>/storageaccounts/<account_name>/queues/<queue_name>') @auth.require_login def storageaccount_queue_view(subscription_id, resource_group_name, account_name, queue_name): creds = _get_credentials() model = models.get_queue_details(subscription_id, creds, resource_group_name, account_name, queue_name) return render_template('storageaccount_queue.html', title=queue_name, year=datetime.now().year, subscription_id=subscription_id, resource_group_name=resource_group_name, account_name=account_name, model=model)
[ "@", "app", ".", "route", "(", "'/account/<subscription_id>/resourcegroups/<resource_group_name>/storageaccounts/<account_name>/queues/<queue_name>'", ")", "@", "auth", ".", "require_login", "def", "storageaccount_queue_view", "(", "subscription_id", ",", "resource_group_name", ",", "account_name", ",", "queue_name", ")", ":", "creds", "=", "_get_credentials", "(", ")", "model", "=", "models", ".", "get_queue_details", "(", "subscription_id", ",", "creds", ",", "resource_group_name", ",", "account_name", ",", "queue_name", ")", "return", "render_template", "(", "'storageaccount_queue.html'", ",", "title", "=", "queue_name", ",", "year", "=", "datetime", ".", "now", "(", ")", ".", "year", ",", "subscription_id", "=", "subscription_id", ",", "resource_group_name", "=", "resource_group_name", ",", "account_name", "=", "account_name", ",", "model", "=", "model", ")" ]
renders the storage account queue details .
train
false
12,518
def assert_all_ineq_give_class_Inequality(a, b): from sympy.core.relational import _Inequality as Inequality assert isinstance((a > b), Inequality) assert isinstance((a >= b), Inequality) assert isinstance((a < b), Inequality) assert isinstance((a <= b), Inequality) assert isinstance((b > a), Inequality) assert isinstance((b >= a), Inequality) assert isinstance((b < a), Inequality) assert isinstance((b <= a), Inequality)
[ "def", "assert_all_ineq_give_class_Inequality", "(", "a", ",", "b", ")", ":", "from", "sympy", ".", "core", ".", "relational", "import", "_Inequality", "as", "Inequality", "assert", "isinstance", "(", "(", "a", ">", "b", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "a", ">=", "b", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "a", "<", "b", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "a", "<=", "b", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "b", ">", "a", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "b", ">=", "a", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "b", "<", "a", ")", ",", "Inequality", ")", "assert", "isinstance", "(", "(", "b", "<=", "a", ")", ",", "Inequality", ")" ]
all inequality operations on a and b result in class inequality .
train
false
12,519
@image_comparison(baseline_images=[u'EventCollection_plot__set_orientation']) def test__EventCollection__set_orientation(): (splt, coll, props) = generate_EventCollection_plot() new_orientation = u'vertical' coll.set_orientation(new_orientation) assert_equal(new_orientation, coll.get_orientation()) assert_equal(False, coll.is_horizontal()) check_segments(coll, props[u'positions'], props[u'linelength'], props[u'lineoffset'], new_orientation) splt.set_title(u'EventCollection: set_orientation') splt.set_ylim((-1), 22) splt.set_xlim(0, 2)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'EventCollection_plot__set_orientation'", "]", ")", "def", "test__EventCollection__set_orientation", "(", ")", ":", "(", "splt", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "new_orientation", "=", "u'vertical'", "coll", ".", "set_orientation", "(", "new_orientation", ")", "assert_equal", "(", "new_orientation", ",", "coll", ".", "get_orientation", "(", ")", ")", "assert_equal", "(", "False", ",", "coll", ".", "is_horizontal", "(", ")", ")", "check_segments", "(", "coll", ",", "props", "[", "u'positions'", "]", ",", "props", "[", "u'linelength'", "]", ",", "props", "[", "u'lineoffset'", "]", ",", "new_orientation", ")", "splt", ".", "set_title", "(", "u'EventCollection: set_orientation'", ")", "splt", ".", "set_ylim", "(", "(", "-", "1", ")", ",", "22", ")", "splt", ".", "set_xlim", "(", "0", ",", "2", ")" ]
check to make sure set_orientation works properly .
train
false
12,520
def on_commit(func, using=None): get_connection(using).on_commit(func)
[ "def", "on_commit", "(", "func", ",", "using", "=", "None", ")", ":", "get_connection", "(", "using", ")", ".", "on_commit", "(", "func", ")" ]
register func to be called when the current transaction is committed .
train
false
12,521
def sine_transform(f, x, k, **hints): return SineTransform(f, x, k).doit(**hints)
[ "def", "sine_transform", "(", "f", ",", "x", ",", "k", ",", "**", "hints", ")", ":", "return", "SineTransform", "(", "f", ",", "x", ",", "k", ")", ".", "doit", "(", "**", "hints", ")" ]
compute the unitary , ordinary-frequency sine transform of f .
train
false
12,522
def array_agg(*arg, **kw): kw['type_'] = ARRAY(functions._type_from_args(arg)) return functions.func.array_agg(*arg, **kw)
[ "def", "array_agg", "(", "*", "arg", ",", "**", "kw", ")", ":", "kw", "[", "'type_'", "]", "=", "ARRAY", "(", "functions", ".", "_type_from_args", "(", "arg", ")", ")", "return", "functions", ".", "func", ".", "array_agg", "(", "*", "arg", ",", "**", "kw", ")" ]
postgresql-specific form of the array_agg aggregate function .
train
false
12,523
def get_scaled_average_rating(ratings): n = get_number_of_ratings(ratings) if (n == 0): return 0 average_rating = get_average_rating(ratings) z = 1.9599639715843482 x = ((average_rating - 1) / 4) a = (x + ((z ** 2) / (2 * n))) b = (z * math.sqrt((((x * (1 - x)) / n) + ((z ** 2) / (4 * (n ** 2)))))) wilson_score_lower_bound = ((a - b) / (1 + ((z ** 2) / n))) return (1 + (4 * wilson_score_lower_bound))
[ "def", "get_scaled_average_rating", "(", "ratings", ")", ":", "n", "=", "get_number_of_ratings", "(", "ratings", ")", "if", "(", "n", "==", "0", ")", ":", "return", "0", "average_rating", "=", "get_average_rating", "(", "ratings", ")", "z", "=", "1.9599639715843482", "x", "=", "(", "(", "average_rating", "-", "1", ")", "/", "4", ")", "a", "=", "(", "x", "+", "(", "(", "z", "**", "2", ")", "/", "(", "2", "*", "n", ")", ")", ")", "b", "=", "(", "z", "*", "math", ".", "sqrt", "(", "(", "(", "(", "x", "*", "(", "1", "-", "x", ")", ")", "/", "n", ")", "+", "(", "(", "z", "**", "2", ")", "/", "(", "4", "*", "(", "n", "**", "2", ")", ")", ")", ")", ")", ")", "wilson_score_lower_bound", "=", "(", "(", "a", "-", "b", ")", "/", "(", "1", "+", "(", "(", "z", "**", "2", ")", "/", "n", ")", ")", ")", "return", "(", "1", "+", "(", "4", "*", "wilson_score_lower_bound", ")", ")" ]
returns the lower bound wilson score of the ratings as a float .
train
false
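The two helpers the snippet calls (get_number_of_ratings, get_average_rating) are not shown in this dump, so this self-contained sketch takes their results as arguments; the sample numbers are invented:

import math

def scaled_average_rating(n, average_rating):
    # n ratings whose mean is average_rating on a 1-5 scale.
    if n == 0:
        return 0
    z = 1.9599639715843482        # z-score for 95% confidence
    x = (average_rating - 1) / 4  # map the 1-5 mean onto [0, 1]
    a = x + z ** 2 / (2 * n)
    b = z * math.sqrt(x * (1 - x) / n + z ** 2 / (4 * n ** 2))
    wilson_lower = (a - b) / (1 + z ** 2 / n)
    return 1 + 4 * wilson_lower  # map back to the 1-5 scale

print(scaled_average_rating(10, 4.0))  # roughly 2.77: few ratings pull the bound down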
12,525
@cbook.deprecated(u'2.1') def ttfdict_to_fnames(d): fnames = [] for named in six.itervalues(d): for styled in six.itervalues(named): for variantd in six.itervalues(styled): for weightd in six.itervalues(variantd): for stretchd in six.itervalues(weightd): for fname in six.itervalues(stretchd): fnames.append(fname) return fnames
[ "@", "cbook", ".", "deprecated", "(", "u'2.1'", ")", "def", "ttfdict_to_fnames", "(", "d", ")", ":", "fnames", "=", "[", "]", "for", "named", "in", "six", ".", "itervalues", "(", "d", ")", ":", "for", "styled", "in", "six", ".", "itervalues", "(", "named", ")", ":", "for", "variantd", "in", "six", ".", "itervalues", "(", "styled", ")", ":", "for", "weightd", "in", "six", ".", "itervalues", "(", "variantd", ")", ":", "for", "stretchd", "in", "six", ".", "itervalues", "(", "weightd", ")", ":", "for", "fname", "in", "six", ".", "itervalues", "(", "stretchd", ")", ":", "fnames", ".", "append", "(", "fname", ")", "return", "fnames" ]
flatten a ttfdict to all the filenames it contains .
train
false
12,526
def task_log_end_task(context, task_name, period_beginning, period_ending, host, errors, message=None): return IMPL.task_log_end_task(context, task_name, period_beginning, period_ending, host, errors, message)
[ "def", "task_log_end_task", "(", "context", ",", "task_name", ",", "period_beginning", ",", "period_ending", ",", "host", ",", "errors", ",", "message", "=", "None", ")", ":", "return", "IMPL", ".", "task_log_end_task", "(", "context", ",", "task_name", ",", "period_beginning", ",", "period_ending", ",", "host", ",", "errors", ",", "message", ")" ]
mark a task as complete for a given host/time period .
train
false
12,527
def read_trans(fname): (fid, tree, directory) = fiff_open(fname) with fid: for t in directory: if (t.kind == FIFF.FIFF_COORD_TRANS): tag = read_tag(fid, t.pos) break else: raise IOError('This does not seem to be a -trans.fif file.') trans = tag.data return trans
[ "def", "read_trans", "(", "fname", ")", ":", "(", "fid", ",", "tree", ",", "directory", ")", "=", "fiff_open", "(", "fname", ")", "with", "fid", ":", "for", "t", "in", "directory", ":", "if", "(", "t", ".", "kind", "==", "FIFF", ".", "FIFF_COORD_TRANS", ")", ":", "tag", "=", "read_tag", "(", "fid", ",", "t", ".", "pos", ")", "break", "else", ":", "raise", "IOError", "(", "'This does not seem to be a -trans.fif file.'", ")", "trans", "=", "tag", ".", "data", "return", "trans" ]
read a coordinate transformation from a -trans.fif file .
train
false
12,528
def build_graph_args(descr=None, epilog=None, parser=None, citations=None): if (parser is None): parser = KhmerArgumentParser(description=descr, epilog=epilog, citations=citations) parser.add_argument(u'--ksize', u'-k', type=int, default=DEFAULT_K, help=u'k-mer size to use') parser.add_argument(u'--n_tables', u'-N', type=int, default=DEFAULT_N_TABLES, help=u'number of tables to use in k-mer countgraph') parser.add_argument(u'-U', u'--unique-kmers', type=float, default=0, help=u'approximate number of unique kmers in the input set') parser.add_argument(u'--fp-rate', type=float, default=None, help=u'Override the automatic FP rate setting for the current script') group = parser.add_mutually_exclusive_group() group.add_argument(u'--max-tablesize', u'-x', type=float, default=DEFAULT_MAX_TABLESIZE, help=(u'upper bound on tablesize to use; overrides ' + u'--max-memory-usage/-M')) group.add_argument(u'-M', u'--max-memory-usage', type=memory_setting, help=(u'maximum amount of memory to use for data ' + u'structure')) return parser
[ "def", "build_graph_args", "(", "descr", "=", "None", ",", "epilog", "=", "None", ",", "parser", "=", "None", ",", "citations", "=", "None", ")", ":", "if", "(", "parser", "is", "None", ")", ":", "parser", "=", "KhmerArgumentParser", "(", "description", "=", "descr", ",", "epilog", "=", "epilog", ",", "citations", "=", "citations", ")", "parser", ".", "add_argument", "(", "u'--ksize'", ",", "u'-k'", ",", "type", "=", "int", ",", "default", "=", "DEFAULT_K", ",", "help", "=", "u'k-mer size to use'", ")", "parser", ".", "add_argument", "(", "u'--n_tables'", ",", "u'-N'", ",", "type", "=", "int", ",", "default", "=", "DEFAULT_N_TABLES", ",", "help", "=", "u'number of tables to use in k-mer countgraph'", ")", "parser", ".", "add_argument", "(", "u'-U'", ",", "u'--unique-kmers'", ",", "type", "=", "float", ",", "default", "=", "0", ",", "help", "=", "u'approximate number of unique kmers in the input set'", ")", "parser", ".", "add_argument", "(", "u'--fp-rate'", ",", "type", "=", "float", ",", "default", "=", "None", ",", "help", "=", "u'Override the automatic FP rate setting for the current script'", ")", "group", "=", "parser", ".", "add_mutually_exclusive_group", "(", ")", "group", ".", "add_argument", "(", "u'--max-tablesize'", ",", "u'-x'", ",", "type", "=", "float", ",", "default", "=", "DEFAULT_MAX_TABLESIZE", ",", "help", "=", "(", "u'upper bound on tablesize to use; overrides '", "+", "u'--max-memory-usage/-M'", ")", ")", "group", ".", "add_argument", "(", "u'-M'", ",", "u'--max-memory-usage'", ",", "type", "=", "memory_setting", ",", "help", "=", "(", "u'maximum amount of memory to use for data '", "+", "u'structure'", ")", ")", "return", "parser" ]
build an argumentparser with args for bloom filter based scripts .
train
false
12,530
@hook.command('twuser', 'twinfo') def twuser(text): if (tw_api is None): return try: user = tw_api.get_user(text) except tweepy.error.TweepError as e: if ('404' in e.reason): return 'Could not find user.' else: return 'Error: {}'.format(e.reason) if user.verified: prefix = '\\u2713' else: prefix = '' if user.location: loc_str = ' is located in \x02{}\x02 and'.format(user.location) else: loc_str = '' if user.description: desc_str = ' The users description is "{}"'.format(user.description) else: desc_str = '' return '{}@\x02{}\x02 ({}){} has \x02{:,}\x02 tweets and \x02{:,}\x02 followers.{}'.format(prefix, user.screen_name, user.name, loc_str, user.statuses_count, user.followers_count, desc_str)
[ "@", "hook", ".", "command", "(", "'twuser'", ",", "'twinfo'", ")", "def", "twuser", "(", "text", ")", ":", "if", "(", "tw_api", "is", "None", ")", ":", "return", "try", ":", "user", "=", "tw_api", ".", "get_user", "(", "text", ")", "except", "tweepy", ".", "error", ".", "TweepError", "as", "e", ":", "if", "(", "'404'", "in", "e", ".", "reason", ")", ":", "return", "'Could not find user.'", "else", ":", "return", "'Error: {}'", ".", "format", "(", "e", ".", "reason", ")", "if", "user", ".", "verified", ":", "prefix", "=", "'\\\\u2713'", "else", ":", "prefix", "=", "''", "if", "user", ".", "location", ":", "loc_str", "=", "' is located in \\x02{}\\x02 and'", ".", "format", "(", "user", ".", "location", ")", "else", ":", "loc_str", "=", "''", "if", "user", ".", "description", ":", "desc_str", "=", "' The users description is \"{}\"'", ".", "format", "(", "user", ".", "description", ")", "else", ":", "desc_str", "=", "''", "return", "'{}@\\x02{}\\x02 ({}){} has \\x02{:,}\\x02 tweets and \\x02{:,}\\x02 followers.{}'", ".", "format", "(", "prefix", ",", "user", ".", "screen_name", ",", "user", ".", "name", ",", "loc_str", ",", "user", ".", "statuses_count", ",", "user", ".", "followers_count", ",", "desc_str", ")" ]
twuser <user> -- get info on the twitter user <user> .
train
false
12,531
@register(u'forward-char') def forward_char(event): buff = event.current_buffer buff.cursor_position += buff.document.get_cursor_right_position(count=event.arg)
[ "@", "register", "(", "u'forward-char'", ")", "def", "forward_char", "(", "event", ")", ":", "buff", "=", "event", ".", "current_buffer", "buff", ".", "cursor_position", "+=", "buff", ".", "document", ".", "get_cursor_right_position", "(", "count", "=", "event", ".", "arg", ")" ]
move forward a character .
train
true
12,532
def public(type=None, **kwargs): def wrapper(function): '\n Just return the function (for now)\n ' return function return wrapper
[ "def", "public", "(", "type", "=", "None", ",", "**", "kwargs", ")", ":", "def", "wrapper", "(", "function", ")", ":", "return", "function", "return", "wrapper" ]
decorator to declare which methods are publicly accessible as http requests .
train
false
12,533
def init_checks_registry(): mod = inspect.getmodule(register_check) for (name, function) in inspect.getmembers(mod, inspect.isfunction): register_check(function)
[ "def", "init_checks_registry", "(", ")", ":", "mod", "=", "inspect", ".", "getmodule", "(", "register_check", ")", "for", "(", "name", ",", "function", ")", "in", "inspect", ".", "getmembers", "(", "mod", ",", "inspect", ".", "isfunction", ")", ":", "register_check", "(", "function", ")" ]
register all globally visible functions .
train
true
12,534
def _writeline(text): width = util.getxy().width spaces = ((width - len(text)) - 1) if mswin: spaces = (-1) text = text[:(width - 3)] sys.stdout.write((((' ' + text) + (' ' * spaces)) + '\r')) sys.stdout.flush()
[ "def", "_writeline", "(", "text", ")", ":", "width", "=", "util", ".", "getxy", "(", ")", ".", "width", "spaces", "=", "(", "(", "width", "-", "len", "(", "text", ")", ")", "-", "1", ")", "if", "mswin", ":", "spaces", "=", "(", "-", "1", ")", "text", "=", "text", "[", ":", "(", "width", "-", "3", ")", "]", "sys", ".", "stdout", ".", "write", "(", "(", "(", "(", "' '", "+", "text", ")", "+", "(", "' '", "*", "spaces", ")", ")", "+", "'\\r'", ")", ")", "sys", ".", "stdout", ".", "flush", "(", ")" ]
print text on same line .
train
false
12,535
def Sorted(lst): sorted_list = lst[:] sorted_list.sort() return sorted_list
[ "def", "Sorted", "(", "lst", ")", ":", "sorted_list", "=", "lst", "[", ":", "]", "sorted_list", ".", "sort", "(", ")", "return", "sorted_list" ]
equivalent of sorted() .
train
false
12,537
def read_checkpoint(out_fp): pickle_fh = open(out_fp, 'r') return pickle.load(pickle_fh)
[ "def", "read_checkpoint", "(", "out_fp", ")", ":", "pickle_fh", "=", "open", "(", "out_fp", ",", "'r'", ")", "return", "pickle", ".", "load", "(", "pickle_fh", ")" ]
read in information stored in a checkpoint ; out_fp : the path to the checkpoint file .
train
false
12,538
def new(rsa_key): return PKCS115_SigScheme(rsa_key)
[ "def", "new", "(", "rsa_key", ")", ":", "return", "PKCS115_SigScheme", "(", "rsa_key", ")" ]
create a pkcs#1 v1.5 signature scheme object that can sign or verify messages with the given rsa key .
train
false
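a minimal usage sketch, assuming the pycrypto layout this snippet comes from (Crypto.Signature.PKCS1_v1_5, whose module-level new() is the function above); the key and message are illustrative:

    from Crypto.PublicKey import RSA
    from Crypto.Signature import PKCS1_v1_5
    from Crypto.Hash import SHA

    key = RSA.generate(2048)                 # illustrative throwaway key
    h = SHA.new(b'message to sign')
    signer = PKCS1_v1_5.new(key)             # returns a PKCS115_SigScheme
    signature = signer.sign(h)
    assert PKCS1_v1_5.new(key.publickey()).verify(h, signature)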
12,539
@testing.requires_testing_data def test_hpi_info(): tempdir = _TempDir() temp_name = op.join(tempdir, 'temp_raw.fif') for fname in (chpi_fif_fname, sss_fif_fname): raw = read_raw_fif(fname, allow_maxshield='yes').crop(0, 0.1) assert_true((len(raw.info['hpi_subsystem']) > 0)) raw.save(temp_name, overwrite=True) info = read_info(temp_name) assert_equal(len(info['hpi_subsystem']), len(raw.info['hpi_subsystem']))
[ "@", "testing", ".", "requires_testing_data", "def", "test_hpi_info", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "temp_name", "=", "op", ".", "join", "(", "tempdir", ",", "'temp_raw.fif'", ")", "for", "fname", "in", "(", "chpi_fif_fname", ",", "sss_fif_fname", ")", ":", "raw", "=", "read_raw_fif", "(", "fname", ",", "allow_maxshield", "=", "'yes'", ")", ".", "crop", "(", "0", ",", "0.1", ")", "assert_true", "(", "(", "len", "(", "raw", ".", "info", "[", "'hpi_subsystem'", "]", ")", ">", "0", ")", ")", "raw", ".", "save", "(", "temp_name", ",", "overwrite", "=", "True", ")", "info", "=", "read_info", "(", "temp_name", ")", "assert_equal", "(", "len", "(", "info", "[", "'hpi_subsystem'", "]", ")", ",", "len", "(", "raw", ".", "info", "[", "'hpi_subsystem'", "]", ")", ")" ]
test getting hpi info .
train
false
12,540
def make_parameterization_form(query_str): variables = find_variables(query_str) if (len(variables) > 0): class Form(forms.Form, ): for name in sorted(variables): locals()[name] = forms.CharField(required=True) return Form else: return None
[ "def", "make_parameterization_form", "(", "query_str", ")", ":", "variables", "=", "find_variables", "(", "query_str", ")", "if", "(", "len", "(", "variables", ")", ">", "0", ")", ":", "class", "Form", "(", "forms", ".", "Form", ",", ")", ":", "for", "name", "in", "sorted", "(", "variables", ")", ":", "locals", "(", ")", "[", "name", "]", "=", "forms", ".", "CharField", "(", "required", "=", "True", ")", "return", "Form", "else", ":", "return", "None" ]
creates a django form on the fly with arguments from the query .
train
false
12,541
def getIsPointAway(minimumDistance, point, spongeCircles): for otherSpongeCircle in spongeCircles: if (abs((otherSpongeCircle.center - point)) < minimumDistance): return False return True
[ "def", "getIsPointAway", "(", "minimumDistance", ",", "point", ",", "spongeCircles", ")", ":", "for", "otherSpongeCircle", "in", "spongeCircles", ":", "if", "(", "abs", "(", "(", "otherSpongeCircle", ".", "center", "-", "point", ")", ")", "<", "minimumDistance", ")", ":", "return", "False", "return", "True" ]
determine if the point is at least the minimumdistance away from other points .
train
false
12,544
def create_mock_key_provider(key_list): public_keys = {} for k in key_list: public_keys[k] = {'Fingerprint': k, 'Value': 'ffaa00'} key_provider = Mock() key_provider.get_public_keys.return_value = public_keys return key_provider
[ "def", "create_mock_key_provider", "(", "key_list", ")", ":", "public_keys", "=", "{", "}", "for", "k", "in", "key_list", ":", "public_keys", "[", "k", "]", "=", "{", "'Fingerprint'", ":", "k", ",", "'Value'", ":", "'ffaa00'", "}", "key_provider", "=", "Mock", "(", ")", "key_provider", ".", "get_public_keys", ".", "return_value", "=", "public_keys", "return", "key_provider" ]
creates a mock key provider that yields keys for each in key_list .
train
false
12,545
def strip_port_from_host(host): return host.split(':')[0]
[ "def", "strip_port_from_host", "(", "host", ")", ":", "return", "host", ".", "split", "(", "':'", ")", "[", "0", "]" ]
strips port number from host .
train
false
12,546
def create_issues_in_bulk(bulk_data, callback=None, precall=None, **additional_fields): issues = get_issues_from_bulk(bulk_data, **additional_fields) disconnect_issues_signals() try: db.save_in_bulk(issues, callback, precall) finally: connect_issues_signals() return issues
[ "def", "create_issues_in_bulk", "(", "bulk_data", ",", "callback", "=", "None", ",", "precall", "=", "None", ",", "**", "additional_fields", ")", ":", "issues", "=", "get_issues_from_bulk", "(", "bulk_data", ",", "**", "additional_fields", ")", "disconnect_issues_signals", "(", ")", "try", ":", "db", ".", "save_in_bulk", "(", "issues", ",", "callback", ",", "precall", ")", "finally", ":", "connect_issues_signals", "(", ")", "return", "issues" ]
create issues from bulk_data .
train
false
12,548
def hosts_remove(hostsfile='/etc/hosts', entries=None): with salt.utils.fopen(hostsfile, 'r') as fp_: hosts = fp_.read() host_list = entries.split(',') with salt.utils.fopen(hostsfile, 'w') as out_file: for line in hosts.splitlines(): if ((not line) or line.strip().startswith('#')): out_file.write('{0}\n'.format(line)) continue comps = line.split() for host in host_list: if (host in comps[1:]): comps.remove(host) if (len(comps) > 1): out_file.write(' '.join(comps)) out_file.write('\n')
[ "def", "hosts_remove", "(", "hostsfile", "=", "'/etc/hosts'", ",", "entries", "=", "None", ")", ":", "with", "salt", ".", "utils", ".", "fopen", "(", "hostsfile", ",", "'r'", ")", "as", "fp_", ":", "hosts", "=", "fp_", ".", "read", "(", ")", "host_list", "=", "entries", ".", "split", "(", "','", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "hostsfile", ",", "'w'", ")", "as", "out_file", ":", "for", "line", "in", "hosts", ".", "splitlines", "(", ")", ":", "if", "(", "(", "not", "line", ")", "or", "line", ".", "strip", "(", ")", ".", "startswith", "(", "'#'", ")", ")", ":", "out_file", ".", "write", "(", "'{0}\\n'", ".", "format", "(", "line", ")", ")", "continue", "comps", "=", "line", ".", "split", "(", ")", "for", "host", "in", "host_list", ":", "if", "(", "host", "in", "comps", "[", "1", ":", "]", ")", ":", "comps", ".", "remove", "(", "host", ")", "if", "(", "len", "(", "comps", ")", ">", "1", ")", ":", "out_file", ".", "write", "(", "' '", ".", "join", "(", "comps", ")", ")", "out_file", ".", "write", "(", "'\\n'", ")" ]
remove a host from the /etc/hosts file .
train
true
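if this is salt's dnsutil execution module (which ships a hosts_remove with this signature), a typical invocation from the master would look like the following; the target and hostnames are illustrative:

    salt '*' dnsutil.hosts_remove /etc/hosts ad1.example.com,ad2.example.com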
12,549
def make_identity_dict(rng): res = {} for i in rng: res[i] = i return res
[ "def", "make_identity_dict", "(", "rng", ")", ":", "res", "=", "{", "}", "for", "i", "in", "rng", ":", "res", "[", "i", "]", "=", "i", "return", "res" ]
make_identity_dict(rng) -> dict . return a dictionary where elements of the rng sequence are mapped to themselves .
train
false
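a quick usage sketch; any iterable of hashable values works as rng:

    assert make_identity_dict(range(3)) == {0: 0, 1: 1, 2: 2}
    assert make_identity_dict('ab') == {'a': 'a', 'b': 'b'}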
12,550
def _get_final_tree(s): tokenized = tokenizer.parseString(s) parsed = parser.parse(tokenized) merged = _merge_children(parsed, {'S', 'group'}) final = _clean_parse_tree(merged) return final
[ "def", "_get_final_tree", "(", "s", ")", ":", "tokenized", "=", "tokenizer", ".", "parseString", "(", "s", ")", "parsed", "=", "parser", ".", "parse", "(", "tokenized", ")", "merged", "=", "_merge_children", "(", "parsed", ",", "{", "'S'", ",", "'group'", "}", ")", "final", "=", "_clean_parse_tree", "(", "merged", ")", "return", "final" ]
return final tree after merge and clean .
train
false
12,551
@register.filter def is_equal_or_parent_of(page1, page2): try: return page1.is_ancestor_of(page2, include_self=True) except (AttributeError, ValueError): return False
[ "@", "register", ".", "filter", "def", "is_equal_or_parent_of", "(", "page1", ",", "page2", ")", ":", "try", ":", "return", "page1", ".", "is_ancestor_of", "(", "page2", ",", "include_self", "=", "True", ")", "except", "(", "AttributeError", ",", "ValueError", ")", ":", "return", "False" ]
determines whether a given page is equal to or the parent of another page .
train
false
12,552
def get_download_pipeline(in_fd, out_fd, gpg=False, lzop=True): commands = [] if gpg: commands.append(GPGDecryptionFilter()) if lzop: commands.append(LZODecompressionFilter()) return Pipeline(commands, in_fd, out_fd)
[ "def", "get_download_pipeline", "(", "in_fd", ",", "out_fd", ",", "gpg", "=", "False", ",", "lzop", "=", "True", ")", ":", "commands", "=", "[", "]", "if", "gpg", ":", "commands", ".", "append", "(", "GPGDecryptionFilter", "(", ")", ")", "if", "lzop", ":", "commands", ".", "append", "(", "LZODecompressionFilter", "(", ")", ")", "return", "Pipeline", "(", "commands", ",", "in_fd", ",", "out_fd", ")" ]
create a pipeline to process a file after downloading .
train
true
12,553
def relaxed_caveman_graph(l, k, p, seed=None): if (not (seed is None)): random.seed(seed) G = nx.caveman_graph(l, k) nodes = list(G) G.name = ('relaxed_caveman_graph (%s,%s,%s)' % (l, k, p)) for (u, v) in G.edges(): if (random.random() < p): x = random.choice(nodes) if G.has_edge(u, x): continue G.remove_edge(u, v) G.add_edge(u, x) return G
[ "def", "relaxed_caveman_graph", "(", "l", ",", "k", ",", "p", ",", "seed", "=", "None", ")", ":", "if", "(", "not", "(", "seed", "is", "None", ")", ")", ":", "random", ".", "seed", "(", "seed", ")", "G", "=", "nx", ".", "caveman_graph", "(", "l", ",", "k", ")", "nodes", "=", "list", "(", "G", ")", "G", ".", "name", "=", "(", "'relaxed_caveman_graph (%s,%s,%s)'", "%", "(", "l", ",", "k", ",", "p", ")", ")", "for", "(", "u", ",", "v", ")", "in", "G", ".", "edges", "(", ")", ":", "if", "(", "random", ".", "random", "(", ")", "<", "p", ")", ":", "x", "=", "random", ".", "choice", "(", "nodes", ")", "if", "G", ".", "has_edge", "(", "u", ",", "x", ")", ":", "continue", "G", ".", "remove_edge", "(", "u", ",", "v", ")", "G", ".", "add_edge", "(", "u", ",", "x", ")", "return", "G" ]
return a relaxed caveman graph .
train
false
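a minimal usage sketch, assuming the networkx package this generator ships in; the parameters are illustrative:

    import networkx as nx
    # 3 cliques of 5 nodes each; every edge is rewired to a random node with probability 0.2
    G = nx.relaxed_caveman_graph(3, 5, 0.2, seed=42)
    print(G.number_of_nodes())  # 15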
12,554
@deprecated('The function wishart_log_det is deprecated in 0.18 and will be removed in 0.20.') def wishart_log_det(a, b, detB, n_features): l = np.sum(digamma((0.5 * (a - np.arange((-1), (n_features - 1)))))) l += (n_features * np.log(2)) return (l + detB)
[ "@", "deprecated", "(", "'The function wishart_log_det is deprecated in 0.18 and will be removed in 0.20.'", ")", "def", "wishart_log_det", "(", "a", ",", "b", ",", "detB", ",", "n_features", ")", ":", "l", "=", "np", ".", "sum", "(", "digamma", "(", "(", "0.5", "*", "(", "a", "-", "np", ".", "arange", "(", "(", "-", "1", ")", ",", "(", "n_features", "-", "1", ")", ")", ")", ")", ")", ")", "l", "+=", "(", "n_features", "*", "np", ".", "log", "(", "2", ")", ")", "return", "(", "l", "+", "detB", ")" ]
expected value of the logarithm of the determinant of a wishart-distributed random variable with the specified parameters .
train
false
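the snippet above evaluates the usual wishart expected-log-determinant identity; with p = n_features and detB the log-determinant term supplied by the caller, the sum it computes can be written as (a sketch of the identity, not quoted from the source):

    \mathbb{E}[\ln|W|] = \sum_{i=1}^{p} \psi\!\left(\frac{a + 2 - i}{2}\right) + p\ln 2 + \text{detB}

where \psi is the digamma function.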
12,556
def createTree(num_nodes): root = Node((num_nodes // 2)) for i in range(num_nodes): if (i == (num_nodes // 2)): continue root = insert(i, root) return root
[ "def", "createTree", "(", "num_nodes", ")", ":", "root", "=", "Node", "(", "(", "num_nodes", "//", "2", ")", ")", "for", "i", "in", "range", "(", "num_nodes", ")", ":", "if", "(", "i", "==", "(", "num_nodes", "//", "2", ")", ")", ":", "continue", "root", "=", "insert", "(", "i", ",", "root", ")", "return", "root" ]
a function created to construct the tree from 0 to num_nodes ; the root will be the middle number of num_nodes .
train
false
12,557
@require_admin_context def volume_detached(context, volume_id, attachment_id): remain_attachment = True session = get_session() with session.begin(): try: attachment = _attachment_get(context, attachment_id, session=session) except exception.VolumeAttachmentNotFound: attachment_updates = None attachment = None if attachment: now = timeutils.utcnow() attachment_updates = {'attach_status': fields.VolumeAttachStatus.DETACHED, 'detach_time': now, 'deleted': True, 'deleted_at': now, 'updated_at': literal_column('updated_at')} attachment.update(attachment_updates) attachment.save(session=session) del attachment_updates['updated_at'] attachment_list = None volume_ref = _volume_get(context, volume_id, session=session) volume_updates = {'updated_at': literal_column('updated_at')} if (not volume_ref.volume_attachment): attachment_list = volume_attachment_get_all_by_volume_id(context, volume_id, session=session) remain_attachment = False if (attachment_list and (len(attachment_list) > 0)): remain_attachment = True if (not remain_attachment): if (((not volume_ref.migration_status) and (not (volume_ref.status == 'uploading'))) or (volume_ref.migration_status in ('success', 'error'))): volume_updates['status'] = 'available' volume_updates['attach_status'] = fields.VolumeAttachStatus.DETACHED else: volume_updates['status'] = 'in-use' volume_updates['attach_status'] = fields.VolumeAttachStatus.ATTACHED volume_ref.update(volume_updates) volume_ref.save(session=session) del volume_updates['updated_at'] return (volume_updates, attachment_updates)
[ "@", "require_admin_context", "def", "volume_detached", "(", "context", ",", "volume_id", ",", "attachment_id", ")", ":", "remain_attachment", "=", "True", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "try", ":", "attachment", "=", "_attachment_get", "(", "context", ",", "attachment_id", ",", "session", "=", "session", ")", "except", "exception", ".", "VolumeAttachmentNotFound", ":", "attachment_updates", "=", "None", "attachment", "=", "None", "if", "attachment", ":", "now", "=", "timeutils", ".", "utcnow", "(", ")", "attachment_updates", "=", "{", "'attach_status'", ":", "fields", ".", "VolumeAttachStatus", ".", "DETACHED", ",", "'detach_time'", ":", "now", ",", "'deleted'", ":", "True", ",", "'deleted_at'", ":", "now", ",", "'updated_at'", ":", "literal_column", "(", "'updated_at'", ")", "}", "attachment", ".", "update", "(", "attachment_updates", ")", "attachment", ".", "save", "(", "session", "=", "session", ")", "del", "attachment_updates", "[", "'updated_at'", "]", "attachment_list", "=", "None", "volume_ref", "=", "_volume_get", "(", "context", ",", "volume_id", ",", "session", "=", "session", ")", "volume_updates", "=", "{", "'updated_at'", ":", "literal_column", "(", "'updated_at'", ")", "}", "if", "(", "not", "volume_ref", ".", "volume_attachment", ")", ":", "attachment_list", "=", "volume_attachment_get_all_by_volume_id", "(", "context", ",", "volume_id", ",", "session", "=", "session", ")", "remain_attachment", "=", "False", "if", "(", "attachment_list", "and", "(", "len", "(", "attachment_list", ")", ">", "0", ")", ")", ":", "remain_attachment", "=", "True", "if", "(", "not", "remain_attachment", ")", ":", "if", "(", "(", "(", "not", "volume_ref", ".", "migration_status", ")", "and", "(", "not", "(", "volume_ref", ".", "status", "==", "'uploading'", ")", ")", ")", "or", "(", "volume_ref", ".", "migration_status", "in", "(", "'success'", ",", "'error'", ")", ")", ")", ":", "volume_updates", "[", "'status'", "]", "=", "'available'", "volume_updates", "[", "'attach_status'", "]", "=", "fields", ".", "VolumeAttachStatus", ".", "DETACHED", "else", ":", "volume_updates", "[", "'status'", "]", "=", "'in-use'", "volume_updates", "[", "'attach_status'", "]", "=", "fields", ".", "VolumeAttachStatus", ".", "ATTACHED", "volume_ref", ".", "update", "(", "volume_updates", ")", "volume_ref", ".", "save", "(", "session", "=", "session", ")", "del", "volume_updates", "[", "'updated_at'", "]", "return", "(", "volume_updates", ",", "attachment_updates", ")" ]
ensure that a volume is set as detached .
train
false
12,558
def date_and_time(): start_date = datetime.now() time_back = timedelta(seconds=random.randint(0, 473040000)) return (start_date - time_back)
[ "def", "date_and_time", "(", ")", ":", "start_date", "=", "datetime", ".", "now", "(", ")", "time_back", "=", "timedelta", "(", "seconds", "=", "random", ".", "randint", "(", "0", ",", "473040000", ")", ")", "return", "(", "start_date", "-", "time_back", ")" ]
fake date/time .
train
false
12,561
def alloca_once(builder, ty, size=None, name='', zfill=False): if isinstance(size, utils.INT_TYPES): size = ir.Constant(intp_t, size) with builder.goto_entry_block(): ptr = builder.alloca(ty, size=size, name=name) if zfill: builder.store(ty(None), ptr) return ptr
[ "def", "alloca_once", "(", "builder", ",", "ty", ",", "size", "=", "None", ",", "name", "=", "''", ",", "zfill", "=", "False", ")", ":", "if", "isinstance", "(", "size", ",", "utils", ".", "INT_TYPES", ")", ":", "size", "=", "ir", ".", "Constant", "(", "intp_t", ",", "size", ")", "with", "builder", ".", "goto_entry_block", "(", ")", ":", "ptr", "=", "builder", ".", "alloca", "(", "ty", ",", "size", "=", "size", ",", "name", "=", "name", ")", "if", "zfill", ":", "builder", ".", "store", "(", "ty", "(", "None", ")", ",", "ptr", ")", "return", "ptr" ]
allocate stack memory at the entry block of the current function pointed by builder with the llvm type ty .
train
false
12,562
def _StringListConverter(s): return [part.strip() for part in s.split(',')]
[ "def", "_StringListConverter", "(", "s", ")", ":", "return", "[", "part", ".", "strip", "(", ")", "for", "part", "in", "s", ".", "split", "(", "','", ")", "]" ]
option value converter for a comma-separated list of strings .
train
false
12,563
def img_check(elem): img = elem.xpath('img') if img: return 'Y' else: text = elem.text_content().strip() if text: return 'Y' else: return 'N'
[ "def", "img_check", "(", "elem", ")", ":", "img", "=", "elem", ".", "xpath", "(", "'img'", ")", "if", "img", ":", "return", "'Y'", "else", ":", "text", "=", "elem", ".", "text_content", "(", ")", ".", "strip", "(", ")", "if", "text", ":", "return", "'Y'", "else", ":", "return", "'N'" ]
checks if the cell contains an image or text and returns 'Y' or 'N' ; used to see if a calendar was modified , revised or cancelled .
train
false
12,564
def _get_interfaces(): global _INTERFACES if (not _INTERFACES): _INTERFACES = salt.utils.network.interfaces() return _INTERFACES
[ "def", "_get_interfaces", "(", ")", ":", "global", "_INTERFACES", "if", "(", "not", "_INTERFACES", ")", ":", "_INTERFACES", "=", "salt", ".", "utils", ".", "network", ".", "interfaces", "(", ")", "return", "_INTERFACES" ]
provide a dict of the connected interfaces and their ip addresses .
train
false
12,568
def edit_tab_handler(course_item, request): tab_id_locator = request.json['tab_id_locator'] tab = get_tab_by_tab_id_locator(course_item.tabs, tab_id_locator) if (tab is None): return JsonResponse({'error': "Tab with id_locator '{0}' does not exist.".format(tab_id_locator)}, status=400) if ('is_hidden' in request.json): tab.is_hidden = request.json['is_hidden'] modulestore().update_item(course_item, request.user.id) else: raise NotImplementedError('Unsupported request to edit tab: {0}'.format(request.json)) return JsonResponse()
[ "def", "edit_tab_handler", "(", "course_item", ",", "request", ")", ":", "tab_id_locator", "=", "request", ".", "json", "[", "'tab_id_locator'", "]", "tab", "=", "get_tab_by_tab_id_locator", "(", "course_item", ".", "tabs", ",", "tab_id_locator", ")", "if", "(", "tab", "is", "None", ")", ":", "return", "JsonResponse", "(", "{", "'error'", ":", "\"Tab with id_locator '{0}' does not exist.\"", ".", "format", "(", "tab_id_locator", ")", "}", ",", "status", "=", "400", ")", "if", "(", "'is_hidden'", "in", "request", ".", "json", ")", ":", "tab", ".", "is_hidden", "=", "request", ".", "json", "[", "'is_hidden'", "]", "modulestore", "(", ")", ".", "update_item", "(", "course_item", ",", "request", ".", "user", ".", "id", ")", "else", ":", "raise", "NotImplementedError", "(", "'Unsupported request to edit tab: {0}'", ".", "format", "(", "request", ".", "json", ")", ")", "return", "JsonResponse", "(", ")" ]
helper function for handling requests to edit settings of a single tab .
train
false
12,569
def check_feature_enabled(feature_name): def _check_feature_enabled(view_func): def _decorator(request, *args, **kwargs): if (not settings.FEATURES.get(feature_name, False)): return HttpResponseBadRequest() return view_func(request, *args, **kwargs) return wraps(view_func)(_decorator) return _check_feature_enabled
[ "def", "check_feature_enabled", "(", "feature_name", ")", ":", "def", "_check_feature_enabled", "(", "view_func", ")", ":", "def", "_decorator", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "settings", ".", "FEATURES", ".", "get", "(", "feature_name", ",", "False", ")", ")", ":", "return", "HttpResponseBadRequest", "(", ")", "return", "view_func", "(", "request", ",", "*", "args", ",", "**", "kwargs", ")", "return", "wraps", "(", "view_func", ")", "(", "_decorator", ")", "return", "_check_feature_enabled" ]
ensure the specified feature is turned on .
train
false
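a sketch of applying the decorator; the feature flag name and view are hypothetical, not from the source:

    @check_feature_enabled('ENABLE_SOME_FEATURE')
    def my_view(request):
        # only reached when settings.FEATURES['ENABLE_SOME_FEATURE'] is truthy;
        # otherwise the wrapper short-circuits with HttpResponseBadRequest()
        ...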
12,570
def simple_class_factory(model, attrs): return model
[ "def", "simple_class_factory", "(", "model", ",", "attrs", ")", ":", "return", "model" ]
used to unpickle models without deferred fields .
train
false
12,571
def classdot2class(path): if ((path[0] == 'L') and (path[(-1)] == ';')): print ('WARNING: %s already a Lclass; name' % path) return path new_name = (('L' + path.replace('.', '/')) + ';') return new_name
[ "def", "classdot2class", "(", "path", ")", ":", "if", "(", "(", "path", "[", "0", "]", "==", "'L'", ")", "and", "(", "path", "[", "(", "-", "1", ")", "]", "==", "';'", ")", ")", ":", "print", "(", "'WARNING: %s already a Lclass; name'", "%", "path", ")", "return", "path", "new_name", "=", "(", "(", "'L'", "+", "path", ".", "replace", "(", "'.'", ",", "'/'", ")", ")", "+", "';'", ")", "return", "new_name" ]
convert a dot-separated class path such as an android class name into the corresponding Lclass; descriptor form .
train
false
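a quick usage sketch; the class name is illustrative:

    assert classdot2class('android.content.Intent') == 'Landroid/content/Intent;'
    classdot2class('Landroid/content/Intent;')  # warns and returns the input unchanged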
12,572
def get_addon_count(name): return Addon.unfiltered.filter(name__localized_string=name).count()
[ "def", "get_addon_count", "(", "name", ")", ":", "return", "Addon", ".", "unfiltered", ".", "filter", "(", "name__localized_string", "=", "name", ")", ".", "count", "(", ")" ]
return the number of addons with the given name .
train
false
12,573
def user_with_role(user, role): return {'id': user.id, 'username': user.username, 'email': user.email, 'role': role}
[ "def", "user_with_role", "(", "user", ",", "role", ")", ":", "return", "{", "'id'", ":", "user", ".", "id", ",", "'username'", ":", "user", ".", "username", ",", "'email'", ":", "user", ".", "email", ",", "'role'", ":", "role", "}" ]
build user representation with attached role .
train
false
12,575
def sos2tf(sos): sos = np.asarray(sos) b = [1.0] a = [1.0] n_sections = sos.shape[0] for section in range(n_sections): b = np.polymul(b, sos[section, :3]) a = np.polymul(a, sos[section, 3:]) return (b, a)
[ "def", "sos2tf", "(", "sos", ")", ":", "sos", "=", "np", ".", "asarray", "(", "sos", ")", "b", "=", "[", "1.0", "]", "a", "=", "[", "1.0", "]", "n_sections", "=", "sos", ".", "shape", "[", "0", "]", "for", "section", "in", "range", "(", "n_sections", ")", ":", "b", "=", "np", ".", "polymul", "(", "b", ",", "sos", "[", "section", ",", ":", "3", "]", ")", "a", "=", "np", ".", "polymul", "(", "a", ",", "sos", "[", "section", ",", "3", ":", "]", ")", "return", "(", "b", ",", "a", ")" ]
return a single transfer function from a series of second-order sections . parameters : sos : array_like , array of second-order filter coefficients .
train
false
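a minimal usage sketch, assuming scipy's signal module, where this function lives as scipy.signal.sos2tf:

    from scipy import signal
    sos = signal.butter(4, 0.2, output='sos')  # 4th-order lowpass as second-order sections
    b, a = signal.sos2tf(sos)                  # collapse to one numerator/denominator pair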
12,576
@lbheartbeat.get(permission=NO_PERMISSION_REQUIRED) def get_lbheartbeat(request): status = {} return status
[ "@", "lbheartbeat", ".", "get", "(", "permission", "=", "NO_PERMISSION_REQUIRED", ")", "def", "get_lbheartbeat", "(", "request", ")", ":", "status", "=", "{", "}", "return", "status" ]
return successful healthy response .
train
false
12,577
def inject_passwords(registry, xml_parent, data): eib = XML.SubElement(xml_parent, 'EnvInjectPasswordWrapper') XML.SubElement(eib, 'injectGlobalPasswords').text = str(data.get('global', False)).lower() XML.SubElement(eib, 'maskPasswordParameters').text = str(data.get('mask-password-params', False)).lower() entries = XML.SubElement(eib, 'passwordEntries') passwords = data.get('job-passwords', []) if passwords: for password in passwords: entry = XML.SubElement(entries, 'EnvInjectPasswordEntry') XML.SubElement(entry, 'name').text = password['name'] XML.SubElement(entry, 'value').text = password['password']
[ "def", "inject_passwords", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "eib", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'EnvInjectPasswordWrapper'", ")", "XML", ".", "SubElement", "(", "eib", ",", "'injectGlobalPasswords'", ")", ".", "text", "=", "str", "(", "data", ".", "get", "(", "'global'", ",", "False", ")", ")", ".", "lower", "(", ")", "XML", ".", "SubElement", "(", "eib", ",", "'maskPasswordParameters'", ")", ".", "text", "=", "str", "(", "data", ".", "get", "(", "'mask-password-params'", ",", "False", ")", ")", ".", "lower", "(", ")", "entries", "=", "XML", ".", "SubElement", "(", "eib", ",", "'passwordEntries'", ")", "passwords", "=", "data", ".", "get", "(", "'job-passwords'", ",", "[", "]", ")", "if", "passwords", ":", "for", "password", "in", "passwords", ":", "entry", "=", "XML", ".", "SubElement", "(", "entries", ",", "'EnvInjectPasswordEntry'", ")", "XML", ".", "SubElement", "(", "entry", ",", "'name'", ")", ".", "text", "=", "password", "[", "'name'", "]", "XML", ".", "SubElement", "(", "entry", ",", "'value'", ")", ".", "text", "=", "password", "[", "'password'", "]" ]
yaml: inject-passwords . inject passwords to the build as environment variables .
train
false
12,578
def draft_node_constructor(module, url, parent_url, location=None, parent_location=None, index=None): draft_node = namedtuple('draft_node', ['module', 'location', 'url', 'parent_location', 'parent_url', 'index']) return draft_node(module, location, url, parent_location, parent_url, index)
[ "def", "draft_node_constructor", "(", "module", ",", "url", ",", "parent_url", ",", "location", "=", "None", ",", "parent_location", "=", "None", ",", "index", "=", "None", ")", ":", "draft_node", "=", "namedtuple", "(", "'draft_node'", ",", "[", "'module'", ",", "'location'", ",", "'url'", ",", "'parent_location'", ",", "'parent_url'", ",", "'index'", "]", ")", "return", "draft_node", "(", "module", ",", "location", ",", "url", ",", "parent_location", ",", "parent_url", ",", "index", ")" ]
constructs a draft_node namedtuple with defaults .
train
false