id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
33,358
def database_get_image_dimensions(file, close=False, dimensions=None): storage_hash = utils.get_storage_hash(file.storage) dimensions = None dimensions_cache = None try: thumbnail = models.Thumbnail.objects.select_related('dimensions').get(storage_hash=storage_hash, name=file.name) except models.Thumbnail.DoesNotExist: thumbnail = None else: try: dimensions_cache = thumbnail.dimensions except models.ThumbnailDimensions.DoesNotExist: dimensions_cache = None if dimensions_cache: return (dimensions_cache.width, dimensions_cache.height) dimensions = get_image_dimensions(file, close=close) if (settings.THUMBNAIL_CACHE_DIMENSIONS and thumbnail): models.ThumbnailDimensions.objects.get_or_create(thumbnail=thumbnail, defaults={'width': dimensions[0], 'height': dimensions[1]}) return dimensions
[ "def", "database_get_image_dimensions", "(", "file", ",", "close", "=", "False", ",", "dimensions", "=", "None", ")", ":", "storage_hash", "=", "utils", ".", "get_storage_hash", "(", "file", ".", "storage", ")", "dimensions", "=", "None", "dimensions_cache", "...
returns the of an image .
train
true
33,359
def channel_and_http_session(func): @http_session @channel_session @functools.wraps(func) def inner(message, *args, **kwargs): if ((message.http_session is not None) and (settings.SESSION_COOKIE_NAME not in message.channel_session)): message.channel_session[settings.SESSION_COOKIE_NAME] = message.http_session.session_key elif ((message.http_session is None) and (settings.SESSION_COOKIE_NAME in message.channel_session)): session_engine = import_module(settings.SESSION_ENGINE) session = session_engine.SessionStore(session_key=message.channel_session[settings.SESSION_COOKIE_NAME]) message.http_session = session return func(message, *args, **kwargs) return inner
[ "def", "channel_and_http_session", "(", "func", ")", ":", "@", "http_session", "@", "channel_session", "@", "functools", ".", "wraps", "(", "func", ")", "def", "inner", "(", "message", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "(", "m...
enables both the channel_session and http_session .
train
false
33,360
def _compute_diff(configured, expected): diff = {'add': {}, 'update': {}, 'remove': {}} configured_users = set(configured.keys()) expected_users = set(expected.keys()) add_usernames = (expected_users - configured_users) remove_usernames = (configured_users - expected_users) common_usernames = (expected_users & configured_users) add = dict(((username, expected.get(username)) for username in add_usernames)) remove = dict(((username, configured.get(username)) for username in remove_usernames)) update = {} for username in common_usernames: user_configuration = configured.get(username) user_expected = expected.get(username) if (user_configuration == user_expected): continue update[username] = {} for (field, field_value) in six.iteritems(user_expected): if (user_configuration.get(field) != field_value): update[username][field] = field_value diff.update({'add': add, 'update': update, 'remove': remove}) return diff
[ "def", "_compute_diff", "(", "configured", ",", "expected", ")", ":", "diff", "=", "{", "'add'", ":", "{", "}", ",", "'update'", ":", "{", "}", ",", "'remove'", ":", "{", "}", "}", "configured_users", "=", "set", "(", "configured", ".", "keys", "(", ...
compares configured endpoints with the expected configuration and returns the differences .
train
true
33,362
def str_get(arr, i): f = (lambda x: (x[i] if (len(x) > i) else np.nan)) return _na_map(f, arr)
[ "def", "str_get", "(", "arr", ",", "i", ")", ":", "f", "=", "(", "lambda", "x", ":", "(", "x", "[", "i", "]", "if", "(", "len", "(", "x", ")", ">", "i", ")", "else", "np", ".", "nan", ")", ")", "return", "_na_map", "(", "f", ",", "arr", ...
extract element from lists .
train
false
33,363
def name_by_pid(pid): process_name = None if stem.util.proc.is_available(): try: process_name = stem.util.proc.stats(pid, stem.util.proc.Stat.COMMAND)[0] except IOError: pass if (not process_name): try: results = call((GET_NAME_BY_PID_PS % pid)) except OSError: results = None if (results and (len(results) == 2) and (results[0] == 'COMMAND')): process_name = results[1].strip() return process_name
[ "def", "name_by_pid", "(", "pid", ")", ":", "process_name", "=", "None", "if", "stem", ".", "util", ".", "proc", ".", "is_available", "(", ")", ":", "try", ":", "process_name", "=", "stem", ".", "util", ".", "proc", ".", "stats", "(", "pid", ",", "...
attempts to determine the name a given process is running under .
train
false
33,364
def fake_data(n_docs, n_words, n_sent_length, n_topics): doc_topics = orthogonal_matrix([n_docs, n_topics]) wrd_topics = orthogonal_matrix([n_topics, n_words]) doc_to_wrds = softmax(np.dot(doc_topics, wrd_topics)) indices = np.arange(n_words).astype('int32') sentences = [] for doc_to_wrd in doc_to_wrds: words = sample(indices, doc_to_wrd, n_sent_length) sentences.append(words) sentences = np.array(sentences) return sentences.astype('int32')
[ "def", "fake_data", "(", "n_docs", ",", "n_words", ",", "n_sent_length", ",", "n_topics", ")", ":", "doc_topics", "=", "orthogonal_matrix", "(", "[", "n_docs", ",", "n_topics", "]", ")", "wrd_topics", "=", "orthogonal_matrix", "(", "[", "n_topics", ",", "n_w...
generate a fake dataset that matches the dimensions of mnist .
train
false
33,365
def get_commands(): global _commands if (_commands is None): _commands = dict([(name, 'django.core') for name in find_commands(__path__[0])]) from django.conf import settings try: apps = settings.INSTALLED_APPS except ImproperlyConfigured: apps = [] for app_name in apps: try: path = find_management_module(app_name) _commands.update(dict([(name, app_name) for name in find_commands(path)])) except ImportError: pass return _commands
[ "def", "get_commands", "(", ")", ":", "global", "_commands", "if", "(", "_commands", "is", "None", ")", ":", "_commands", "=", "dict", "(", "[", "(", "name", ",", "'django.core'", ")", "for", "name", "in", "find_commands", "(", "__path__", "[", "0", "]...
returns a dictionary mapping command names to their callback applications .
train
false
33,366
def violations(registry, xml_parent, data): violations = XML.SubElement(xml_parent, 'hudson.plugins.violations.ViolationsPublisher') config = XML.SubElement(violations, 'config') suppressions = XML.SubElement(config, 'suppressions', {'class': 'tree-set'}) XML.SubElement(suppressions, 'no-comparator') configs = XML.SubElement(config, 'typeConfigs') XML.SubElement(configs, 'no-comparator') for name in ['checkstyle', 'codenarc', 'cpd', 'cpplint', 'csslint', 'findbugs', 'fxcop', 'gendarme', 'jcreport', 'jslint', 'pep8', 'perlcritic', 'pmd', 'pylint', 'simian', 'stylecop']: _violations_add_entry(configs, name, data.get(name, {})) XML.SubElement(config, 'limit').text = '100' XML.SubElement(config, 'sourcePathPattern') XML.SubElement(config, 'fauxProjectPath') XML.SubElement(config, 'encoding').text = 'default'
[ "def", "violations", "(", "registry", ",", "xml_parent", ",", "data", ")", ":", "violations", "=", "XML", ".", "SubElement", "(", "xml_parent", ",", "'hudson.plugins.violations.ViolationsPublisher'", ")", "config", "=", "XML", ".", "SubElement", "(", "violations",...
yaml: violations publish code style violations .
train
false
33,367
def import_modules_recursively(topdir): modules = [] for (root, dirs, files) in os.walk(topdir): for file_ in files: if (file_[(-3):] != '.py'): continue module = file_[:(-3)] if (module == '__init__'): continue import_base = root.replace('/', '.') prefixlen = len(os.path.dirname(neutron.__file__)) import_base = ('neutron' + import_base[prefixlen:]) module = '.'.join([import_base, module]) if (module not in sys.modules): importlib.import_module(module) modules.append(module) return modules
[ "def", "import_modules_recursively", "(", "topdir", ")", ":", "modules", "=", "[", "]", "for", "(", "root", ",", "dirs", ",", "files", ")", "in", "os", ".", "walk", "(", "topdir", ")", ":", "for", "file_", "in", "files", ":", "if", "(", "file_", "[...
import and return all modules below the topdir directory .
train
false
33,368
def lv_absent(name, vgname=None): ret = {'changes': {}, 'comment': '', 'name': name, 'result': True} lvpath = '/dev/{0}/{1}'.format(vgname, name) if (not __salt__['lvm.lvdisplay'](lvpath)): ret['comment'] = 'Logical Volume {0} already absent'.format(name) elif __opts__['test']: ret['comment'] = 'Logical Volume {0} is set to be removed'.format(name) ret['result'] = None return ret else: changes = __salt__['lvm.lvremove'](name, vgname) if (not __salt__['lvm.lvdisplay'](lvpath)): ret['comment'] = 'Removed Logical Volume {0}'.format(name) ret['changes']['removed'] = changes else: ret['comment'] = 'Failed to remove Logical Volume {0}'.format(name) ret['result'] = False return ret
[ "def", "lv_absent", "(", "name", ",", "vgname", "=", "None", ")", ":", "ret", "=", "{", "'changes'", ":", "{", "}", ",", "'comment'", ":", "''", ",", "'name'", ":", "name", ",", "'result'", ":", "True", "}", "lvpath", "=", "'/dev/{0}/{1}'", ".", "f...
remove a given existing logical volume from a named existing volume group name the logical volume to remove vgname the volume group name .
train
true
33,369
@when(u'we send "ctrl + d"') def step_ctrl_d(context): context.cli.sendcontrol(u'd') context.exit_sent = True
[ "@", "when", "(", "u'we send \"ctrl + d\"'", ")", "def", "step_ctrl_d", "(", "context", ")", ":", "context", ".", "cli", ".", "sendcontrol", "(", "u'd'", ")", "context", ".", "exit_sent", "=", "True" ]
send ctrl + d to hopefully exit .
train
false
33,372
def strconvert(d): return dict([(str(k), v) for (k, v) in six.iteritems(d)])
[ "def", "strconvert", "(", "d", ")", ":", "return", "dict", "(", "[", "(", "str", "(", "k", ")", ",", "v", ")", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "d", ")", "]", ")" ]
converts all keys in dictionary to str type .
train
false
33,373
def header_elements(fieldname, fieldvalue): if (not fieldvalue): return [] result = [] for element in RE_HEADER_SPLIT.split(fieldvalue): if (fieldname.startswith('Accept') or (fieldname == 'TE')): hv = AcceptElement.from_str(element) else: hv = HeaderElement.from_str(element) result.append(hv) return list(reversed(sorted(result)))
[ "def", "header_elements", "(", "fieldname", ",", "fieldvalue", ")", ":", "if", "(", "not", "fieldvalue", ")", ":", "return", "[", "]", "result", "=", "[", "]", "for", "element", "in", "RE_HEADER_SPLIT", ".", "split", "(", "fieldvalue", ")", ":", "if", ...
return a sorted headerelement list from a comma-separated header string .
train
false
33,374
def _coord_byval(coord): return c_long(((coord.Y * 65536) | (coord.X & 65535)))
[ "def", "_coord_byval", "(", "coord", ")", ":", "return", "c_long", "(", "(", "(", "coord", ".", "Y", "*", "65536", ")", "|", "(", "coord", ".", "X", "&", "65535", ")", ")", ")" ]
turns a coord object into a c_long .
train
false
33,376
def RANDOMIZE(v): context.defaults['randomize'] = asbool(v)
[ "def", "RANDOMIZE", "(", "v", ")", ":", "context", ".", "defaults", "[", "'randomize'", "]", "=", "asbool", "(", "v", ")" ]
enables randomization of various pieces via context .
train
false
33,377
def unindent_string(docstring): lines = docstring.expandtabs().splitlines() icount = indentcount_lines(lines) if (icount == 0): return docstring return '\n'.join([line[icount:] for line in lines])
[ "def", "unindent_string", "(", "docstring", ")", ":", "lines", "=", "docstring", ".", "expandtabs", "(", ")", ".", "splitlines", "(", ")", "icount", "=", "indentcount_lines", "(", "lines", ")", "if", "(", "icount", "==", "0", ")", ":", "return", "docstri...
set docstring to minimum indent for all lines .
train
false
33,378
def inet_ntop(address_family, packed_ip): if (address_family not in set([socket.AF_INET, socket.AF_INET6])): raise ValueError(unwrap(u'\n address_family must be socket.AF_INET (%s) or socket.AF_INET6 (%s),\n not %s\n ', repr(socket.AF_INET), repr(socket.AF_INET6), repr(address_family))) if (not isinstance(packed_ip, byte_cls)): raise TypeError(unwrap(u'\n packed_ip must be a byte string, not %s\n ', type_name(packed_ip))) required_len = (4 if (address_family == socket.AF_INET) else 16) if (len(packed_ip) != required_len): raise ValueError(unwrap(u'\n packed_ip must be %d bytes long - is %d\n ', required_len, len(packed_ip))) if (address_family == socket.AF_INET): return (u'%d.%d.%d.%d' % tuple(bytes_to_list(packed_ip))) octets = struct.unpack('!HHHHHHHH', packed_ip) runs_of_zero = {} longest_run = 0 zero_index = None for (i, octet) in enumerate((octets + ((-1),))): if (octet != 0): if (zero_index is not None): length = (i - zero_index) if (length not in runs_of_zero): runs_of_zero[length] = zero_index longest_run = max(longest_run, length) zero_index = None elif (zero_index is None): zero_index = i hexed = [hex(o)[2:] for o in octets] if (longest_run < 2): return u':'.join(hexed) zero_start = runs_of_zero[longest_run] zero_end = (zero_start + longest_run) return ((u':'.join(hexed[:zero_start]) + u'::') + u':'.join(hexed[zero_end:]))
[ "def", "inet_ntop", "(", "address_family", ",", "packed_ip", ")", ":", "if", "(", "address_family", "not", "in", "set", "(", "[", "socket", ".", "AF_INET", ",", "socket", ".", "AF_INET6", "]", ")", ")", ":", "raise", "ValueError", "(", "unwrap", "(", "...
convert the binary form of a network address into its textual form .
train
true
33,379
def get_user_id_from_username(username): user_model = user_models.UserSettingsModel.get_by_normalized_username(UserSettings.normalize_username(username)) if (user_model is None): return None else: return user_model.id
[ "def", "get_user_id_from_username", "(", "username", ")", ":", "user_model", "=", "user_models", ".", "UserSettingsModel", ".", "get_by_normalized_username", "(", "UserSettings", ".", "normalize_username", "(", "username", ")", ")", "if", "(", "user_model", "is", "N...
gets the user_id for a given username .
train
false
33,380
def _massage_metakeys(dct, prfx): lowprefix = prfx.lower() ret = {} for (k, v) in list(dct.items()): if (not k.lower().startswith(lowprefix)): k = ('%s%s' % (prfx, k)) ret[k] = v return ret
[ "def", "_massage_metakeys", "(", "dct", ",", "prfx", ")", ":", "lowprefix", "=", "prfx", ".", "lower", "(", ")", "ret", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "list", "(", "dct", ".", "items", "(", ")", ")", ":", "if", "(", "not...
returns a copy of the supplied dictionary .
train
true
33,381
def _get_block_coords(parsed_seq, row_dict, has_ner=False): start = 0 coords = [] if (not has_ner): splitter = _RE_EXON else: splitter = _RE_NER seq = parsed_seq[row_dict['query']] for block in re.split(splitter, seq): start += seq[start:].find(block) end = (start + len(block)) coords.append((start, end)) return coords
[ "def", "_get_block_coords", "(", "parsed_seq", ",", "row_dict", ",", "has_ner", "=", "False", ")", ":", "start", "=", "0", "coords", "=", "[", "]", "if", "(", "not", "has_ner", ")", ":", "splitter", "=", "_RE_EXON", "else", ":", "splitter", "=", "_RE_N...
returns a list of start .
train
false
33,382
@register.filter def get_links(value): try: try: from BeautifulSoup import BeautifulSoup except ImportError: from beautifulsoup import BeautifulSoup soup = BeautifulSoup(value) return soup.findAll('a') except ImportError: if settings.DEBUG: raise template.TemplateSyntaxError, "Error in 'get_links' filter: BeautifulSoup isn't installed." return value
[ "@", "register", ".", "filter", "def", "get_links", "(", "value", ")", ":", "try", ":", "try", ":", "from", "BeautifulSoup", "import", "BeautifulSoup", "except", "ImportError", ":", "from", "beautifulsoup", "import", "BeautifulSoup", "soup", "=", "BeautifulSoup"...
extracts links from a post body and returns a list .
train
false
33,383
def clean_loc_files(file_creator): file_creator.remove_all()
[ "def", "clean_loc_files", "(", "file_creator", ")", ":", "file_creator", ".", "remove_all", "(", ")" ]
removes all of the local files made .
train
false
33,384
def update_port_state(context, port_id, admin_state_up): port_id = port_id[0:11] session = context.session session.query(BrocadePort).filter_by(port_id=port_id).update({'admin_state_up': admin_state_up})
[ "def", "update_port_state", "(", "context", ",", "port_id", ",", "admin_state_up", ")", ":", "port_id", "=", "port_id", "[", "0", ":", "11", "]", "session", "=", "context", ".", "session", "session", ".", "query", "(", "BrocadePort", ")", ".", "filter_by",...
update port attributes .
train
false
33,385
def test_key(): def bad(): return (ENTER == dict()) assert_raises(ValueError, bad) assert_true((not (ENTER == None))) assert_equal('Return', ENTER) print ENTER.name print ENTER assert_equal(Key('1'), 49)
[ "def", "test_key", "(", ")", ":", "def", "bad", "(", ")", ":", "return", "(", "ENTER", "==", "dict", "(", ")", ")", "assert_raises", "(", "ValueError", ",", "bad", ")", "assert_true", "(", "(", "not", "(", "ENTER", "==", "None", ")", ")", ")", "a...
test basic key functionality .
train
false
33,388
@then(u'we see the named query deleted') def step_see_named_query_deleted(context): _expect_exact(context, u'foo: Deleted', timeout=1)
[ "@", "then", "(", "u'we see the named query deleted'", ")", "def", "step_see_named_query_deleted", "(", "context", ")", ":", "_expect_exact", "(", "context", ",", "u'foo: Deleted'", ",", "timeout", "=", "1", ")" ]
wait to see query deleted .
train
false
33,390
def adaptAgentObject(agent_object): agent = adaptAgent(LearningAgent)(Module(1, 1)) agent.agent = agent_object return agent
[ "def", "adaptAgentObject", "(", "agent_object", ")", ":", "agent", "=", "adaptAgent", "(", "LearningAgent", ")", "(", "Module", "(", "1", ",", "1", ")", ")", "agent", ".", "agent", "=", "agent_object", "return", "agent" ]
return an object that adapts a pybrain agent to the rlglue interface .
train
false
33,391
def reload_config(): _env_reloader.update()
[ "def", "reload_config", "(", ")", ":", "_env_reloader", ".", "update", "(", ")" ]
reloads configuration settings from a configuration file for the root package of the requested package/module .
train
false
33,392
def execvp(file, args): _execvpe(file, args)
[ "def", "execvp", "(", "file", ",", "args", ")", ":", "_execvpe", "(", "file", ",", "args", ")" ]
execvp execute the executable file with argument list args .
train
false
33,393
@_held_figure def convex_hull_plot_2d(hull, ax=None): from matplotlib.collections import LineCollection if (hull.points.shape[1] != 2): raise ValueError('Convex hull is not 2-D') ax.plot(hull.points[:, 0], hull.points[:, 1], 'o') line_segments = [] for simplex in hull.simplices: line_segments.append([(x, y) for (x, y) in hull.points[simplex]]) ax.add_collection(LineCollection(line_segments, colors='k', linestyle='solid')) _adjust_bounds(ax, hull.points) return ax.figure
[ "@", "_held_figure", "def", "convex_hull_plot_2d", "(", "hull", ",", "ax", "=", "None", ")", ":", "from", "matplotlib", ".", "collections", "import", "LineCollection", "if", "(", "hull", ".", "points", ".", "shape", "[", "1", "]", "!=", "2", ")", ":", ...
plot the given convex hull diagram in 2-d parameters hull : scipy .
train
false
33,394
@_define_event def pre_run_cell(): pass
[ "@", "_define_event", "def", "pre_run_cell", "(", ")", ":", "pass" ]
fires before user-entered code runs .
train
false
33,395
def test_read_noformat_arbitrary_file(tmpdir): _readers.update(_READERS_ORIGINAL) testfile = str(tmpdir.join(u'foo.example')) with open(testfile, u'w') as f: f.write(u'Hello world') with pytest.raises(io_registry.IORegistryError) as exc: Table.read(testfile) assert str(exc.value).startswith(u'Format could not be identified.')
[ "def", "test_read_noformat_arbitrary_file", "(", "tmpdir", ")", ":", "_readers", ".", "update", "(", "_READERS_ORIGINAL", ")", "testfile", "=", "str", "(", "tmpdir", ".", "join", "(", "u'foo.example'", ")", ")", "with", "open", "(", "testfile", ",", "u'w'", ...
tests that all identifier functions can accept arbitrary files .
train
false
33,396
def get_fasta_fps(fasta_dir, fasta_files): fasta_filepaths = [] for curr_file in fasta_files: curr_fp = join(fasta_dir, curr_file) try: file_test = open(curr_fp, 'U') file_test.close() except IOError: raise IOError(('Unable to open %s' % curr_fp)) fasta_filepaths.append(curr_fp) return fasta_filepaths
[ "def", "get_fasta_fps", "(", "fasta_dir", ",", "fasta_files", ")", ":", "fasta_filepaths", "=", "[", "]", "for", "curr_file", "in", "fasta_files", ":", "curr_fp", "=", "join", "(", "fasta_dir", ",", "curr_file", ")", "try", ":", "file_test", "=", "open", "...
returns list of fasta filepaths fasta_dir: directory of fasta files to check fasta_files: list of fasta filenames to open .
train
false
33,397
def subscription_name_from_path(path, project): return _name_from_project_path(path, project, _SUBSCRIPTION_TEMPLATE)
[ "def", "subscription_name_from_path", "(", "path", ",", "project", ")", ":", "return", "_name_from_project_path", "(", "path", ",", "project", ",", "_SUBSCRIPTION_TEMPLATE", ")" ]
validate a subscription uri path and get the subscription name .
train
false
33,398
def read_png_depth(filename): result = None f = open(filename, 'rb') try: f.seek((- (LEN_IEND + LEN_DEPTH)), 2) depthchunk = f.read(LEN_DEPTH) if (not depthchunk.startswith((DEPTH_CHUNK_LEN + DEPTH_CHUNK_START))): return None result = struct.unpack('!i', depthchunk[14:18])[0] finally: f.close() return result
[ "def", "read_png_depth", "(", "filename", ")", ":", "result", "=", "None", "f", "=", "open", "(", "filename", ",", "'rb'", ")", "try", ":", "f", ".", "seek", "(", "(", "-", "(", "LEN_IEND", "+", "LEN_DEPTH", ")", ")", ",", "2", ")", "depthchunk", ...
read the special text chunk indicating the depth from a png file .
train
false
33,399
def print_exception(etype, value, tb, limit=None, file=None): if (file is None): file = sys.stderr if tb: _print(file, 'Traceback (most recent call last):') print_tb(tb, limit, file) lines = format_exception_only(etype, value) for line in lines[:(-1)]: _print(file, line, ' ') _print(file, lines[(-1)], '')
[ "def", "print_exception", "(", "etype", ",", "value", ",", "tb", ",", "limit", "=", "None", ",", "file", "=", "None", ")", ":", "if", "(", "file", "is", "None", ")", ":", "file", "=", "sys", ".", "stderr", "if", "tb", ":", "_print", "(", "file", ...
print exception up to limit stack trace entries from tb to file .
train
false
33,400
@contextmanager def capture_reset_password_requests(reset_password_sent_at=None): reset_requests = [] def _on(app, **data): reset_requests.append(data) reset_password_instructions_sent.connect(_on) try: (yield reset_requests) finally: reset_password_instructions_sent.disconnect(_on)
[ "@", "contextmanager", "def", "capture_reset_password_requests", "(", "reset_password_sent_at", "=", "None", ")", ":", "reset_requests", "=", "[", "]", "def", "_on", "(", "app", ",", "**", "data", ")", ":", "reset_requests", ".", "append", "(", "data", ")", ...
testing utility for capturing password reset requests .
train
true
33,401
def _context_string_to_dict(context): if (not re.match('[^:]+:[^:]+:[^:]+:[^:]+$', context)): raise SaltInvocationError(('Invalid SELinux context string: {0}. ' + 'Expected "sel_user:sel_role:sel_type:sel_level"')) context_list = context.split(':', 3) ret = {} for (index, value) in enumerate(['sel_user', 'sel_role', 'sel_type', 'sel_level']): ret[value] = context_list[index] return ret
[ "def", "_context_string_to_dict", "(", "context", ")", ":", "if", "(", "not", "re", ".", "match", "(", "'[^:]+:[^:]+:[^:]+:[^:]+$'", ",", "context", ")", ")", ":", "raise", "SaltInvocationError", "(", "(", "'Invalid SELinux context string: {0}. '", "+", "'Expected \...
converts an selinux file context from string to dict .
train
true
33,402
def linearize(expr): expr = Constant.cast_to_const(expr) if expr.is_affine(): return expr else: tangent = expr.value if (tangent is None): raise ValueError('Cannot linearize non-affine expression with missing variable values.') grad_map = expr.grad for var in expr.variables(): if (grad_map[var] is None): return None elif var.is_matrix(): flattened = (Constant(grad_map[var]).T * vec((var - var.value))) tangent = (tangent + reshape(flattened, *expr.size)) else: tangent = (tangent + (Constant(grad_map[var]).T * (var - var.value))) return tangent
[ "def", "linearize", "(", "expr", ")", ":", "expr", "=", "Constant", ".", "cast_to_const", "(", "expr", ")", "if", "expr", ".", "is_affine", "(", ")", ":", "return", "expr", "else", ":", "tangent", "=", "expr", ".", "value", "if", "(", "tangent", "is"...
returns the tangent approximation to the expression .
train
false
33,403
def _daily_qs_for(model_cls): return model_cls.objects.filter(created__gte=date(2011, 1, 1), creator__is_active=1).extra(select={'day': 'extract( day from created )', 'month': 'extract( month from created )', 'year': 'extract( year from created )'}).values('year', 'month', 'day').annotate(count=Count('created'))
[ "def", "_daily_qs_for", "(", "model_cls", ")", ":", "return", "model_cls", ".", "objects", ".", "filter", "(", "created__gte", "=", "date", "(", "2011", ",", "1", ",", "1", ")", ",", "creator__is_active", "=", "1", ")", ".", "extra", "(", "select", "="...
return the daily grouped queryset we need for model_cls .
train
false
33,404
def is_valid_fqdn(fqdn): return (re.match('^[a-zA-Z0-9.-]+\\.[a-zA-Z]{2,}$', fqdn) is not None)
[ "def", "is_valid_fqdn", "(", "fqdn", ")", ":", "return", "(", "re", ".", "match", "(", "'^[a-zA-Z0-9.-]+\\\\.[a-zA-Z]{2,}$'", ",", "fqdn", ")", "is", "not", "None", ")" ]
verify whether a host is a valid fqdn .
train
false
33,405
def unwrap_order_by(clause): cols = util.column_set() result = [] stack = deque([clause]) while stack: t = stack.popleft() if (isinstance(t, ColumnElement) and ((not isinstance(t, UnaryExpression)) or (not operators.is_ordering_modifier(t.modifier)))): if isinstance(t, _label_reference): t = t.element if isinstance(t, _textual_label_reference): continue if (t not in cols): cols.add(t) result.append(t) else: for c in t.get_children(): stack.append(c) return result
[ "def", "unwrap_order_by", "(", "clause", ")", ":", "cols", "=", "util", ".", "column_set", "(", ")", "result", "=", "[", "]", "stack", "=", "deque", "(", "[", "clause", "]", ")", "while", "stack", ":", "t", "=", "stack", ".", "popleft", "(", ")", ...
break up an order by expression into individual column-expressions .
train
false
33,406
def test_data(): _test_raw_reader(read_raw_nicolet, input_fname=fname, ch_type='eeg', ecg='auto', eog='auto', emg='auto', misc=['PHO'])
[ "def", "test_data", "(", ")", ":", "_test_raw_reader", "(", "read_raw_nicolet", ",", "input_fname", "=", "fname", ",", "ch_type", "=", "'eeg'", ",", "ecg", "=", "'auto'", ",", "eog", "=", "'auto'", ",", "emg", "=", "'auto'", ",", "misc", "=", "[", "'PH...
test reading raw cnt files .
train
false
33,408
def allMatches(source, regex): pos = 0 end = len(source) rv = [] match = regex.search(source, pos) while match: rv.append(match) match = regex.search(source, match.end()) return rv
[ "def", "allMatches", "(", "source", ",", "regex", ")", ":", "pos", "=", "0", "end", "=", "len", "(", "source", ")", "rv", "=", "[", "]", "match", "=", "regex", ".", "search", "(", "source", ",", "pos", ")", "while", "match", ":", "rv", ".", "ap...
return a list of matches for regex in source .
train
false
33,409
def transfer_destroy(context, transfer_id): return IMPL.transfer_destroy(context, transfer_id)
[ "def", "transfer_destroy", "(", "context", ",", "transfer_id", ")", ":", "return", "IMPL", ".", "transfer_destroy", "(", "context", ",", "transfer_id", ")" ]
destroy a record in the volume transfer table .
train
false
33,410
def reset(): _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
reset the cached conf .
train
false
33,411
def matmul(self, rhs): if isinstance(rhs, variable.Variable): return MatMulVarVar()(self, rhs) _check_constant_type(rhs) return MatMulVarConst(rhs)(self)
[ "def", "matmul", "(", "self", ",", "rhs", ")", ":", "if", "isinstance", "(", "rhs", ",", "variable", ".", "Variable", ")", ":", "return", "MatMulVarVar", "(", ")", "(", "self", ",", "rhs", ")", "_check_constant_type", "(", "rhs", ")", "return", "MatMul...
matrix multiplication .
train
false
33,412
def tryall(context, prefix=None): context = context.copy() results = {} for (k, v) in context.iteritems(): if (not hasattr(v, '__call__')): continue if (prefix and (not k.startswith(prefix))): continue print (k + ':'), try: r = v() dictincr(results, r) print r except: print 'ERROR' dictincr(results, 'ERROR') print (' ' + '\n '.join(traceback.format_exc().split('\n'))) print ('-' * 40) print 'results:' for (k, v) in results.iteritems(): print (' ' * 2), (str(k) + ':'), v
[ "def", "tryall", "(", "context", ",", "prefix", "=", "None", ")", ":", "context", "=", "context", ".", "copy", "(", ")", "results", "=", "{", "}", "for", "(", "k", ",", "v", ")", "in", "context", ".", "iteritems", "(", ")", ":", "if", "(", "not...
tries a series of functions and prints their results .
train
false
33,413
def find_rescue_device(hardware_devices, instance): for device in hardware_devices.VirtualDevice: if ((device.__class__.__name__ == 'VirtualDisk') and (device.backing.__class__.__name__ == 'VirtualDiskFlatVer2BackingInfo') and device.backing.fileName.endswith('-rescue.vmdk')): return device msg = (_('Rescue device does not exist for instance %s') % instance.uuid) raise exception.NotFound(msg)
[ "def", "find_rescue_device", "(", "hardware_devices", ",", "instance", ")", ":", "for", "device", "in", "hardware_devices", ".", "VirtualDevice", ":", "if", "(", "(", "device", ".", "__class__", ".", "__name__", "==", "'VirtualDisk'", ")", "and", "(", "device"...
returns the rescue device .
train
false
33,415
def makeRst(prefix, section, app, exampleByIdentifier, schema_store): for route in sorted(getRoutes(app)): if route.attributes.get('private_api', False): continue data = _introspectRoute(route, exampleByIdentifier, schema_store) if (data['section'] != section): continue for method in route.methods: if (data['header'] is not None): (yield data['header']) (yield ('-' * len(data['header']))) (yield '') body = _formatRouteBody(data, schema_store) for line in http_directive(method, (prefix + route.path), body): (yield line)
[ "def", "makeRst", "(", "prefix", ",", "section", ",", "app", ",", "exampleByIdentifier", ",", "schema_store", ")", ":", "for", "route", "in", "sorted", "(", "getRoutes", "(", "app", ")", ")", ":", "if", "route", ".", "attributes", ".", "get", "(", "'pr...
generate the sphinx documentation associated with a l{klein} application .
train
false
33,419
@pytest.fixture(scope='session') def post_db_setup(translations_directory, django_db_setup, django_db_blocker, tests_use_db, tests_use_vfolders, request): if tests_use_db: with django_db_blocker.unblock(): PootleTestEnv().setup(vfolders=tests_use_vfolders)
[ "@", "pytest", ".", "fixture", "(", "scope", "=", "'session'", ")", "def", "post_db_setup", "(", "translations_directory", ",", "django_db_setup", ",", "django_db_blocker", ",", "tests_use_db", ",", "tests_use_vfolders", ",", "request", ")", ":", "if", "tests_use_...
sets up the site db for the test session .
train
false
33,420
def apodize(data, ms=5, rate=44100): hw_size = int(min((rate // (1000 / ms)), (len(data) // 15))) hamming_window = np.hamming(((2 * hw_size) + 1)) data[:hw_size] *= hamming_window[:hw_size] data[(- hw_size):] *= hamming_window[(- hw_size):] return data
[ "def", "apodize", "(", "data", ",", "ms", "=", "5", ",", "rate", "=", "44100", ")", ":", "hw_size", "=", "int", "(", "min", "(", "(", "rate", "//", "(", "1000", "/", "ms", ")", ")", ",", "(", "len", "(", "data", ")", "//", "15", ")", ")", ...
apply a hamming window to reduce a sounds click onset / offset .
train
false
33,421
def getProfileModel(profile): try: if (not isinstance(profile, ImageCmsProfile)): profile = ImageCmsProfile(profile) return (profile.profile.product_model + '\n') except (AttributeError, IOError, TypeError, ValueError) as v: raise PyCMSError(v)
[ "def", "getProfileModel", "(", "profile", ")", ":", "try", ":", "if", "(", "not", "isinstance", "(", "profile", ",", "ImageCmsProfile", ")", ")", ":", "profile", "=", "ImageCmsProfile", "(", "profile", ")", "return", "(", "profile", ".", "profile", ".", ...
gets the model for the given profile .
train
false
33,422
def colorbar_factory(cax, mappable, **kwargs): if (isinstance(mappable, contour.ContourSet) and any([(hatch is not None) for hatch in mappable.hatches])): cb = ColorbarPatch(cax, mappable, **kwargs) else: cb = Colorbar(cax, mappable, **kwargs) cid = mappable.callbacksSM.connect(u'changed', cb.on_mappable_changed) mappable.colorbar = cb mappable.colorbar_cid = cid return cb
[ "def", "colorbar_factory", "(", "cax", ",", "mappable", ",", "**", "kwargs", ")", ":", "if", "(", "isinstance", "(", "mappable", ",", "contour", ".", "ContourSet", ")", "and", "any", "(", "[", "(", "hatch", "is", "not", "None", ")", "for", "hatch", "...
creates a colorbar on the given axes for the given mappable .
train
false
33,423
def is_eulerian(G): if G.is_directed(): return (all(((G.in_degree(n) == G.out_degree(n)) for n in G)) and nx.is_strongly_connected(G)) return (all((((d % 2) == 0) for (v, d) in G.degree())) and nx.is_connected(G))
[ "def", "is_eulerian", "(", "G", ")", ":", "if", "G", ".", "is_directed", "(", ")", ":", "return", "(", "all", "(", "(", "(", "G", ".", "in_degree", "(", "n", ")", "==", "G", ".", "out_degree", "(", "n", ")", ")", "for", "n", "in", "G", ")", ...
returns true if and only if g is eulerian .
train
false
33,424
def write_pack(filename, objects, deltify=None, delta_window_size=None): with GitFile((filename + '.pack'), 'wb') as f: (entries, data_sum) = write_pack_objects(f, objects, delta_window_size=delta_window_size, deltify=deltify) entries = [(k, v[0], v[1]) for (k, v) in entries.items()] entries.sort() with GitFile((filename + '.idx'), 'wb') as f: return (data_sum, write_pack_index_v2(f, entries, data_sum))
[ "def", "write_pack", "(", "filename", ",", "objects", ",", "deltify", "=", "None", ",", "delta_window_size", "=", "None", ")", ":", "with", "GitFile", "(", "(", "filename", "+", "'.pack'", ")", ",", "'wb'", ")", "as", "f", ":", "(", "entries", ",", "...
write a new pack data file .
train
false
33,427
def path_component(s): res = r_path_component.findall(base_url(s)) return ((res and res[0]) or s)
[ "def", "path_component", "(", "s", ")", ":", "res", "=", "r_path_component", ".", "findall", "(", "base_url", "(", "s", ")", ")", "return", "(", "(", "res", "and", "res", "[", "0", "]", ")", "or", "s", ")" ]
takes a url URL and returns i/like/cheese .
train
false
33,429
def length_lte(value, arg): return (len(value) <= int(arg))
[ "def", "length_lte", "(", "value", ",", "arg", ")", ":", "return", "(", "len", "(", "value", ")", "<=", "int", "(", "arg", ")", ")" ]
returns a boolean of whether the values length is less than or equal to the argument .
train
false
33,430
@handle_response_format @treeio_login_required def account_password(request, response_format='html'): profile = request.user.profile if request.POST: if ('cancel' not in request.POST): form = AccountPasswordForm(request.user, request.POST) if form.is_valid(): form.save() return HttpResponseRedirect(reverse('account_view')) else: return HttpResponseRedirect(reverse('account_view')) else: form = AccountPasswordForm(request.user) return render_to_response('account/account_password', {'profile': profile, 'form': Markup(form.as_ul())}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "account_password", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "profile", "=", "request", ".", "user", ".", "profile", "if", "request", ".", "POST", ":", "if", "(", "'can...
change password form .
train
false
33,433
def get_chassis_location(host=None, admin_username=None, admin_password=None): return system_info(host=host, admin_username=admin_username, admin_password=admin_password)['Chassis Information']['Chassis Location']
[ "def", "get_chassis_location", "(", "host", "=", "None", ",", "admin_username", "=", "None", ",", "admin_password", "=", "None", ")", ":", "return", "system_info", "(", "host", "=", "host", ",", "admin_username", "=", "admin_username", ",", "admin_password", "...
get the location of the chassis .
train
true
33,435
def flip_axis(x, axis, is_random=False): if is_random: factor = np.random.uniform((-1), 1) if (factor > 0): x = np.asarray(x).swapaxes(axis, 0) x = x[::(-1), ...] x = x.swapaxes(0, axis) return x else: return x else: x = np.asarray(x).swapaxes(axis, 0) x = x[::(-1), ...] x = x.swapaxes(0, axis) return x
[ "def", "flip_axis", "(", "x", ",", "axis", ",", "is_random", "=", "False", ")", ":", "if", "is_random", ":", "factor", "=", "np", ".", "random", ".", "uniform", "(", "(", "-", "1", ")", ",", "1", ")", "if", "(", "factor", ">", "0", ")", ":", ...
flip the axis of an image .
train
true
33,436
def onGlobalData(key, value): DEBUG_MSG(('onGlobalData: %s' % key))
[ "def", "onGlobalData", "(", "key", ",", "value", ")", ":", "DEBUG_MSG", "(", "(", "'onGlobalData: %s'", "%", "key", ")", ")" ]
kbengine method .
train
false
33,437
def get_zk_node_ips(): try: info = file_io.read(constants.ZK_LOCATIONS_JSON_FILE) zk_json = json.loads(info) return zk_json['locations'] except IOError as io_error: logging.exception(io_error) return [] except ValueError as value_error: logging.exception(value_error) return [] except TypeError as type_error: logging.exception(type_error) return [] except KeyError as key_error: logging.exception(key_error) return []
[ "def", "get_zk_node_ips", "(", ")", ":", "try", ":", "info", "=", "file_io", ".", "read", "(", "constants", ".", "ZK_LOCATIONS_JSON_FILE", ")", "zk_json", "=", "json", ".", "loads", "(", "info", ")", "return", "zk_json", "[", "'locations'", "]", "except", ...
returns a list of zookeeper node ips .
train
false
33,438
@pytest.mark.django_db def test_backend_db(): from django.db import connection, connections if (connection.vendor == 'sqlite'): assert (connections.databases['default']['NAME'] == ':memory:')
[ "@", "pytest", ".", "mark", ".", "django_db", "def", "test_backend_db", "(", ")", ":", "from", "django", ".", "db", "import", "connection", ",", "connections", "if", "(", "connection", ".", "vendor", "==", "'sqlite'", ")", ":", "assert", "(", "connections"...
ensure that we are always testing sqlite on fast in memory db .
train
false
33,439
def params_to_dict(params, dct): for (param, val) in params.items(): if (val is None): continue dct[param] = val return dct
[ "def", "params_to_dict", "(", "params", ",", "dct", ")", ":", "for", "(", "param", ",", "val", ")", "in", "params", ".", "items", "(", ")", ":", "if", "(", "val", "is", "None", ")", ":", "continue", "dct", "[", "param", "]", "=", "val", "return",...
updates the dct dictionary with the params dictionary .
train
true
33,440
def get_capabilities(http_conn): (parsed, conn) = http_conn headers = {'Accept-Encoding': 'gzip'} conn.request('GET', parsed.path, '', headers) resp = conn.getresponse() body = resp.read() http_log((parsed.geturl(), 'GET'), {'headers': headers}, resp, body) if ((resp.status < 200) or (resp.status >= 300)): raise ClientException.from_response(resp, 'Capabilities GET failed', body) resp_headers = resp_header_dict(resp) return parse_api_response(resp_headers, body)
[ "def", "get_capabilities", "(", "http_conn", ")", ":", "(", "parsed", ",", "conn", ")", "=", "http_conn", "headers", "=", "{", "'Accept-Encoding'", ":", "'gzip'", "}", "conn", ".", "request", "(", "'GET'", ",", "parsed", ".", "path", ",", "''", ",", "h...
list all capabilities on the system args: image : the path to the root directory of an offline windows image .
train
false
33,441
def renderElement(request, element, doctype='<!DOCTYPE html>', _failElement=None): if (doctype is not None): request.write(doctype) request.write('\n') if (_failElement is None): _failElement = twisted.web.util.FailureElement d = flatten(request, element, request.write) def eb(failure): log.err(failure, 'An error occurred while rendering the response.') if request.site.displayTracebacks: return flatten(request, _failElement(failure), request.write).encode('utf8') else: request.write('<div style="font-size:800%;background-color:#FFF;color:#F00">An error occurred while rendering the response.</div>') d.addErrback(eb) d.addBoth((lambda _: request.finish())) return NOT_DONE_YET
[ "def", "renderElement", "(", "request", ",", "element", ",", "doctype", "=", "'<!DOCTYPE html>'", ",", "_failElement", "=", "None", ")", ":", "if", "(", "doctype", "is", "not", "None", ")", ":", "request", ".", "write", "(", "doctype", ")", "request", "....
render an element or other c{irenderable} .
train
false
33,442
def is_dnf(expr): return _is_form(expr, Or, And)
[ "def", "is_dnf", "(", "expr", ")", ":", "return", "_is_form", "(", "expr", ",", "Or", ",", "And", ")" ]
test whether or not an expression is in disjunctive normal form .
train
false
33,443
def gf_monic(f, p, K): if (not f): return (K.zero, []) else: lc = f[0] if K.is_one(lc): return (lc, list(f)) else: return (lc, gf_quo_ground(f, lc, p, K))
[ "def", "gf_monic", "(", "f", ",", "p", ",", "K", ")", ":", "if", "(", "not", "f", ")", ":", "return", "(", "K", ".", "zero", ",", "[", "]", ")", "else", ":", "lc", "=", "f", "[", "0", "]", "if", "K", ".", "is_one", "(", "lc", ")", ":", ...
compute lc and a monic polynomial in gf(p)[x] .
train
false
33,444
def _add_gamma_multipliers(bem): bem['sigma'] = np.array([surf['sigma'] for surf in bem['surfs']]) sigma = np.r_[(0.0, bem['sigma'])] bem['source_mult'] = (2.0 / (sigma[1:] + sigma[:(-1)])) bem['field_mult'] = (sigma[1:] - sigma[:(-1)]) assert (len(bem['surfs']) == len(bem['field_mult'])) bem['gamma'] = ((sigma[1:] - sigma[:(-1)])[np.newaxis, :] / (sigma[1:] + sigma[:(-1)])[:, np.newaxis])
[ "def", "_add_gamma_multipliers", "(", "bem", ")", ":", "bem", "[", "'sigma'", "]", "=", "np", ".", "array", "(", "[", "surf", "[", "'sigma'", "]", "for", "surf", "in", "bem", "[", "'surfs'", "]", "]", ")", "sigma", "=", "np", ".", "r_", "[", "(",...
helper to add gamma and multipliers in-place .
train
false
33,445
def _make_inventory_object(resource_provider, resource_class, **data): try: inventory = objects.Inventory(resource_provider=resource_provider, resource_class=resource_class, **data) except (ValueError, TypeError) as exc: raise webob.exc.HTTPBadRequest((_('Bad inventory %(class)s for resource provider %(rp_uuid)s: %(error)s') % {'class': resource_class, 'rp_uuid': resource_provider.uuid, 'error': exc}), json_formatter=util.json_error_formatter) return inventory
[ "def", "_make_inventory_object", "(", "resource_provider", ",", "resource_class", ",", "**", "data", ")", ":", "try", ":", "inventory", "=", "objects", ".", "Inventory", "(", "resource_provider", "=", "resource_provider", ",", "resource_class", "=", "resource_class"...
single place to catch malformed inventories .
train
false
33,446
def datacite_metadata_for_node(node, doi, pretty_print=False): def format_contrib(contributor): return u'{}, {}'.format(contributor.family_name, contributor.given_name) creators = [format_contrib(each) for each in node.visible_contributors] return datacite_metadata(doi=doi, title=node.title, creators=creators, publisher='Open Science Framework', publication_year=getattr((node.registered_date or node.date_created), 'year'), pretty_print=pretty_print)
[ "def", "datacite_metadata_for_node", "(", "node", ",", "doi", ",", "pretty_print", "=", "False", ")", ":", "def", "format_contrib", "(", "contributor", ")", ":", "return", "u'{}, {}'", ".", "format", "(", "contributor", ".", "family_name", ",", "contributor", ...
return the datacite metadata xml document for a given node as a string .
train
false
33,448
def browse_current(): branch = gitcmds.current_branch() BrowseDialog.browse(branch)
[ "def", "browse_current", "(", ")", ":", "branch", "=", "gitcmds", ".", "current_branch", "(", ")", "BrowseDialog", ".", "browse", "(", "branch", ")" ]
launch the browse current branch dialog .
train
false
33,450
def get_next_unique_id(model, field, value): condition = {} condition[('%s__iregex' % field)] = ('^%s[0-9]+$' % value) values = model.objects.filter(**condition).values_list(field, flat=True) integers = map((lambda x: int(x.replace(value, ''))), values) all_values = range(1, (len(integers) + 2)) gap = list((set(all_values) - set(integers)))[0] new_field_value = ('%s%d' % (value, gap)) return new_field_value
[ "def", "get_next_unique_id", "(", "model", ",", "field", ",", "value", ")", ":", "condition", "=", "{", "}", "condition", "[", "(", "'%s__iregex'", "%", "field", ")", "]", "=", "(", "'^%s[0-9]+$'", "%", "value", ")", "values", "=", "model", ".", "objec...
find next available incrementing value for a field in model .
train
false
33,451
def _format_translation(message, variables=None): if (variables is not None): try: return (message % variables) except: pass return message
[ "def", "_format_translation", "(", "message", ",", "variables", "=", "None", ")", ":", "if", "(", "variables", "is", "not", "None", ")", ":", "try", ":", "return", "(", "message", "%", "variables", ")", "except", ":", "pass", "return", "message" ]
overrides the gettext function .
train
false
33,453
@users.command('new') @click.option('--username', '-u', help='The username of the user.') @click.option('--email', '-e', type=EmailType(), help='The email address of the user.') @click.option('--password', '-p', help='The password of the user.') @click.option('--group', '-g', help='The group of the user.', type=click.Choice(['admin', 'super_mod', 'mod', 'member'])) def new_user(username, email, password, group): try: user = save_user_prompt(username, email, password, group) click.secho('[+] User {} with Email {} in Group {} created.'.format(user.username, user.email, user.primary_group.name), fg='cyan') except IntegrityError: raise FlaskBBCLIError("Couldn't create the user because the username or email address is already taken.", fg='red')
[ "@", "users", ".", "command", "(", "'new'", ")", "@", "click", ".", "option", "(", "'--username'", ",", "'-u'", ",", "help", "=", "'The username of the user.'", ")", "@", "click", ".", "option", "(", "'--email'", ",", "'-e'", ",", "type", "=", "EmailType...
creates a new user .
train
false
33,454
def rel_posix_to_rel_local(path, environ=None): if (environ is None): environ = os.environ return os.path.join(*path.split('/'))
[ "def", "rel_posix_to_rel_local", "(", "path", ",", "environ", "=", "None", ")", ":", "if", "(", "environ", "is", "None", ")", ":", "environ", "=", "os", ".", "environ", "return", "os", ".", "path", ".", "join", "(", "*", "path", ".", "split", "(", ...
convert a posix path to the current systems format .
train
false
33,456
def get_sentinel_object(): return Other3()
[ "def", "get_sentinel_object", "(", ")", ":", "return", "Other3", "(", ")" ]
a function to return the object to be used in place of any deleted object .
train
false
33,457
def _save_mean(mean, filename): if filename.endswith('.binaryproto'): blob = caffe_pb2.BlobProto() blob.num = 1 blob.channels = 1 (blob.height, blob.width) = mean.shape blob.data.extend(mean.astype(float).flat) with open(filename, 'wb') as outfile: outfile.write(blob.SerializeToString()) elif filename.endswith(('.jpg', '.jpeg', '.png')): image = PIL.Image.fromarray(mean) image.save(filename) else: raise ValueError('unrecognized file extension')
[ "def", "_save_mean", "(", "mean", ",", "filename", ")", ":", "if", "filename", ".", "endswith", "(", "'.binaryproto'", ")", ":", "blob", "=", "caffe_pb2", ".", "BlobProto", "(", ")", "blob", ".", "num", "=", "1", "blob", ".", "channels", "=", "1", "(...
saves mean to file arguments: mean -- the mean as an np .
train
false
33,460
@register.tag def get_admin_log(parser, token): tokens = token.contents.split() if (len(tokens) < 4): raise template.TemplateSyntaxError("'get_admin_log' statements require two arguments") if (not tokens[1].isdigit()): raise template.TemplateSyntaxError("First argument to 'get_admin_log' must be an integer") if (tokens[2] != 'as'): raise template.TemplateSyntaxError("Second argument to 'get_admin_log' must be 'as'") if (len(tokens) > 4): if (tokens[4] != 'for_user'): raise template.TemplateSyntaxError("Fourth argument to 'get_admin_log' must be 'for_user'") return AdminLogNode(limit=tokens[1], varname=tokens[3], user=(tokens[5] if (len(tokens) > 5) else None))
[ "@", "register", ".", "tag", "def", "get_admin_log", "(", "parser", ",", "token", ")", ":", "tokens", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "(", "len", "(", "tokens", ")", "<", "4", ")", ":", "raise", "template", ".", "Templa...
populates a template variable with the admin log for the given criteria .
train
false
33,461
def _accessible_libraries_list(user): return [lib for lib in modulestore().get_libraries() if has_studio_read_access(user, lib.location.library_key)]
[ "def", "_accessible_libraries_list", "(", "user", ")", ":", "return", "[", "lib", "for", "lib", "in", "modulestore", "(", ")", ".", "get_libraries", "(", ")", "if", "has_studio_read_access", "(", "user", ",", "lib", ".", "location", ".", "library_key", ")", ...
list all libraries available to the logged in user by iterating through all libraries .
train
false
33,463
def test_dcos_client_api(mock_dcos_client): args = get_args_from_env() args['auth_user'] = None cluster = DcosApiSession(**args) r = cluster.get('', node='123.123.123.123') r.raise_for_status() cluster.get('') cluster.post('') cluster.put('') cluster.delete('') cluster.head('') cluster.patch('') cluster.options('')
[ "def", "test_dcos_client_api", "(", "mock_dcos_client", ")", ":", "args", "=", "get_args_from_env", "(", ")", "args", "[", "'auth_user'", "]", "=", "None", "cluster", "=", "DcosApiSession", "(", "**", "args", ")", "r", "=", "cluster", ".", "get", "(", "''"...
tests two critical aspects of the dcosapisession 1 .
train
false
33,465
def createAMQPListener(username, password, vhost, exchange_name, spec=None, channel=1, verbose=False): if (not spec): spec = txamqp.spec.load(os.path.normpath(os.path.join(os.path.dirname(__file__), 'amqp0-8.xml'))) delegate = TwistedDelegate() factory = AMQPReconnectingFactory(username, password, delegate, vhost, spec, channel, exchange_name, verbose=verbose) return factory
[ "def", "createAMQPListener", "(", "username", ",", "password", ",", "vhost", ",", "exchange_name", ",", "spec", "=", "None", ",", "channel", "=", "1", ",", "verbose", "=", "False", ")", ":", "if", "(", "not", "spec", ")", ":", "spec", "=", "txamqp", ...
create an c{amqpreconnectingfactory} configured with the specified options .
train
false
33,466
def size(key, shape): dims = [] for i in range(2): selection = np.arange(shape[i])[key[i]] size = np.size(selection) dims.append(size) return tuple(dims)
[ "def", "size", "(", "key", ",", "shape", ")", ":", "dims", "=", "[", "]", "for", "i", "in", "range", "(", "2", ")", ":", "selection", "=", "np", ".", "arange", "(", "shape", "[", "i", "]", ")", "[", "key", "[", "i", "]", "]", "size", "=", ...
size -> str convert the length of a bytestream to human readable form .
train
false
33,468
def manipulateElementNode(elementNode, target): derivation = TranslateDerivation(elementNode) if (derivation.translateTetragrid == None): print 'Warning, translateTetragrid was None in translate so nothing will be done for:' print elementNode return matrix.setAttributesToMultipliedTetragrid(target, derivation.translateTetragrid)
[ "def", "manipulateElementNode", "(", "elementNode", ",", "target", ")", ":", "derivation", "=", "TranslateDerivation", "(", "elementNode", ")", "if", "(", "derivation", ".", "translateTetragrid", "==", "None", ")", ":", "print", "'Warning, translateTetragrid was None ...
manipulate the xml element .
train
false
33,469
@contextlib.contextmanager def _dummy_vm(session, instance, vdi_ref): name_label = 'dummy' vm_ref = create_vm(session, instance, name_label, None, None) try: vbd_ref = create_vbd(session, vm_ref, vdi_ref, 'autodetect', read_only=True) try: (yield vm_ref) finally: try: destroy_vbd(session, vbd_ref) except exception.StorageError: pass finally: destroy_vm(session, instance, vm_ref)
[ "@", "contextlib", ".", "contextmanager", "def", "_dummy_vm", "(", "session", ",", "instance", ",", "vdi_ref", ")", ":", "name_label", "=", "'dummy'", "vm_ref", "=", "create_vm", "(", "session", ",", "instance", ",", "name_label", ",", "None", ",", "None", ...
this creates a temporary vm so that we can snapshot a vdi .
train
false
33,470
def do_dictsort(value, case_sensitive=False, by='key'): if (by == 'key'): pos = 0 elif (by == 'value'): pos = 1 else: raise FilterArgumentError('You can only sort by either "key" or "value"') def sort_func(item): value = item[pos] if (isinstance(value, basestring) and (not case_sensitive)): value = value.lower() return value return sorted(value.items(), key=sort_func)
[ "def", "do_dictsort", "(", "value", ",", "case_sensitive", "=", "False", ",", "by", "=", "'key'", ")", ":", "if", "(", "by", "==", "'key'", ")", ":", "pos", "=", "0", "elif", "(", "by", "==", "'value'", ")", ":", "pos", "=", "1", "else", ":", "...
sort a dict and yield pairs .
train
true
33,473
def get_html_attrs(attrs): def _massage_attribute(value): if isinstance(value, (list, tuple)): return join_css_classes(value) return value attrs = dict(((key, _massage_attribute(value)) for (key, value) in six.iteritems(attrs) if (key and value))) return flatatt(attrs)
[ "def", "get_html_attrs", "(", "attrs", ")", ":", "def", "_massage_attribute", "(", "value", ")", ":", "if", "isinstance", "(", "value", ",", "(", "list", ",", "tuple", ")", ")", ":", "return", "join_css_classes", "(", "value", ")", "return", "value", "at...
flatten a dict into html attributes .
train
false
33,474
def test_clone_unknown_subprocess_error(mocker, clone_dir): mocker.patch('cookiecutter.vcs.subprocess.check_output', autospec=True, side_effect=[subprocess.CalledProcessError((-1), 'cmd', output='Something went wrong')]) with pytest.raises(subprocess.CalledProcessError): vcs.clone('https://github.com/pytest-dev/cookiecutter-pytest-plugin', clone_to_dir=clone_dir, no_input=True)
[ "def", "test_clone_unknown_subprocess_error", "(", "mocker", ",", "clone_dir", ")", ":", "mocker", ".", "patch", "(", "'cookiecutter.vcs.subprocess.check_output'", ",", "autospec", "=", "True", ",", "side_effect", "=", "[", "subprocess", ".", "CalledProcessError", "("...
in clone() .
train
false
33,475
def ParseJSONRequest(request): content_type = request.headers.get('Content-Type', '') if (not any((content_type.startswith(x) for x in _CONTENT_TYPES))): raise web.HTTPError(400, ('bad request content type: %s' % content_type)) json_dict = json.loads(request.body) return json_dict
[ "def", "ParseJSONRequest", "(", "request", ")", ":", "content_type", "=", "request", ".", "headers", ".", "get", "(", "'Content-Type'", ",", "''", ")", "if", "(", "not", "any", "(", "(", "content_type", ".", "startswith", "(", "x", ")", "for", "x", "in...
parse the json-encoded contents of the request body and return the python data object .
train
false
33,476
def lookupMailGroup(name, timeout=None): return getResolver().lookupMailGroup(name, timeout)
[ "def", "lookupMailGroup", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupMailGroup", "(", "name", ",", "timeout", ")" ]
perform an mg record lookup .
train
false
33,478
def render_template_with_bootstrap(text, context=None): if (not context): context = {} return render_template((u'{% load bootstrap3 %}' + text), context)
[ "def", "render_template_with_bootstrap", "(", "text", ",", "context", "=", "None", ")", ":", "if", "(", "not", "context", ")", ":", "context", "=", "{", "}", "return", "render_template", "(", "(", "u'{% load bootstrap3 %}'", "+", "text", ")", ",", "context",...
create a template text that first loads bootstrap3 .
train
false
33,479
def manual_install(file_): global PLUGIN_EXTENSION plugins_installed_before = set(__get_all_plugin_descriptors()) fileName = os.path.join(resources.PLUGINS, os.path.basename(file_)) shutil.copyfile(file_, fileName) zipFile = zipfile.ZipFile(fileName, u'r') zipFile.extractall(resources.PLUGINS) zipFile.close() os.remove(fileName) plugins_installed_after = set(__get_all_plugin_descriptors()) new_plugin = (plugins_installed_after - plugins_installed_before).pop() return new_plugin
[ "def", "manual_install", "(", "file_", ")", ":", "global", "PLUGIN_EXTENSION", "plugins_installed_before", "=", "set", "(", "__get_all_plugin_descriptors", "(", ")", ")", "fileName", "=", "os", ".", "path", ".", "join", "(", "resources", ".", "PLUGINS", ",", "...
copy zip file and install .
train
false
33,480
def flattenEvent(event): if (event.get('log_format', None) is None): return if ('log_flattened' in event): fields = event['log_flattened'] else: fields = {} keyFlattener = KeyFlattener() for (literalText, fieldName, formatSpec, conversion) in aFormatter.parse(event['log_format']): if (fieldName is None): continue if (conversion != 'r'): conversion = 's' flattenedKey = keyFlattener.flatKey(fieldName, formatSpec, conversion) structuredKey = keyFlattener.flatKey(fieldName, formatSpec, '') if (flattenedKey in fields): continue if fieldName.endswith(u'()'): fieldName = fieldName[:(-2)] callit = True else: callit = False field = aFormatter.get_field(fieldName, (), event) fieldValue = field[0] if (conversion == 'r'): conversionFunction = repr else: conversionFunction = unicode if callit: fieldValue = fieldValue() flattenedValue = conversionFunction(fieldValue) fields[flattenedKey] = flattenedValue fields[structuredKey] = fieldValue if fields: event['log_flattened'] = fields
[ "def", "flattenEvent", "(", "event", ")", ":", "if", "(", "event", ".", "get", "(", "'log_format'", ",", "None", ")", "is", "None", ")", ":", "return", "if", "(", "'log_flattened'", "in", "event", ")", ":", "fields", "=", "event", "[", "'log_flattened'...
flatten the given event by pre-associating format fields with specific objects and callable results in a l{dict} put into the c{"log_flattened"} key in the event .
train
false
33,481
def _maybe_fill(arr, fill_value=np.nan): if _is_na_compat(arr, fill_value): arr.fill(fill_value) return arr
[ "def", "_maybe_fill", "(", "arr", ",", "fill_value", "=", "np", ".", "nan", ")", ":", "if", "_is_na_compat", "(", "arr", ",", "fill_value", ")", ":", "arr", ".", "fill", "(", "fill_value", ")", "return", "arr" ]
if we have a compatiable fill_value and arr dtype .
train
false
33,482
def _fix_ctx(m2_ctx, issuer=None): ctx = _Ctx.from_address(int(m2_ctx)) ctx.flags = 0 ctx.subject_cert = None ctx.subject_req = None ctx.crl = None if (issuer is None): ctx.issuer_cert = None else: ctx.issuer_cert = int(issuer.x509)
[ "def", "_fix_ctx", "(", "m2_ctx", ",", "issuer", "=", "None", ")", ":", "ctx", "=", "_Ctx", ".", "from_address", "(", "int", "(", "m2_ctx", ")", ")", "ctx", ".", "flags", "=", "0", "ctx", ".", "subject_cert", "=", "None", "ctx", ".", "subject_req", ...
this is part of an ugly hack to fix an ancient bug in m2crypto URL#c13 .
train
true
33,483
def xml_find(xpath): def xpath_find(value): validate(ET.iselement, value) value = value.find(xpath) if (value is None): raise ValueError("XPath '{0}' did not return an element".format(xpath)) return validate(ET.iselement, value) return transform(xpath_find)
[ "def", "xml_find", "(", "xpath", ")", ":", "def", "xpath_find", "(", "value", ")", ":", "validate", "(", "ET", ".", "iselement", ",", "value", ")", "value", "=", "value", ".", "find", "(", "xpath", ")", "if", "(", "value", "is", "None", ")", ":", ...
find a xml element via xpath .
train
true
33,484
def dict2str(data): result = '' for k in data: if (data[k] is not None): result += ('%s %s\n' % (str(k), str(data[k]))) else: result += ('%s\n' % str(k)) return result
[ "def", "dict2str", "(", "data", ")", ":", "result", "=", "''", "for", "k", "in", "data", ":", "if", "(", "data", "[", "k", "]", "is", "not", "None", ")", ":", "result", "+=", "(", "'%s %s\\n'", "%", "(", "str", "(", "k", ")", ",", "str", "(",...
create a string with a whitespace and newline delimited text from a dictionary .
train
false
33,485
def lte(name, value): ret = {'name': name, 'result': False, 'comment': '', 'changes': {}} if (name not in __reg__): ret['result'] = False ret['comment'] = 'Value {0} not in register'.format(name) return ret if (__reg__[name]['val'] <= value): ret['result'] = True return ret
[ "def", "lte", "(", "name", ",", "value", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'result'", ":", "False", ",", "'comment'", ":", "''", ",", "'changes'", ":", "{", "}", "}", "if", "(", "name", "not", "in", "__reg__", ")", ":", "r...
returns a boolean of whether the value is less than or equal to the argument .
train
true
33,487
@pytest.mark.parametrize('invalid_value', [None, (), 69, 69L]) def test_unparse_multistring_invalid(invalid_value): assert (unparse_multistring(invalid_value) == invalid_value)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'invalid_value'", ",", "[", "None", ",", "(", ")", ",", "69", ",", "69", "L", "]", ")", "def", "test_unparse_multistring_invalid", "(", "invalid_value", ")", ":", "assert", "(", "unparse_multistring", "...
tests unparsing does nothing for unsupported values .
train
false
33,488
def read_pid_file(filename): try: (pid, port) = open(filename, 'r').readlines() (pid, port) = (int(pid), int(port)) except ValueError: try: (pid, port) = (int(open(filename, 'r').read()), None) except ValueError: (pid, port) = (None, None) return (pid, port)
[ "def", "read_pid_file", "(", "filename", ")", ":", "try", ":", "(", "pid", ",", "port", ")", "=", "open", "(", "filename", ",", "'r'", ")", ".", "readlines", "(", ")", "(", "pid", ",", "port", ")", "=", "(", "int", "(", "pid", ")", ",", "int", ...
reads a pid file and returns the contents .
train
false