id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
33,627
def create_course_image_thumbnail(course, dimensions):
    """Generate a thumbnail for the course's image and return its URL.

    ``course`` supplies the course id and image filename; ``dimensions`` is the
    requested thumbnail size, forwarded to the content store.
    """
    asset_key = StaticContent.compute_location(course.id, course.course_image)
    image_content = AssetManager.find(asset_key)
    _content, thumbnail_location = contentstore().generate_thumbnail(
        image_content, dimensions=dimensions)
    return StaticContent.serialize_asset_key_with_slash(thumbnail_location)
[ "def", "create_course_image_thumbnail", "(", "course", ",", "dimensions", ")", ":", "course_image_asset_key", "=", "StaticContent", ".", "compute_location", "(", "course", ".", "id", ",", "course", ".", "course_image", ")", "course_image", "=", "AssetManager", ".", ...
create a course image thumbnail and return the url .
train
false
33,632
@click.command(name='show_snapshots')
@click.option('--repository', type=str, required=True, help='Snapshot repository name')
@click.option('--ignore_empty_list', is_flag=True, help='Do not raise exception if there are no actionable snapshots')
@click.option('--filter_list', callback=validate_filter_json, default='{"filtertype":"none"}', help='JSON string representing an array of filters.')
@click.pass_context
def show_snapshots_singleton(ctx, repository, ignore_empty_list, filter_list):
    """Print, sorted, every snapshot in ``repository`` that survives the filters."""
    action = 'delete_snapshots'  # filter schema shared with the delete action
    client_args = ctx.obj['config']['client']
    es_client = get_client(**client_args)
    log = logging.getLogger(__name__)
    log.debug('Validating provided filters: {0}'.format(filter_list))
    checked_filters = {'filters': filter_schema_check(action, filter_list)}
    snapshot_list = SnapshotList(es_client, repository=repository)
    _do_filters(snapshot_list, checked_filters, ignore_empty_list)
    for snapshot_name in sorted(snapshot_list.snapshots):
        click.secho('{0}'.format(snapshot_name))
[ "@", "click", ".", "command", "(", "name", "=", "'show_snapshots'", ")", "@", "click", ".", "option", "(", "'--repository'", ",", "type", "=", "str", ",", "required", "=", "True", ",", "help", "=", "'Snapshot repository name'", ")", "@", "click", ".", "o...
show snapshots .
train
false
33,633
def getHashProcessor(kexAlgorithm):
    """Return the hash callable associated with the given key-exchange algorithm."""
    return getKex(kexAlgorithm).hashProcessor
[ "def", "getHashProcessor", "(", "kexAlgorithm", ")", ":", "kex", "=", "getKex", "(", "kexAlgorithm", ")", "return", "kex", ".", "hashProcessor" ]
get the hash algorithm callable to use in key exchange .
train
false
33,634
def dependent(a, b):
    """Return True when the random expressions ``a`` and ``b`` are dependent.

    A probability-space independence check short-circuits the (possibly
    expensive) symbolic conditional-density comparisons.
    """
    if pspace_independent(a, b):
        return False
    z = Symbol('z', real=True)
    # Lazy `or`: the second density comparison only runs if the first is equal.
    return (density(a, Eq(b, z)) != density(a)
            or density(b, Eq(a, z)) != density(b))
[ "def", "dependent", "(", "a", ",", "b", ")", ":", "if", "pspace_independent", "(", "a", ",", "b", ")", ":", "return", "False", "z", "=", "Symbol", "(", "'z'", ",", "real", "=", "True", ")", "return", "(", "(", "density", "(", "a", ",", "Eq", "(...
dependence of two random expressions two expressions are independent if knowledge of one does not change computations on the other .
train
false
33,635
def list_hardware():
    """Return lshw's JSON description of the local machine's hardware,
    discarding lshw's stderr output."""
    with open(os.devnull, 'w') as null_sink:
        return check_output(['lshw', '-quiet', '-json'], stderr=null_sink)
[ "def", "list_hardware", "(", ")", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "devnull", ":", "return", "check_output", "(", "[", "'lshw'", ",", "'-quiet'", ",", "'-json'", "]", ",", "stderr", "=", "devnull", ")" ]
list the hardware on the local machine .
train
false
33,636
def add_form_errors_as_messages(request, form):
    """Copy every validation error on ``form`` into Django's message framework.

    Field errors are prefixed with the field's label; non-field errors get an
    empty prefix.  Returns the number of messages emitted.
    """
    emitted = 0
    for field_name, error_list in form.errors.items():
        label = form[field_name].label if field_name != NON_FIELD_ERRORS else u''
        for error in error_list:
            messages.error(request, u'%s %s' % (label, error))
            emitted += 1
    return emitted
[ "def", "add_form_errors_as_messages", "(", "request", ",", "form", ")", ":", "n_messages", "=", "0", "for", "(", "field_name", ",", "errors", ")", "in", "form", ".", "errors", ".", "items", "(", ")", ":", "if", "(", "field_name", "!=", "NON_FIELD_ERRORS", ...
add the forms errors .
train
false
33,637
def assert_fp_equal(x, y, err_msg='', nulp=50):
    """Assert ``x`` and ``y`` agree to within ``nulp`` units in the last place,
    appending ``err_msg`` to any failure message."""
    try:
        assert_array_almost_equal_nulp(x, y, nulp)
    except AssertionError as exc:
        raise AssertionError('%s\n%s' % (exc, err_msg))
[ "def", "assert_fp_equal", "(", "x", ",", "y", ",", "err_msg", "=", "''", ",", "nulp", "=", "50", ")", ":", "try", ":", "assert_array_almost_equal_nulp", "(", "x", ",", "y", ",", "nulp", ")", "except", "AssertionError", "as", "e", ":", "raise", "Asserti...
assert two arrays are equal .
train
false
33,639
def is_relative_path(p):
    """Return True if path ``p`` is relative.

    Backslashes are normalized to forward slashes first; on Windows a drive
    prefix such as ``C:/`` also counts as absolute.
    """
    p = p.replace('\\', '/')
    if p and p[0] == '/':
        return False
    if sabnzbd.WIN32 and len(p) > 2:
        if p[0].isalpha() and p[1] == ':' and p[2] == '/':
            return False
    return True
[ "def", "is_relative_path", "(", "p", ")", ":", "p", "=", "p", ".", "replace", "(", "'\\\\'", ",", "'/'", ")", "if", "(", "p", "and", "(", "p", "[", "0", "]", "==", "'/'", ")", ")", ":", "return", "False", "if", "(", "sabnzbd", ".", "WIN32", "...
return true if path is relative .
train
false
33,641
def get_oauthlib_core():
    """Build and return the configured OAuth2 backend (validator -> server -> backend)."""
    validator = oauth2_settings.OAUTH2_VALIDATOR_CLASS()
    oauth_server = oauth2_settings.OAUTH2_SERVER_CLASS(validator)
    backend_cls = oauth2_settings.OAUTH2_BACKEND_CLASS
    return backend_cls(oauth_server)
[ "def", "get_oauthlib_core", "(", ")", ":", "validator", "=", "oauth2_settings", ".", "OAUTH2_VALIDATOR_CLASS", "(", ")", "server", "=", "oauth2_settings", ".", "OAUTH2_SERVER_CLASS", "(", "validator", ")", "return", "oauth2_settings", ".", "OAUTH2_BACKEND_CLASS", "(",...
utility function that takes a request and returns an instance of oauth2_provider .
train
false
33,642
def rotN(l, N):
    """Rotate list ``l`` left by ``N`` positions and return the new list.

    ``N`` must not exceed ``len(l)``.
    """
    assert len(l) >= N, "Can't rotate list by longer than its length."
    head, tail = l[N:], l[:N]
    return head + tail
[ "def", "rotN", "(", "l", ",", "N", ")", ":", "assert", "(", "len", "(", "l", ")", ">=", "N", ")", ",", "\"Can't rotate list by longer than its length.\"", "return", "(", "l", "[", "N", ":", "]", "+", "l", "[", ":", "N", "]", ")" ]
rotate a list of elements .
train
false
33,643
def getLoopsUnion(importRadius, loopLists):
    """Return the union of the loop lists as descending-area oriented loops."""
    allPoints = []
    corners = getLoopsListsIntersections(loopLists)
    radiusSideNegative = -0.01 * importRadius
    intercircle.directLoopLists(True, loopLists)
    for index, insetLoops in enumerate(loopLists):
        inBetweenLoops = getInBetweenLoopsFromLoops(insetLoops, importRadius)
        # All loops from the other lists, concatenated.
        otherLoops = euclidean.getConcatenatedList(loopLists[:index] + loopLists[index + 1:])
        corners += getInsetPointsByInsetLoops(insetLoops, False, otherLoops, radiusSideNegative)
        allPoints += getInsetPointsByInsetLoops(inBetweenLoops, False, otherLoops, radiusSideNegative)
    allPoints += corners[:]
    return triangle_mesh.getDescendingAreaOrientedLoops(allPoints, corners, importRadius)
[ "def", "getLoopsUnion", "(", "importRadius", ",", "loopLists", ")", ":", "allPoints", "=", "[", "]", "corners", "=", "getLoopsListsIntersections", "(", "loopLists", ")", "radiusSideNegative", "=", "(", "(", "-", "0.01", ")", "*", "importRadius", ")", "intercir...
get joined loops sliced through shape .
train
false
33,644
def del_prefix(key):
    """Strip any ``'||'``-delimited prefixes, keeping the last segment of ``key``."""
    return key.rsplit(u'||', 1)[-1]
[ "def", "del_prefix", "(", "key", ")", ":", "return", "key", ".", "split", "(", "u'||'", ")", "[", "(", "-", "1", ")", "]" ]
removes prefixes from the key .
train
false
33,645
@gen.coroutine
def UpdatePhoto(client, obj_store, user_id, device_id, request):
    """Validate the activity id, then queue a Photo.UpdateOperation for this user.

    ``request`` is reshaped in place: its headers and activity sections are
    split out, and whatever remains becomes the photo dict.  Returns an empty
    dict via gen.Return.
    """
    request['user_id'] = user_id
    # Older clients (before ADD_OP_HEADER_VERSION) allocated activity ids on device 0.
    if (request['headers']['original_version'] >= Message.ADD_OP_HEADER_VERSION):
        (yield Activity.VerifyActivityId(client, user_id, device_id, request['activity']['activity_id']))
    else:
        (yield Activity.VerifyActivityId(client, user_id, 0, request['activity']['activity_id']))
    # Re-shape the flat request into the operation's expected sections.
    request = {'headers': request.pop('headers'), 'act_dict': request.pop('activity'), 'ph_dict': request}
    (yield gen.Task(Operation.CreateAndExecute, client, user_id, device_id, 'Photo.UpdateOperation', request))
    logging.info(('UPDATE PHOTO: user: %d, device: %d, photo: %s' % (user_id, device_id, request['ph_dict']['photo_id'])))
    raise gen.Return({})
[ "@", "gen", ".", "coroutine", "def", "UpdatePhoto", "(", "client", ",", "obj_store", ",", "user_id", ",", "device_id", ",", "request", ")", ":", "request", "[", "'user_id'", "]", "=", "user_id", "if", "(", "request", "[", "'headers'", "]", "[", "'origina...
updates photo metadata .
train
false
33,647
def max_rectangle_area(histogram):
    """Return the area of the largest rectangle fitting entirely under ``histogram``.

    Classic stack-based sweep: bars are pushed while heights rise and popped
    (closing their rectangles) when a lower bar arrives.
    """
    open_rects = []  # (start index, height) of still-open rectangles
    best = 0
    pos = 0
    for pos, height in enumerate(histogram):
        start = pos
        while True:
            if not open_rects or height > open_rects[-1][1]:
                open_rects.append((start, height))
            elif open_rects and height < open_rects[-1][1]:
                top_start, top_height = open_rects.pop()
                best = max(best, top_height * (pos - top_start))
                start = top_start
                continue
            break
    pos += 1
    # Close out every rectangle still open at the right edge.
    for start, height in open_rects:
        best = max(best, height * (pos - start))
    return best
[ "def", "max_rectangle_area", "(", "histogram", ")", ":", "stack", "=", "[", "]", "top", "=", "(", "lambda", ":", "stack", "[", "(", "-", "1", ")", "]", ")", "max_area", "=", "0", "pos", "=", "0", "for", "(", "pos", ",", "height", ")", "in", "en...
find the area of the largest rectangle that fits entirely under the histogram .
train
false
33,648
def test_relative():
    """Relative links are stripped by default and kept with allow_relative=True."""
    markup = 'some <a href="/foo/bar">link</a>'
    eq_('some link', bleach.delinkify(markup))
    eq_(markup, bleach.delinkify(markup, allow_relative=True))
[ "def", "test_relative", "(", ")", ":", "html", "=", "'some <a href=\"/foo/bar\">link</a>'", "eq_", "(", "'some link'", ",", "bleach", ".", "delinkify", "(", "html", ")", ")", "eq_", "(", "html", ",", "bleach", ".", "delinkify", "(", "html", ",", "allow_relat...
relative links are optionally ok .
train
false
33,650
def reset():
    """Clear the rules cached by the module-level runtime used for policy checks."""
    _runtime.reset()
[ "def", "reset", "(", ")", ":", "_runtime", ".", "reset", "(", ")" ]
clear the rules used for policy checks .
train
false
33,651
def ensure_geometry(geom):
    """Return ``geom`` unchanged, raising SpatialError when it does not look
    like a GEOS geometry (i.e. it lacks a ``geom_type`` attribute)."""
    if not hasattr(geom, u'geom_type'):
        raise SpatialError(u"Point '%s' doesn't appear to be a GEOS geometry." % geom)
    return geom
[ "def", "ensure_geometry", "(", "geom", ")", ":", "if", "(", "not", "hasattr", "(", "geom", ",", "u'geom_type'", ")", ")", ":", "raise", "SpatialError", "(", "(", "u\"Point '%s' doesn't appear to be a GEOS geometry.\"", "%", "geom", ")", ")", "return", "geom" ]
makes sure the parameter passed in looks like a geos geosgeometry .
train
false
33,652
def sampling_density(expr, given_condition=None, numsamples=1, **kwargs):
    """Monte-Carlo version of density(): tally how often each sampled value of
    ``expr`` (under ``given_condition``) occurs over ``numsamples`` draws."""
    counts = {}
    for value in sample_iter(expr, given_condition, numsamples=numsamples, **kwargs):
        counts[value] = counts.get(value, 0) + 1
    return counts
[ "def", "sampling_density", "(", "expr", ",", "given_condition", "=", "None", ",", "numsamples", "=", "1", ",", "**", "kwargs", ")", ":", "results", "=", "{", "}", "for", "result", "in", "sample_iter", "(", "expr", ",", "given_condition", ",", "numsamples",...
sampling version of density see also density sampling_p sampling_e .
train
false
33,653
def _parameter_objects(parameter_objects_from_pillars, parameter_object_overrides):
    """Build the pipeline's parameter-object list.

    ``parameter_objects_from_pillars`` is the pillar key to look up;
    ``parameter_object_overrides`` wins on key collisions.
    """
    merged = copy.deepcopy(__salt__['pillar.get'](parameter_objects_from_pillars))
    merged.update(parameter_object_overrides)
    objects = _standardize(_dict_to_list_ids(merged))
    for obj in objects:
        obj['attributes'] = _properties_from_dict(obj['attributes'])
    return objects
[ "def", "_parameter_objects", "(", "parameter_objects_from_pillars", ",", "parameter_object_overrides", ")", ":", "from_pillars", "=", "copy", ".", "deepcopy", "(", "__salt__", "[", "'pillar.get'", "]", "(", "parameter_objects_from_pillars", ")", ")", "from_pillars", "."...
return a list of parameter objects that configure the pipeline parameter_objects_from_pillars the pillar key to use for lookup parameter_object_overrides parameter objects to use .
train
true
33,654
def task_compress_bz2():
    """Benchmark task: bz2-compress a chunk of this source file (C implementation).

    Returns a (callable, args) pair for the benchmark driver.
    """
    import bz2
    with open(__file__, 'rb') as source:
        payload = source.read(3000) * 2

    def compress(data):
        bz2.compress(data)

    return (compress, (payload,))
[ "def", "task_compress_bz2", "(", ")", ":", "import", "bz2", "with", "open", "(", "__file__", ",", "'rb'", ")", "as", "f", ":", "arg", "=", "(", "f", ".", "read", "(", "3000", ")", "*", "2", ")", "def", "compress", "(", "s", ")", ":", "bz2", "."...
bz2 compression (c) .
train
false
33,655
@register.inclusion_tag('admin/cms/page/plugin/submit_line.html', takes_context=True)
def submit_row_plugin(context):
    """Build the template context for the save/delete button row of the plugin form."""
    opts = context['opts']
    change = context['change']
    is_popup = context['is_popup']
    save_as = context['save_as']
    row = {
        'opts': opts,
        'show_delete_link': (context.get('has_delete_permission', False)
                             and change and context.get('show_delete', True)),
        'show_save_as_new': not is_popup and change and save_as,
        'show_save_and_add_another': (context['has_add_permission'] and not is_popup
                                      and (not save_as or context['add'])),
        'show_save_and_continue': not is_popup and context['has_change_permission'],
        'is_popup': is_popup,
        'show_save': True,
        'preserved_filters': context.get('preserved_filters'),
    }
    if context.get('original') is not None:
        row['original'] = context['original']
    return row
[ "@", "register", ".", "inclusion_tag", "(", "'admin/cms/page/plugin/submit_line.html'", ",", "takes_context", "=", "True", ")", "def", "submit_row_plugin", "(", "context", ")", ":", "opts", "=", "context", "[", "'opts'", "]", "change", "=", "context", "[", "'cha...
displays the row of buttons for delete and save .
train
false
33,656
def _compare_dipoles(orig, new): assert_allclose(orig.times, new.times, atol=0.001, err_msg='times') assert_allclose(orig.pos, new.pos, err_msg='pos') assert_allclose(orig.amplitude, new.amplitude, err_msg='amplitude') assert_allclose(orig.gof, new.gof, err_msg='gof') assert_allclose(orig.ori, new.ori, rtol=0.0001, atol=0.0001, err_msg='ori') assert_equal(orig.name, new.name)
[ "def", "_compare_dipoles", "(", "orig", ",", "new", ")", ":", "assert_allclose", "(", "orig", ".", "times", ",", "new", ".", "times", ",", "atol", "=", "0.001", ",", "err_msg", "=", "'times'", ")", "assert_allclose", "(", "orig", ".", "pos", ",", "new"...
compare dipole results for equivalence .
train
false
33,658
def str_repeat(arr, repeats):
    """Duplicate each string in the Series/Index ``repeats`` times.

    ``repeats`` may be a scalar (same count for every element) or a sequence
    aligned with ``arr``.  Bytes and text elements are both supported: the
    bytes multiply is tried first and TypeError falls back to text.
    """
    if is_scalar(repeats):
        def rep(x):
            try:
                return compat.binary_type.__mul__(x, repeats)
            except TypeError:
                return compat.text_type.__mul__(x, repeats)
        # NA values are handled by the mapping helper.
        return _na_map(rep, arr)
    else:
        def rep(x, r):
            try:
                return compat.binary_type.__mul__(x, r)
            except TypeError:
                return compat.text_type.__mul__(x, r)
        repeats = np.asarray(repeats, dtype=object)
        result = lib.vec_binop(_values_from_object(arr), repeats, rep)
        return result
[ "def", "str_repeat", "(", "arr", ",", "repeats", ")", ":", "if", "is_scalar", "(", "repeats", ")", ":", "def", "rep", "(", "x", ")", ":", "try", ":", "return", "compat", ".", "binary_type", ".", "__mul__", "(", "x", ",", "repeats", ")", "except", "...
duplicate each string in the series/index by indicated number of times .
train
false
33,659
@jit(nopython=True)
def _generate_sample_paths_sparse(P_cdfs1d, indices, indptr, init_states, random_values, out):
    """Fill ``out`` with Markov-chain sample paths for a sparse transition matrix.

    ``P_cdfs1d`` holds the row-wise transition CDFs concatenated into one 1-d
    array; ``indices``/``indptr`` look like CSR structure arrays (TODO confirm
    against the caller).  ``out`` has shape (num_reps, ts_length); row i starts
    at init_states[i] and each step draws the next state by inverse-CDF lookup
    against random_values[i, t].
    """
    (num_reps, ts_length) = out.shape
    for i in range(num_reps):
        out[(i, 0)] = init_states[i]
        for t in range((ts_length - 1)):
            # Search the current state's CDF slice for the drawn uniform value.
            k = searchsorted(P_cdfs1d[indptr[out[(i, t)]]:indptr[(out[(i, t)] + 1)]], random_values[(i, t)])
            out[(i, (t + 1))] = indices[(indptr[out[(i, t)]] + k)]
[ "@", "jit", "(", "nopython", "=", "True", ")", "def", "_generate_sample_paths_sparse", "(", "P_cdfs1d", ",", "indices", ",", "indptr", ",", "init_states", ",", "random_values", ",", "out", ")", ":", "(", "num_reps", ",", "ts_length", ")", "=", "out", ".", ...
for sparse matrix .
train
true
33,660
def runner_argspec(module=''):
    """Return the argument specification of functions in Salt runner modules,
    optionally limited to ``module``."""
    runner = salt.runner.Runner(__opts__)
    return salt.utils.argspec_report(runner.functions, module)
[ "def", "runner_argspec", "(", "module", "=", "''", ")", ":", "run_", "=", "salt", ".", "runner", ".", "Runner", "(", "__opts__", ")", "return", "salt", ".", "utils", ".", "argspec_report", "(", "run_", ".", "functions", ",", "module", ")" ]
return the argument specification of functions in salt runner modules .
train
true
33,661
def message_from_binary_file(fp, *args, **kws):
    """Read a binary file object and parse its contents into a Message object.

    Extra positional/keyword arguments are forwarded to BytesParser.
    """
    from email.parser import BytesParser
    parser = BytesParser(*args, **kws)
    return parser.parse(fp)
[ "def", "message_from_binary_file", "(", "fp", ",", "*", "args", ",", "**", "kws", ")", ":", "from", "email", ".", "parser", "import", "BytesParser", "return", "BytesParser", "(", "*", "args", ",", "**", "kws", ")", ".", "parse", "(", "fp", ")" ]
read a binary file and parse its contents into a message object model .
train
true
33,662
def getPythonFileNamesExceptInitRecursively(directoryName=''):
    """Collect, sorted, the python file names (excluding __init__.py) of every
    python directory found under ``directoryName``."""
    fileNames = []
    for packageDirectory in getPythonDirectoryNamesRecursively(directoryName):
        fileNames += getPythonFileNamesExceptInit(os.path.join(packageDirectory, '__init__.py'))
    fileNames.sort()
    return fileNames
[ "def", "getPythonFileNamesExceptInitRecursively", "(", "directoryName", "=", "''", ")", ":", "pythonDirectoryNames", "=", "getPythonDirectoryNamesRecursively", "(", "directoryName", ")", "pythonFileNamesExceptInitRecursively", "=", "[", "]", "for", "pythonDirectoryName", "in"...
get the python filenames of the directory recursively .
train
false
33,663
def add_code_cell(work_notebook, code):
    """Append a Jupyter code cell containing ``code`` (stripped) to the notebook dict.

    Parameters
    ----------
    work_notebook : dict
        Notebook structure with a 'cells' list, mutated in place.
    code : str
        Cell content.
    """
    cell = {
        'cell_type': 'code',
        'execution_count': None,
        'metadata': {'collapsed': False},
        'outputs': [],
        'source': [code.strip()],
    }
    work_notebook['cells'].append(cell)
[ "def", "add_code_cell", "(", "work_notebook", ",", "code", ")", ":", "code_cell", "=", "{", "'cell_type'", ":", "'code'", ",", "'execution_count'", ":", "None", ",", "'metadata'", ":", "{", "'collapsed'", ":", "False", "}", ",", "'outputs'", ":", "[", "]",...
add a code cell to the notebook parameters code : str cell content .
train
true
33,664
def run_make_files(make_docs=False, make_ui_files=True, force_compile_protos=False, sync_artifacts=True):
    """Build generated assets (protos, artifacts, docs, UI bundles) from sources."""
    proto_cmd = ['python', 'makefile.py']
    if force_compile_protos:
        proto_cmd.append('--clean')
    subprocess.check_call(proto_cmd)
    if sync_artifacts:
        subprocess.check_call(['python', 'makefile.py'], cwd='grr/artifacts')
    if make_docs:
        subprocess.check_call(['python', 'makefile.py'], cwd='docs')
    if make_ui_files:
        static_dir = 'grr/gui/static'
        subprocess.check_call(['npm', 'install'], cwd=static_dir)
        subprocess.check_call(['npm', 'install', '-g', 'bower', 'gulp'], cwd=static_dir)
        subprocess.check_call(['bower', 'update'], cwd=static_dir)
        subprocess.check_call(['gulp', 'compile'], cwd=static_dir)
[ "def", "run_make_files", "(", "make_docs", "=", "False", ",", "make_ui_files", "=", "True", ",", "force_compile_protos", "=", "False", ",", "sync_artifacts", "=", "True", ")", ":", "if", "force_compile_protos", ":", "subprocess", ".", "check_call", "(", "[", "...
builds necessary assets from sources .
train
false
33,665
def MakeRpcServer(option_dict):
    """Create an authenticated HttpRpcServer aimed at the admin console host."""
    dummy_auth = lambda: ('unused_email', 'unused_password')
    server = appengine_rpc.HttpRpcServer(
        option_dict[ARG_ADMIN_CONSOLE_SERVER], dummy_auth,
        appcfg.GetUserAgent(), appcfg.GetSourceName(),
        host_override=option_dict[ARG_ADMIN_CONSOLE_HOST])
    server.authenticated = True
    return server
[ "def", "MakeRpcServer", "(", "option_dict", ")", ":", "server", "=", "appengine_rpc", ".", "HttpRpcServer", "(", "option_dict", "[", "ARG_ADMIN_CONSOLE_SERVER", "]", ",", "(", "lambda", ":", "(", "'unused_email'", ",", "'unused_password'", ")", ")", ",", "appcfg...
create a new httprpcserver .
train
false
33,667
def combine_envs(*envs):
    """Combine zero or more dicts of environment variables into one by
    delegating to _combine_envs_helper with non-local semantics."""
    return _combine_envs_helper(envs, local=False)
[ "def", "combine_envs", "(", "*", "envs", ")", ":", "return", "_combine_envs_helper", "(", "envs", ",", "local", "=", "False", ")" ]
combine zero or more dictionaries containing environment variables .
train
false
33,668
@register.as_tag
def profile_form(user):
    """Template tag: return the profile form bound to ``user``, or '' for non-users."""
    if not isinstance(user, User):
        return u''
    form_class = get_profile_form()
    return form_class(instance=user)
[ "@", "register", ".", "as_tag", "def", "profile_form", "(", "user", ")", ":", "if", "isinstance", "(", "user", ",", "User", ")", ":", "return", "get_profile_form", "(", ")", "(", "instance", "=", "user", ")", "return", "u''" ]
returns the profile form for a user: {% if request .
train
false
33,670
def is_flat_source(source):
    """Return True when ``source`` is a string or a non-nested tuple/list.

    A tuple/list containing another tuple/list yields False; anything that is
    neither a sequence nor a string raises TypeError.
    """
    if isinstance(source, (tuple, list)):
        return not any(isinstance(item, (tuple, list)) for item in source)
    if isinstance(source, str):
        return True
    raise TypeError('source should be a string or a non-nested tuple/list of strings: %s' % source)
[ "def", "is_flat_source", "(", "source", ")", ":", "if", "isinstance", "(", "source", ",", "(", "tuple", ",", "list", ")", ")", ":", "for", "sub_source", "in", "source", ":", "if", "isinstance", "(", "sub_source", ",", "(", "tuple", ",", "list", ")", ...
returns true for a string or a non-nested tuple of strings parameters source : writeme returns writeme .
train
false
33,671
def errorInFile(f, line=17, name=''):
    """Return ``file:line:name``, a location format emacs recognizes as an error point."""
    location = (f, line, name)
    return '%s:%d:%s' % location
[ "def", "errorInFile", "(", "f", ",", "line", "=", "17", ",", "name", "=", "''", ")", ":", "return", "(", "'%s:%d:%s'", "%", "(", "f", ",", "line", ",", "name", ")", ")" ]
return a filename formatted so emacs will recognize it as an error point .
train
false
33,672
def set_user(user):
    """Set the current user on the module-level store."""
    _store.user = user
[ "def", "set_user", "(", "user", ")", ":", "_store", ".", "user", "=", "user" ]
set current user .
train
false
33,673
def get_monitor_timeout(scheme=None):
    """Return the display-off ('Turn off display after') minute value of the
    given power scheme, via the powercfg helper."""
    return _get_powercfg_minute_values(scheme, 'SUB_VIDEO', 'VIDEOIDLE', 'Turn off display after')
[ "def", "get_monitor_timeout", "(", "scheme", "=", "None", ")", ":", "return", "_get_powercfg_minute_values", "(", "scheme", ",", "'SUB_VIDEO'", ",", "'VIDEOIDLE'", ",", "'Turn off display after'", ")" ]
get the current monitor timeout of the given scheme cli example: .
train
false
33,676
def getFileInGivenDirectory(directory, fileName):
    """Return the text of ``fileName`` in ``directory``, matching the name
    case-insensitively; '' when nothing matches."""
    wantedName = fileName.lower()
    for entry in os.listdir(directory):
        if entry.lower() == wantedName:
            return getFileTextGivenDirectoryFileName(directory, entry)
    return ''
[ "def", "getFileInGivenDirectory", "(", "directory", ",", "fileName", ")", ":", "directoryListing", "=", "os", ".", "listdir", "(", "directory", ")", "lowerFileName", "=", "fileName", ".", "lower", "(", ")", "for", "directoryFile", "in", "directoryListing", ":", ...
get the file from the filename or the lowercase filename in the given directory .
train
false
33,681
def ImportConfig(filename, config):
    """Copy keys and user accounts from an old config file into ``config``.

    Only the 'PrivateKeys' section and a whitelist of certificate/key entries
    are imported.  Returns the number of options copied.
    """
    sections_to_import = ['PrivateKeys']
    entries_to_import = ['Client.executable_signing_public_key', 'CA.certificate',
                         'Frontend.certificate']
    options_imported = 0
    old_config = config_lib.CONFIG.MakeNewConfig()
    old_config.Initialize(filename)
    for entry in old_config.raw_data.keys():
        try:
            section = entry.split('.')[0]
            wanted = section in sections_to_import or entry in entries_to_import
            if wanted:
                config.Set(entry, old_config.Get(entry))
                print ('Imported %s.' % entry)
                options_imported += 1
        except Exception as e:  # best-effort: report the entry and keep going
            print ('Exception during import of %s: %s' % (entry, e))
    return options_imported
[ "def", "ImportConfig", "(", "filename", ",", "config", ")", ":", "sections_to_import", "=", "[", "'PrivateKeys'", "]", "entries_to_import", "=", "[", "'Client.executable_signing_public_key'", ",", "'CA.certificate'", ",", "'Frontend.certificate'", "]", "options_imported",...
reads an old config file and imports keys and user accounts .
train
true
33,683
@register.simple_tag(takes_context=True)
def slugurl(context, slug):
    """Template tag: URL of the first page with the given slug, or None."""
    match = Page.objects.filter(slug=slug).first()
    if match is None:
        return None
    return match.relative_url(context[u'request'].site)
[ "@", "register", ".", "simple_tag", "(", "takes_context", "=", "True", ")", "def", "slugurl", "(", "context", ",", "slug", ")", ":", "page", "=", "Page", ".", "objects", ".", "filter", "(", "slug", "=", "slug", ")", ".", "first", "(", ")", "if", "p...
returns the url for the page that has the given slug .
train
false
33,684
def subst_vars(s, local_vars):
    """Perform shell/Perl-style variable substitution on ``s``.

    Every occurrence of ``$name`` is replaced with local_vars[name] when the
    name is present there, falling back to os.environ[name].  Raises
    ValueError for a variable found in neither mapping.
    """
    check_environ()

    def _subst(match, local_vars=local_vars):
        var_name = match.group(1)
        if var_name in local_vars:
            return str(local_vars[var_name])
        else:
            return os.environ[var_name]

    try:
        return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
    except KeyError as var:
        # Fixed: was the Python 2-only "raise ValueError, ..." statement form,
        # which is a SyntaxError on Python 3.
        raise ValueError("invalid variable '$%s'" % var)
[ "def", "subst_vars", "(", "s", ",", "local_vars", ")", ":", "check_environ", "(", ")", "def", "_subst", "(", "match", ",", "local_vars", "=", "local_vars", ")", ":", "var_name", "=", "match", ".", "group", "(", "1", ")", "if", "(", "var_name", "in", ...
perform shell/perl-style variable substitution on string .
train
false
33,686
def get_problem_grade_distribution(course_id):
    """Return the per-problem grade distribution for a course.

    Returns a tuple of two dicts, each keyed by the problem's usage key:
      - prob_grade_distrib: {'max_grade': ..., 'grade_distrib': [(grade, count), ...]}
      - total_student_count: number of graded rows aggregated per problem
    """
    db_query = models.StudentModule.objects.filter(course_id__exact=course_id, grade__isnull=False, module_type__exact='problem').values('module_state_key', 'grade', 'max_grade').annotate(count_grade=Count('grade'))
    prob_grade_distrib = {}
    total_student_count = {}
    for row in db_query:
        curr_problem = course_id.make_usage_key_from_deprecated_string(row['module_state_key'])
        if (curr_problem in prob_grade_distrib):
            prob_grade_distrib[curr_problem]['grade_distrib'].append((row['grade'], row['count_grade']))
            # Keep the largest max_grade observed across rows for this problem.
            if ((prob_grade_distrib[curr_problem]['max_grade'] != row['max_grade']) and (prob_grade_distrib[curr_problem]['max_grade'] < row['max_grade'])):
                prob_grade_distrib[curr_problem]['max_grade'] = row['max_grade']
        else:
            prob_grade_distrib[curr_problem] = {'max_grade': row['max_grade'], 'grade_distrib': [(row['grade'], row['count_grade'])]}
        total_student_count[curr_problem] = (total_student_count.get(curr_problem, 0) + row['count_grade'])
    return (prob_grade_distrib, total_student_count)
[ "def", "get_problem_grade_distribution", "(", "course_id", ")", ":", "db_query", "=", "models", ".", "StudentModule", ".", "objects", ".", "filter", "(", "course_id__exact", "=", "course_id", ",", "grade__isnull", "=", "False", ",", "module_type__exact", "=", "'pr...
returns the grade distribution per problem for the course course_id the course id for the course interested in output is 2 dicts: prob-grade_distrib where the key is the problem module_id and the value is a dict with: max_grade - max grade for this problem grade_distrib - array of tuples .
train
false
33,688
def pv_present(name, **kwargs):
    """Ensure ``name`` is initialized as an LVM physical volume.

    name: the device name to initialize; extra kwargs go to lvm.pvcreate.
    Returns the standard Salt state result dict.
    """
    ret = {'changes': {}, 'comment': '', 'name': name, 'result': True}
    if __salt__['lvm.pvdisplay'](name):
        ret['comment'] = 'Physical Volume {0} already present'.format(name)
        return ret
    if __opts__['test']:
        ret['comment'] = 'Physical Volume {0} is set to be created'.format(name)
        ret['result'] = None
        return ret
    changes = __salt__['lvm.pvcreate'](name, **kwargs)
    if __salt__['lvm.pvdisplay'](name):
        ret['comment'] = 'Created Physical Volume {0}'.format(name)
        ret['changes']['created'] = changes
    else:
        ret['comment'] = 'Failed to create Physical Volume {0}'.format(name)
        ret['result'] = False
    return ret
[ "def", "pv_present", "(", "name", ",", "**", "kwargs", ")", ":", "ret", "=", "{", "'changes'", ":", "{", "}", ",", "'comment'", ":", "''", ",", "'name'", ":", "name", ",", "'result'", ":", "True", "}", "if", "__salt__", "[", "'lvm.pvdisplay'", "]", ...
set a physical device to be used as an lvm physical volume name the device name to initialize .
train
true
33,689
def system_output(command, timeout=None, ignore_status=False, retain_output=False, args=(), verbose=True):
    """Run ``command`` and return its stdout, minus a single trailing newline.

    retain_output additionally tees stdout/stderr into the logs.
    """
    run_kwargs = dict(timeout=timeout, ignore_status=ignore_status,
                      verbose=verbose, args=args)
    if retain_output:
        run_kwargs.update(stdout_tee=TEE_TO_LOGS, stderr_tee=TEE_TO_LOGS)
    out = run(command, **run_kwargs).stdout
    if out.endswith('\n'):
        out = out[:-1]
    return out
[ "def", "system_output", "(", "command", ",", "timeout", "=", "None", ",", "ignore_status", "=", "False", ",", "retain_output", "=", "False", ",", "args", "=", "(", ")", ",", "verbose", "=", "True", ")", ":", "if", "retain_output", ":", "out", "=", "run...
run a command and return the stdout output .
train
false
33,690
def _get_last_snapshot(config='root'):
    """Return the most recently created snapshot (highest id) for ``config``."""
    by_id = sorted(list_snapshots(config), key=lambda snapshot: snapshot['id'])
    return by_id[-1]
[ "def", "_get_last_snapshot", "(", "config", "=", "'root'", ")", ":", "snapshot_list", "=", "sorted", "(", "list_snapshots", "(", "config", ")", ",", "key", "=", "(", "lambda", "x", ":", "x", "[", "'id'", "]", ")", ")", "return", "snapshot_list", "[", "...
returns the last existing created snapshot .
train
true
33,692
def is_service_running(service):
    """Query systemd over D-Bus and report whether ``service`` is loaded and active.

    Any D-Bus failure (e.g. unknown unit) is treated as "not running".
    """
    running = False
    try:
        bus = SystemBus()
        systemd_obj = bus.get_object('org.freedesktop.systemd1', '/org/freedesktop/systemd1')
        manager = Interface(systemd_obj, dbus_interface='org.freedesktop.systemd1.Manager')
        if service.endswith('.service'):
            unit_path = service
        else:
            unit_path = manager.GetUnit('{0}.service'.format(service))
        unit_obj = bus.get_object('org.freedesktop.systemd1', str(unit_path))
        unit_props = Interface(unit_obj, dbus_interface='org.freedesktop.DBus.Properties')
        load_state = unit_props.Get('org.freedesktop.systemd1.Unit', 'LoadState')
        active_state = unit_props.Get('org.freedesktop.systemd1.Unit', 'ActiveState')
        running = (load_state == 'loaded') and (active_state == 'active')
    except DBusException:
        pass
    return running
[ "def", "is_service_running", "(", "service", ")", ":", "service_running", "=", "False", "try", ":", "bus", "=", "SystemBus", "(", ")", "systemd", "=", "bus", ".", "get_object", "(", "'org.freedesktop.systemd1'", ",", "'/org/freedesktop/systemd1'", ")", "manager", ...
queries systemd through dbus to see if the service is running .
train
false
33,693
def _calculate_image_filename(instance, filename):
    """Generate a unique .jpg filename under the announcements photo directory.

    The uploaded name is ignored; a fresh UUID is used instead.
    """
    unique_name = str(uuid.uuid4()) + '.jpg'
    return os.path.join(settings.ANNOUNCEMENTS_PHOTO_DIR, unique_name)
[ "def", "_calculate_image_filename", "(", "instance", ",", "filename", ")", ":", "return", "os", ".", "path", ".", "join", "(", "settings", ".", "ANNOUNCEMENTS_PHOTO_DIR", ",", "(", "str", "(", "uuid", ".", "uuid4", "(", ")", ")", "+", "'.jpg'", ")", ")" ...
generate a unique filename for uploaded image .
train
false
33,694
def initializeMaildir(dir):
    """Create a maildir layout at ``dir`` (new/cur/tmp plus a .Trash subfolder)
    unless the directory already exists."""
    if os.path.isdir(dir):
        return
    os.mkdir(dir, 448)  # 448 == 0o700: owner-only access
    for subdir in ['new', 'cur', 'tmp', '.Trash']:
        os.mkdir(os.path.join(dir, subdir), 448)
    trash = os.path.join(dir, '.Trash')
    for subdir in ['new', 'cur', 'tmp']:
        os.mkdir(os.path.join(trash, subdir), 448)
    # Mark .Trash as a maildir folder.
    open(os.path.join(trash, 'maildirfolder'), 'w').close()
[ "def", "initializeMaildir", "(", "dir", ")", ":", "if", "(", "not", "os", ".", "path", ".", "isdir", "(", "dir", ")", ")", ":", "os", ".", "mkdir", "(", "dir", ",", "448", ")", "for", "subdir", "in", "[", "'new'", ",", "'cur'", ",", "'tmp'", ",...
create a maildir user directory if it doesnt already exist .
train
false
33,696
def check_classes(gtype, section):
    """Return True when the ``gtype`` notification priority is enabled (> 0)
    in config section ``section``; False on a malformed option."""
    try:
        option = sabnzbd.config.get_config(section, '%s_prio_%s' % (section, gtype))
        return option() > 0
    except TypeError:
        logging.debug('Incorrect Notify option %s:%s_prio_%s', section, section, gtype)
        return False
[ "def", "check_classes", "(", "gtype", ",", "section", ")", ":", "try", ":", "return", "(", "sabnzbd", ".", "config", ".", "get_config", "(", "section", ",", "(", "'%s_prio_%s'", "%", "(", "section", ",", "gtype", ")", ")", ")", "(", ")", ">", "0", ...
check if gtype is enabled in section .
train
false
33,697
def _Call(call, req, resp): if hasattr(req, 'app_id'): req.set_app_id(datastore_types.ResolveAppId(req.app_id())) try: result = apiproxy_stub_map.MakeSyncCall('datastore_v3', call, req, resp) if result: return result return resp except apiproxy_errors.ApplicationError as err: raise datastore._ToDatastoreError(err)
[ "def", "_Call", "(", "call", ",", "req", ",", "resp", ")", ":", "if", "hasattr", "(", "req", ",", "'app_id'", ")", ":", "req", ".", "set_app_id", "(", "datastore_types", ".", "ResolveAppId", "(", "req", ".", "app_id", "(", ")", ")", ")", "try", ":"...
generic method for making a datastore api call .
train
false
33,698
def broadcast_arrays(*args): return broadcast(*args).values
[ "def", "broadcast_arrays", "(", "*", "args", ")", ":", "return", "broadcast", "(", "*", "args", ")", ".", "values" ]
broadcasts given arrays .
train
false
33,699
def make_sparse_coded_signal(n_samples, n_components, n_features, n_nonzero_coefs, random_state=None): generator = check_random_state(random_state) D = generator.randn(n_features, n_components) D /= np.sqrt(np.sum((D ** 2), axis=0)) X = np.zeros((n_components, n_samples)) for i in range(n_samples): idx = np.arange(n_components) generator.shuffle(idx) idx = idx[:n_nonzero_coefs] X[(idx, i)] = generator.randn(n_nonzero_coefs) Y = np.dot(D, X) return map(np.squeeze, (Y, D, X))
[ "def", "make_sparse_coded_signal", "(", "n_samples", ",", "n_components", ",", "n_features", ",", "n_nonzero_coefs", ",", "random_state", "=", "None", ")", ":", "generator", "=", "check_random_state", "(", "random_state", ")", "D", "=", "generator", ".", "randn", ...
generate a signal as a sparse combination of dictionary elements .
train
false
33,700
def _add_regexp_listener(dbapi_con, con_record): def regexp(expr, item): reg = re.compile(expr) return (reg.search(six.text_type(item)) is not None) dbapi_con.create_function('regexp', 2, regexp)
[ "def", "_add_regexp_listener", "(", "dbapi_con", ",", "con_record", ")", ":", "def", "regexp", "(", "expr", ",", "item", ")", ":", "reg", "=", "re", ".", "compile", "(", "expr", ")", "return", "(", "reg", ".", "search", "(", "six", ".", "text_type", ...
add regexp function to sqlite connections .
train
false
33,701
def in6_getLocalUniquePrefix(): tod = time.time() i = int(tod) j = int(((tod - i) * (2 ** 32))) tod = struct.pack('!II', i, j) rawmac = get_if_raw_hwaddr(conf.iface6)[1] mac = ':'.join(map((lambda x: ('%.02x' % ord(x))), list(rawmac))) eui64 = inet_pton(socket.AF_INET6, ('::' + in6_mactoifaceid(mac)))[8:] import sha globalid = sha.new((tod + eui64)).digest()[:5] return inet_ntop(socket.AF_INET6, (('\xfd' + globalid) + ('\x00' * 10)))
[ "def", "in6_getLocalUniquePrefix", "(", ")", ":", "tod", "=", "time", ".", "time", "(", ")", "i", "=", "int", "(", "tod", ")", "j", "=", "int", "(", "(", "(", "tod", "-", "i", ")", "*", "(", "2", "**", "32", ")", ")", ")", "tod", "=", "stru...
returns a pseudo-randomly generated local unique prefix .
train
true
33,704
def secure_password(length=20, use_random=True): length = int(length) pw = '' while (len(pw) < length): if (HAS_RANDOM and use_random): pw += re.sub('\\W', '', Crypto.Random.get_random_bytes(1)) else: pw += random.SystemRandom().choice((string.ascii_letters + string.digits)) return pw
[ "def", "secure_password", "(", "length", "=", "20", ",", "use_random", "=", "True", ")", ":", "length", "=", "int", "(", "length", ")", "pw", "=", "''", "while", "(", "len", "(", "pw", ")", "<", "length", ")", ":", "if", "(", "HAS_RANDOM", "and", ...
generate a secure password .
train
false
33,706
def PrintHelp(): print 'Usage: s3util [options] [command] [file/dir ...]\nCommands:\n bench [options] <test dir> ; perform S3 benchmark\n -bench_read ; benchmark Get operation (Put if False) [True]\n -bench_iterations=X ; number of iterations per size [10]\n -bench_parallel ; run Put/Get requests within a size in parallel [False]\n', ' -bench_size_powers=X,Y:Z ; sizes to test, in powers of two [10:20]\n cat <file> ; output the contents of a file in S3. (Auto unzip of .gz files)\n grep [pattern] [file0 file1 ... ] ; search for regexp "pattern" in files. Auto unzip of .gz files.\n ls [options] [pattern] ; list contents of a directory\n -R ; recursively list files [False]\n mv [options] [pattern] [dest] ; move matching files to dest\n --verify ; verify destination files [True]\n --delete_source ; delete source files [True]\n put <source file> <dest dir> ; copy a file to a directory in S3\n'
[ "def", "PrintHelp", "(", ")", ":", "print", "'Usage: s3util [options] [command] [file/dir ...]\\nCommands:\\n bench [options] <test dir> ; perform S3 benchmark\\n -bench_read ; benchmark Get operation (Put if False) [True]\\n -bench_iterations=X ; number of ...
print the summary help message .
train
false
33,707
def login_decorator(func): def wrap(provider, *args, **kwargs): error = None result = authomatic.core.LoginResult(provider) try: func(provider, *args, **kwargs) except Exception as e: if provider.settings.report_errors: error = e provider._log(logging.ERROR, u'Reported suppressed exception: {0}!'.format(repr(error))) else: if provider.settings.debug: provider.write(_error_traceback_html(sys.exc_info(), traceback.format_exc())) raise if (provider.user or error): result = authomatic.core.LoginResult(provider) result.error = error if isinstance(provider.session, authomatic.core.Session): provider.session.delete() provider._log(logging.INFO, u'Procedure finished.') if provider.callback: provider.callback(result) return result else: provider.save_session() return wrap
[ "def", "login_decorator", "(", "func", ")", ":", "def", "wrap", "(", "provider", ",", "*", "args", ",", "**", "kwargs", ")", ":", "error", "=", "None", "result", "=", "authomatic", ".", "core", ".", "LoginResult", "(", "provider", ")", "try", ":", "f...
decorate the :meth: .
train
true
33,708
def get_safe_settings(): settings_dict = {} for k in dir(settings): if k.isupper(): if HIDDEN_SETTINGS.search(k): settings_dict[k] = '********************' else: settings_dict[k] = getattr(settings, k) return settings_dict
[ "def", "get_safe_settings", "(", ")", ":", "settings_dict", "=", "{", "}", "for", "k", "in", "dir", "(", "settings", ")", ":", "if", "k", ".", "isupper", "(", ")", ":", "if", "HIDDEN_SETTINGS", ".", "search", "(", "k", ")", ":", "settings_dict", "[",...
returns a dictionary of the settings module .
train
false
33,711
def test_import_hooks_importer(): global myimpCalled myimpCalled = None class myimp(object, ): def find_module(self, fullname, path=None): global myimpCalled myimpCalled = (fullname, path) if (fullname == 'does_not_exist_throw'): raise Exception('hello') mi = myimp() sys.meta_path.append(mi) try: try: import does_not_exist AssertUnreachable() except ImportError: pass AreEqual(myimpCalled, ('does_not_exist', None)) try: from testpkg1 import blah AssertUnreachable() except ImportError: pass AreEqual(type(myimpCalled[1]), list) AreEqual(myimpCalled[0], 'testpkg1.blah') AreEqual(myimpCalled[1][0][(-8):], 'testpkg1') def f(): import does_not_exist_throw AssertErrorWithMessage(Exception, 'hello', f) finally: sys.meta_path.remove(mi)
[ "def", "test_import_hooks_importer", "(", ")", ":", "global", "myimpCalled", "myimpCalled", "=", "None", "class", "myimp", "(", "object", ",", ")", ":", "def", "find_module", "(", "self", ",", "fullname", ",", "path", "=", "None", ")", ":", "global", "myim...
importer tests - verify the importer gets passed correct values .
train
false
33,713
def handle_page_crumb(func): @wraps(func) def wrapper(path, model, page, root_name): path = PAGE_REGEXP.sub('', path) breadcrumbs = func(path, model, root_name) if page: if (page.number > 1): breadcrumbs[(-1)].url = path page_crumb = Crumb((_('Page %s') % page.number)) breadcrumbs.append(page_crumb) return breadcrumbs return wrapper
[ "def", "handle_page_crumb", "(", "func", ")", ":", "@", "wraps", "(", "func", ")", "def", "wrapper", "(", "path", ",", "model", ",", "page", ",", "root_name", ")", ":", "path", "=", "PAGE_REGEXP", ".", "sub", "(", "''", ",", "path", ")", "breadcrumbs...
decorator for handling the current page in the breadcrumbs .
train
true
33,714
def get_cpu_arch(): f = open('/proc/cpuinfo', 'r') cpuinfo = f.readlines() f.close() if list_grep(cpuinfo, '^cpu.*(RS64|POWER3|Broadband Engine)'): return 'power' elif list_grep(cpuinfo, '^cpu.*POWER4'): return 'power4' elif list_grep(cpuinfo, '^cpu.*POWER5'): return 'power5' elif list_grep(cpuinfo, '^cpu.*POWER6'): return 'power6' elif list_grep(cpuinfo, '^cpu.*POWER7'): return 'power7' elif list_grep(cpuinfo, '^cpu.*PPC970'): return 'power970' elif list_grep(cpuinfo, 'ARM'): return 'arm' elif list_grep(cpuinfo, '^flags.*:.* lm .*'): return 'x86_64' else: return 'i386'
[ "def", "get_cpu_arch", "(", ")", ":", "f", "=", "open", "(", "'/proc/cpuinfo'", ",", "'r'", ")", "cpuinfo", "=", "f", ".", "readlines", "(", ")", "f", ".", "close", "(", ")", "if", "list_grep", "(", "cpuinfo", ",", "'^cpu.*(RS64|POWER3|Broadband Engine)'",...
work out which cpu architecture were running on .
train
false
33,715
def strip_fragment(url): (url, fragment) = splittag(url) return url
[ "def", "strip_fragment", "(", "url", ")", ":", "(", "url", ",", "fragment", ")", "=", "splittag", "(", "url", ")", "return", "url" ]
in python 8280 <URL python 2 .
train
false
33,716
def test_expr(expr, allowed_codes): import dis allowed_codes = [dis.opmap[c] for c in allowed_codes] try: c = compile(expr, '', 'eval') except SyntaxError: raise ValueError(('%s is not a valid expression' % expr)) codes = _get_opcodes(c) for code in codes: if (code not in allowed_codes): raise ValueError(('opcode %s not allowed' % dis.opname[code])) return c
[ "def", "test_expr", "(", "expr", ",", "allowed_codes", ")", ":", "import", "dis", "allowed_codes", "=", "[", "dis", ".", "opmap", "[", "c", "]", "for", "c", "in", "allowed_codes", "]", "try", ":", "c", "=", "compile", "(", "expr", ",", "''", ",", "...
test_expr -> code_object test that the expression contains only the allowed opcodes .
train
false
33,718
def get_security_attributes_for_user(user=None): if (user is None): user = get_current_user() assert isinstance(user, TOKEN_USER), 'user must be TOKEN_USER instance' SD = SECURITY_DESCRIPTOR() SA = SECURITY_ATTRIBUTES() SA.descriptor = SD SA.bInheritHandle = 1 ctypes.windll.advapi32.InitializeSecurityDescriptor(ctypes.byref(SD), SECURITY_DESCRIPTOR.REVISION) ctypes.windll.advapi32.SetSecurityDescriptorOwner(ctypes.byref(SD), user.SID, 0) return SA
[ "def", "get_security_attributes_for_user", "(", "user", "=", "None", ")", ":", "if", "(", "user", "is", "None", ")", ":", "user", "=", "get_current_user", "(", ")", "assert", "isinstance", "(", "user", ",", "TOKEN_USER", ")", ",", "'user must be TOKEN_USER ins...
return a security_attributes structure with the sid set to the specified user .
train
true
33,719
def render_custom_content(body, context_data={}): lexer = Lexer(body, origin=None) parser = CustomPayloadParser(lexer.tokenize()) nodes = parser.parse() return nodes.render(Context(context_data))
[ "def", "render_custom_content", "(", "body", ",", "context_data", "=", "{", "}", ")", ":", "lexer", "=", "Lexer", "(", "body", ",", "origin", "=", "None", ")", "parser", "=", "CustomPayloadParser", "(", "lexer", ".", "tokenize", "(", ")", ")", "nodes", ...
renders custom content for the payload using django templating .
train
false
33,720
def SetCoreGRRKnowledgeBaseValues(kb, client_obj): client_schema = client_obj.Schema kb.hostname = utils.SmartUnicode(client_obj.Get(client_schema.FQDN, '')) if (not kb.hostname): kb.hostname = utils.SmartUnicode(client_obj.Get(client_schema.HOSTNAME, '')) versions = client_obj.Get(client_schema.OS_VERSION) if (versions and versions.versions): kb.os_major_version = versions.versions[0] kb.os_minor_version = versions.versions[1] client_os = client_obj.Get(client_schema.SYSTEM) if client_os: kb.os = utils.SmartUnicode(client_obj.Get(client_schema.SYSTEM))
[ "def", "SetCoreGRRKnowledgeBaseValues", "(", "kb", ",", "client_obj", ")", ":", "client_schema", "=", "client_obj", ".", "Schema", "kb", ".", "hostname", "=", "utils", ".", "SmartUnicode", "(", "client_obj", ".", "Get", "(", "client_schema", ".", "FQDN", ",", ...
set core values from grr into the knowledgebase .
train
true
33,721
def cmp_policy_info(info, remote_info): def is_deleted(info): return ((info['delete_timestamp'] > info['put_timestamp']) and (info.get('count', info.get('object_count', 0)) == 0)) deleted = is_deleted(info) remote_deleted = is_deleted(remote_info) if any([deleted, remote_deleted]): if (not deleted): return (-1) elif (not remote_deleted): return 1 return cmp(remote_info['status_changed_at'], info['status_changed_at']) def has_been_recreated(info): return (info['put_timestamp'] > info['delete_timestamp'] > Timestamp(0)) remote_recreated = has_been_recreated(remote_info) recreated = has_been_recreated(info) if any([remote_recreated, recreated]): if (not recreated): return 1 elif (not remote_recreated): return (-1) return cmp(remote_info['status_changed_at'], info['status_changed_at']) return cmp(info['status_changed_at'], remote_info['status_changed_at'])
[ "def", "cmp_policy_info", "(", "info", ",", "remote_info", ")", ":", "def", "is_deleted", "(", "info", ")", ":", "return", "(", "(", "info", "[", "'delete_timestamp'", "]", ">", "info", "[", "'put_timestamp'", "]", ")", "and", "(", "info", ".", "get", ...
you have to squint to see it .
train
false
33,723
@core_helper def add_url_param(alternative_url=None, controller=None, action=None, extras=None, new_params=None): params_nopage = [(k, v) for (k, v) in request.params.items() if (k != 'page')] params = set(params_nopage) if new_params: params |= set(new_params.items()) if alternative_url: return _url_with_params(alternative_url, params) return _create_url_with_params(params=params, controller=controller, action=action, extras=extras)
[ "@", "core_helper", "def", "add_url_param", "(", "alternative_url", "=", "None", ",", "controller", "=", "None", ",", "action", "=", "None", ",", "extras", "=", "None", ",", "new_params", "=", "None", ")", ":", "params_nopage", "=", "[", "(", "k", ",", ...
adds extra parameters to existing ones controller action & extras are used to create the base url via :py:func:~ckan .
train
false
33,724
def get_default_username(distribution): return 'root'
[ "def", "get_default_username", "(", "distribution", ")", ":", "return", "'root'" ]
try to determine the current system users username to use as a default .
train
false
33,725
@contextmanager def _get_serv(ret=None, commit=False): _options = _get_options(ret) connect = True if (__context__ and ('mysql_returner_conn' in __context__)): try: log.debug('Trying to reuse MySQL connection pool') conn = __context__['mysql_returner_conn'] conn.ping() connect = False except MySQLdb.connections.OperationalError as exc: log.debug('OperationalError on ping: {0}'.format(exc)) if connect: log.debug('Generating new MySQL connection pool') try: ssl_options = {} if _options.get('ssl_ca'): ssl_options['ca'] = _options.get('ssl_ca') if _options.get('ssl_cert'): ssl_options['cert'] = _options.get('ssl_cert') if _options.get('ssl_key'): ssl_options['key'] = _options.get('ssl_key') conn = MySQLdb.connect(host=_options.get('host'), user=_options.get('user'), passwd=_options.get('pass'), db=_options.get('db'), port=_options.get('port'), ssl=ssl_options) try: __context__['mysql_returner_conn'] = conn except TypeError: pass except MySQLdb.connections.OperationalError as exc: raise salt.exceptions.SaltMasterError('MySQL returner could not connect to database: {exc}'.format(exc=exc)) cursor = conn.cursor() try: (yield cursor) except MySQLdb.DatabaseError as err: error = err.args sys.stderr.write(str(error)) cursor.execute('ROLLBACK') raise err else: if commit: cursor.execute('COMMIT') else: cursor.execute('ROLLBACK')
[ "@", "contextmanager", "def", "_get_serv", "(", "ret", "=", "None", ",", "commit", "=", "False", ")", ":", "_options", "=", "_get_options", "(", "ret", ")", "connect", "=", "True", "if", "(", "__context__", "and", "(", "'mysql_returner_conn'", "in", "__con...
return an influxdb client object .
train
true
33,726
def is_same_entry(entry_1, entry_2): if (entry_1 == entry_2): return True if (os.path.realpath(entry_1) == os.path.realpath(entry_2)): return True if ((os.path.basename(entry_1) == os.path.basename(entry_2)) and (os.path.basename(os.path.dirname(entry_1)) == os.path.basename(os.path.dirname(entry_2))) and os.path.basename(os.path.dirname(entry_1)).startswith('tmp')): return True return False
[ "def", "is_same_entry", "(", "entry_1", ",", "entry_2", ")", ":", "if", "(", "entry_1", "==", "entry_2", ")", ":", "return", "True", "if", "(", "os", ".", "path", ".", "realpath", "(", "entry_1", ")", "==", "os", ".", "path", ".", "realpath", "(", ...
return true iff both paths can be considered to point to the same module .
train
false
33,727
def get_py_state_ptr(context, builder): return get_state_ptr(context, builder, 'py')
[ "def", "get_py_state_ptr", "(", "context", ",", "builder", ")", ":", "return", "get_state_ptr", "(", "context", ",", "builder", ",", "'py'", ")" ]
get a pointer to the thread-local python random state .
train
false
33,728
def get_object_fallback(cls, title, locale, default=None, **kwargs): try: return cls.objects.get(title=title, locale=locale, **kwargs) except (cls.DoesNotExist, IOError): pass try: default_lang_doc = cls.objects.get(title=title, locale=settings.WIKI_DEFAULT_LANGUAGE, **kwargs) if hasattr(default_lang_doc, 'translated_to'): trans = default_lang_doc.translated_to(locale) if (trans and trans.current_revision): return trans if hasattr(default_lang_doc, 'redirect_document'): target = default_lang_doc.redirect_document() if target: trans = target.translated_to(locale) if (trans and trans.current_revision): return trans return default_lang_doc except (cls.DoesNotExist, IOError): return default
[ "def", "get_object_fallback", "(", "cls", ",", "title", ",", "locale", ",", "default", "=", "None", ",", "**", "kwargs", ")", ":", "try", ":", "return", "cls", ".", "objects", ".", "get", "(", "title", "=", "title", ",", "locale", "=", "locale", ",",...
return an instance of cls matching title and locale .
train
false
33,729
def randomization_bin(seed, problem_id): r_hash = hashlib.sha1() r_hash.update(str(seed)) r_hash.update(str(problem_id)) return (int(r_hash.hexdigest()[:7], 16) % NUM_RANDOMIZATION_BINS)
[ "def", "randomization_bin", "(", "seed", ",", "problem_id", ")", ":", "r_hash", "=", "hashlib", ".", "sha1", "(", ")", "r_hash", ".", "update", "(", "str", "(", "seed", ")", ")", "r_hash", ".", "update", "(", "str", "(", "problem_id", ")", ")", "retu...
pick a randomization bin for the problem given the users seed and a problem id .
train
false
33,732
def libvlc_clock(): f = (_Cfunctions.get('libvlc_clock', None) or _Cfunction('libvlc_clock', (), None, ctypes.c_int64)) return f()
[ "def", "libvlc_clock", "(", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_clock'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_clock'", ",", "(", ")", ",", "None", ",", "ctypes", ".", "c_int64", ")", ")", "return", "f", "(...
return the current time as defined by libvlc .
train
false
33,733
@_blocked_elementwise def sign_round_up(X): Y = X.copy() Y[(Y == 0)] = 1 Y /= np.abs(Y) return Y
[ "@", "_blocked_elementwise", "def", "sign_round_up", "(", "X", ")", ":", "Y", "=", "X", ".", "copy", "(", ")", "Y", "[", "(", "Y", "==", "0", ")", "]", "=", "1", "Y", "/=", "np", ".", "abs", "(", "Y", ")", "return", "Y" ]
this should do the right thing for both real and complex matrices .
train
false
33,734
def import_stages(): stages = [] for plugin in find_plugins(): if hasattr(plugin, 'import_stages'): stages += plugin.import_stages return stages
[ "def", "import_stages", "(", ")", ":", "stages", "=", "[", "]", "for", "plugin", "in", "find_plugins", "(", ")", ":", "if", "hasattr", "(", "plugin", ",", "'import_stages'", ")", ":", "stages", "+=", "plugin", ".", "import_stages", "return", "stages" ]
get a list of import stage functions defined by plugins .
train
false
33,735
@utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) def do_set_password(cs, args): server = _find_server(cs, args.server) p1 = getpass.getpass('New password: ') p2 = getpass.getpass('Again: ') if (p1 != p2): raise exceptions.CommandError(_('Passwords do not match.')) server.change_password(p1)
[ "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_set_password", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "a...
change the admin password for a server .
train
false
33,737
def _validate_clientsecrets(clientsecrets_dict): _INVALID_FILE_FORMAT_MSG = 'Invalid file format. See https://developers.google.com/api-client-library/python/guide/aaa_client_secrets' if (clientsecrets_dict is None): raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG) try: ((client_type, client_info),) = clientsecrets_dict.items() except (ValueError, AttributeError): raise InvalidClientSecretsError((_INVALID_FILE_FORMAT_MSG + ' Expected a JSON object with a single property for a "web" or "installed" application')) if (client_type not in VALID_CLIENT): raise InvalidClientSecretsError('Unknown client type: {0}.'.format(client_type)) for prop_name in VALID_CLIENT[client_type]['required']: if (prop_name not in client_info): raise InvalidClientSecretsError('Missing property "{0}" in a client type of "{1}".'.format(prop_name, client_type)) for prop_name in VALID_CLIENT[client_type]['string']: if client_info[prop_name].startswith('[['): raise InvalidClientSecretsError('Property "{0}" is not configured.'.format(prop_name)) return (client_type, client_info)
[ "def", "_validate_clientsecrets", "(", "clientsecrets_dict", ")", ":", "_INVALID_FILE_FORMAT_MSG", "=", "'Invalid file format. See https://developers.google.com/api-client-library/python/guide/aaa_client_secrets'", "if", "(", "clientsecrets_dict", "is", "None", ")", ":", "raise", "...
validate parsed client secrets from a file .
train
true
33,738
@pytest.fixture(autouse=True, scope='session') def setup_db_if_needed(request, tests_use_db): if (tests_use_db and (not request.config.getvalue('reuse_db'))): return request.getfuncargvalue('post_db_setup')
[ "@", "pytest", ".", "fixture", "(", "autouse", "=", "True", ",", "scope", "=", "'session'", ")", "def", "setup_db_if_needed", "(", "request", ",", "tests_use_db", ")", ":", "if", "(", "tests_use_db", "and", "(", "not", "request", ".", "config", ".", "get...
sets up the site db only if tests requested to use the db .
train
false
33,739
def call_len(context, builder, ty, val): try: len_impl = context.get_function(len, typing.signature(types.intp, ty)) except NotImplementedError: return None else: return len_impl(builder, (val,))
[ "def", "call_len", "(", "context", ",", "builder", ",", "ty", ",", "val", ")", ":", "try", ":", "len_impl", "=", "context", ".", "get_function", "(", "len", ",", "typing", ".", "signature", "(", "types", ".", "intp", ",", "ty", ")", ")", "except", ...
call len() on the given value .
train
false
33,740
def get_yaml(path): single = re.compile('^\\$\\{(.*)\\}$') yaml.add_implicit_resolver('!single', single) def single_constructor(loader, node): value = loader.construct_scalar(node) proto = single.match(value).group(1) default = None if (len(proto.split(':')) > 1): (envvar, default) = proto.split(':') else: envvar = proto return (os.environ[envvar] if (envvar in os.environ) else default) yaml.add_constructor('!single', single_constructor) raw = read_file(path) try: cfg = yaml.load(raw) except yaml.scanner.ScannerError as e: raise ConfigurationError('Unable to parse YAML file. Error: {0}'.format(e)) return cfg
[ "def", "get_yaml", "(", "path", ")", ":", "single", "=", "re", ".", "compile", "(", "'^\\\\$\\\\{(.*)\\\\}$'", ")", "yaml", ".", "add_implicit_resolver", "(", "'!single'", ",", "single", ")", "def", "single_constructor", "(", "loader", ",", "node", ")", ":",...
read the file identified by path and import its yaml contents .
train
false
33,741
@hook.command def flip(text, reply): reply(formatting.multi_replace(text[::(-1)].lower(), replacements))
[ "@", "hook", ".", "command", "def", "flip", "(", "text", ",", "reply", ")", ":", "reply", "(", "formatting", ".", "multi_replace", "(", "text", "[", ":", ":", "(", "-", "1", ")", "]", ".", "lower", "(", ")", ",", "replacements", ")", ")" ]
call the function call with the arguments flipped this function is curried .
train
false
33,742
def shutil_which(pgm): path = os.getenv('PATH') for p in path.split(os.path.pathsep): p = os.path.join(p, pgm) if (os.path.exists(p) and os.access(p, os.X_OK)): return p
[ "def", "shutil_which", "(", "pgm", ")", ":", "path", "=", "os", ".", "getenv", "(", "'PATH'", ")", "for", "p", "in", "path", ".", "split", "(", "os", ".", "path", ".", "pathsep", ")", ":", "p", "=", "os", ".", "path", ".", "join", "(", "p", "...
python 2 compatibility: backport of shutil .
train
true
33,744
def test_bad_inheritance(): def f(): class x(System.Single, ): pass def g(): class x(System.Version, ): pass AssertErrorWithPartialMessage(TypeError, 'System.Single', f) AssertErrorWithPartialMessage(TypeError, 'System.Version', g)
[ "def", "test_bad_inheritance", "(", ")", ":", "def", "f", "(", ")", ":", "class", "x", "(", "System", ".", "Single", ",", ")", ":", "pass", "def", "g", "(", ")", ":", "class", "x", "(", "System", ".", "Version", ",", ")", ":", "pass", "AssertErro...
verify a bad inheritance reports the type name youre inheriting from .
train
false
33,745
def _toggle_term_protect(name, value): instance_id = _get_node(name)['instanceId'] params = {'Action': 'ModifyInstanceAttribute', 'InstanceId': instance_id, 'DisableApiTermination.Value': value} result = aws.query(params, location=get_location(), provider=get_provider(), return_root=True, opts=__opts__, sigver='4') return show_term_protect(name=name, instance_id=instance_id, call='action')
[ "def", "_toggle_term_protect", "(", "name", ",", "value", ")", ":", "instance_id", "=", "_get_node", "(", "name", ")", "[", "'instanceId'", "]", "params", "=", "{", "'Action'", ":", "'ModifyInstanceAttribute'", ",", "'InstanceId'", ":", "instance_id", ",", "'D...
disable termination protection on a node cli example: .
train
true
33,747
def cmd_reload(args): mestate.console.writeln('Reloading graphs', fg='blue') load_graphs() setup_menus() mestate.console.write(('Loaded %u graphs\n' % len(mestate.graphs)))
[ "def", "cmd_reload", "(", "args", ")", ":", "mestate", ".", "console", ".", "writeln", "(", "'Reloading graphs'", ",", "fg", "=", "'blue'", ")", "load_graphs", "(", ")", "setup_menus", "(", ")", "mestate", ".", "console", ".", "write", "(", "(", "'Loaded...
reload graphs .
train
true
33,748
def ro(object): return mergeOrderings([_flatten(object)])
[ "def", "ro", "(", "object", ")", ":", "return", "mergeOrderings", "(", "[", "_flatten", "(", "object", ")", "]", ")" ]
compute a "resolution order" for an object .
train
false
33,749
def __listAllU6Unix(): deviceList = {} numDevices = staticLib.LJUSB_GetDevCount(LJ_dtU6) for i in xrange(numDevices): try: device = openLabJack(LJ_dtU6, 1, firstFound=False, devNumber=(i + 1)) device.close() deviceList[str(device.serialNumber)] = device.__dict__ except LabJackException: pass return deviceList
[ "def", "__listAllU6Unix", "(", ")", ":", "deviceList", "=", "{", "}", "numDevices", "=", "staticLib", ".", "LJUSB_GetDevCount", "(", "LJ_dtU6", ")", "for", "i", "in", "xrange", "(", "numDevices", ")", ":", "try", ":", "device", "=", "openLabJack", "(", "...
list all for u6s .
train
false
33,750
def _linab(arg, symbol): arg = arg.expand() (ind, dep) = arg.as_independent(symbol) if (not arg.is_Add): b = 0 (a, x) = (ind, dep) else: b = ind (a, x) = separatevars(dep).as_independent(symbol, as_Add=False) if x.could_extract_minus_sign(): a = (- a) x = (- x) return (a, b, x)
[ "def", "_linab", "(", "arg", ",", "symbol", ")", ":", "arg", "=", "arg", ".", "expand", "(", ")", "(", "ind", ",", "dep", ")", "=", "arg", ".", "as_independent", "(", "symbol", ")", "if", "(", "not", "arg", ".", "is_Add", ")", ":", "b", "=", ...
return a .
train
false
33,751
@register.as_tag def login_form(*args): return LoginForm()
[ "@", "register", ".", "as_tag", "def", "login_form", "(", "*", "args", ")", ":", "return", "LoginForm", "(", ")" ]
returns the login form: {% login_form as form %} {{ form }} .
train
false
33,752
def instance_get_all_by_filters(context, filters, sort_key='created_at', sort_dir='desc', limit=None, marker=None, columns_to_join=None): return IMPL.instance_get_all_by_filters(context, filters, sort_key, sort_dir, limit=limit, marker=marker, columns_to_join=columns_to_join)
[ "def", "instance_get_all_by_filters", "(", "context", ",", "filters", ",", "sort_key", "=", "'created_at'", ",", "sort_dir", "=", "'desc'", ",", "limit", "=", "None", ",", "marker", "=", "None", ",", "columns_to_join", "=", "None", ")", ":", "return", "IMPL"...
get all instances that match all filters .
train
false
33,753
def attribute_name(state, text, i, formats, user_data): ch = text[i] if (ch in space_chars): return [(1, None)] if (ch == u'='): state.parse = ATTRIBUTE_VALUE return [(1, formats[u'attr'])] state.parse = IN_OPENING_TAG state.attribute_name = None return [(0, None)]
[ "def", "attribute_name", "(", "state", ",", "text", ",", "i", ",", "formats", ",", "user_data", ")", ":", "ch", "=", "text", "[", "i", "]", "if", "(", "ch", "in", "space_chars", ")", ":", "return", "[", "(", "1", ",", "None", ")", "]", "if", "(...
after attribute name .
train
false
33,754
def _check_input(opens, closes, highs, lows, miss=(-1)): def _missing(sequence, miss=(-1)): u'Returns the index in *sequence* of the missing data, identified by\n *miss*\n\n Parameters\n ----------\n sequence :\n sequence to evaluate\n miss :\n identifier of the missing data\n\n Returns\n -------\n where_miss: numpy.ndarray\n indices of the missing data\n ' return np.where((np.array(sequence) == miss))[0] same_length = (len(opens) == len(highs) == len(lows) == len(closes)) _missopens = _missing(opens) same_missing = ((_missopens == _missing(highs)).all() and (_missopens == _missing(lows)).all() and (_missopens == _missing(closes)).all()) if (not (same_length and same_missing)): msg = u'*opens*, *highs*, *lows* and *closes* must have the same length. NOTE: this code assumes if any value open, high, low, close is missing (*-1*) they all must be missing.' raise ValueError(msg)
[ "def", "_check_input", "(", "opens", ",", "closes", ",", "highs", ",", "lows", ",", "miss", "=", "(", "-", "1", ")", ")", ":", "def", "_missing", "(", "sequence", ",", "miss", "=", "(", "-", "1", ")", ")", ":", "return", "np", ".", "where", "("...
checks that *opens* .
train
false
33,755
def req(): s3.filter = (s3db.req_req.is_template == False) output = req_controller() return output
[ "def", "req", "(", ")", ":", "s3", ".", "filter", "=", "(", "s3db", ".", "req_req", ".", "is_template", "==", "False", ")", "output", "=", "req_controller", "(", ")", "return", "output" ]
rest controller for request instances .
train
false
33,756
@app.teardown_appcontext def shutdown_session(exception=None): db_session.remove()
[ "@", "app", ".", "teardown_appcontext", "def", "shutdown_session", "(", "exception", "=", "None", ")", ":", "db_session", ".", "remove", "(", ")" ]
automatically remove database sessions at the end of the request .
train
false
33,757
def unary_math_intr(fn, intrcode): @lower(fn, types.Float) def float_impl(context, builder, sig, args): res = call_fp_intrinsic(builder, intrcode, args) return impl_ret_untracked(context, builder, sig.return_type, res) unary_math_int_impl(fn, float_impl) return float_impl
[ "def", "unary_math_intr", "(", "fn", ",", "intrcode", ")", ":", "@", "lower", "(", "fn", ",", "types", ".", "Float", ")", "def", "float_impl", "(", "context", ",", "builder", ",", "sig", ",", "args", ")", ":", "res", "=", "call_fp_intrinsic", "(", "b...
implement the math function *fn* using the llvm intrinsic *intrcode* .
train
false
33,758
def timestr_to_secs(timestr, round_to=3): if (is_string(timestr) or is_number(timestr)): for converter in (_number_to_secs, _timer_to_secs, _time_string_to_secs): secs = converter(timestr) if (secs is not None): return (secs if (round_to is None) else roundup(secs, round_to)) raise ValueError(("Invalid time string '%s'." % timestr))
[ "def", "timestr_to_secs", "(", "timestr", ",", "round_to", "=", "3", ")", ":", "if", "(", "is_string", "(", "timestr", ")", "or", "is_number", "(", "timestr", ")", ")", ":", "for", "converter", "in", "(", "_number_to_secs", ",", "_timer_to_secs", ",", "_...
parses time like 1h 10s .
train
false
33,759
def find_element(tagid, sel): if isinstance(tagid, _Element): return tagid if isinstance(tagid, dict): tagid = tagid.get('tagid') elements = sel.xpath(('//*[@data-tagid="%s"]' % tagid)) if elements: return elements[0].root
[ "def", "find_element", "(", "tagid", ",", "sel", ")", ":", "if", "isinstance", "(", "tagid", ",", "_Element", ")", ":", "return", "tagid", "if", "isinstance", "(", "tagid", ",", "dict", ")", ":", "tagid", "=", "tagid", ".", "get", "(", "'tagid'", ")"...
find an element by its tagid .
train
false
33,760
@authenticated_json_post_view @has_request_variables def json_tutorial_send_message(request, user_profile, type=REQ(validator=check_string), recipient=REQ(validator=check_string), topic=REQ(validator=check_string), content=REQ(validator=check_string)): sender_name = 'welcome-bot@zulip.com' if (type == 'stream'): internal_send_message(user_profile.realm, sender_name, 'stream', recipient, topic, content) return json_success() return json_error(_('Bad data passed in to tutorial_send_message'))
[ "@", "authenticated_json_post_view", "@", "has_request_variables", "def", "json_tutorial_send_message", "(", "request", ",", "user_profile", ",", "type", "=", "REQ", "(", "validator", "=", "check_string", ")", ",", "recipient", "=", "REQ", "(", "validator", "=", "...
this function .
train
false
33,761
def ProcessAXScriptException(scriptingSite, debugManager, exceptionInstance): instance = IActiveScriptError() instance._SetExceptionInfo(exceptionInstance) gateway = win32com.server.util.wrap(instance, axscript.IID_IActiveScriptError) if debugManager: fCallOnError = debugManager.HandleRuntimeError() if (not fCallOnError): return None try: result = scriptingSite.OnScriptError(gateway) except pythoncom.com_error as details: print '**OnScriptError failed:', details print ("Exception description:'%s'" % repr(exceptionInstance.description)) print ("Exception text:'%s'" % repr(exceptionInstance.linetext)) result = winerror.S_FALSE if (result == winerror.S_OK): ret = win32com.server.exception.COMException(scode=axscript.SCRIPT_E_REPORTED) return ret else: return exceptionInstance
[ "def", "ProcessAXScriptException", "(", "scriptingSite", ",", "debugManager", ",", "exceptionInstance", ")", ":", "instance", "=", "IActiveScriptError", "(", ")", "instance", ".", "_SetExceptionInfo", "(", "exceptionInstance", ")", "gateway", "=", "win32com", ".", "...
general function to handle any exception in ax code this function creates an instance of our iactivescripterror interface .
train
false