id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
31,099
@require_backend('gce') def gceblockdeviceapi_for_test(test_case): return get_blockdeviceapi_with_cleanup(test_case)
[ "@", "require_backend", "(", "'gce'", ")", "def", "gceblockdeviceapi_for_test", "(", "test_case", ")", ":", "return", "get_blockdeviceapi_with_cleanup", "(", "test_case", ")" ]
create a gceblockdeviceapi for use by tests .
train
false
31,100
def make_nsis_patterns(): kwstr1 = 'Abort AddBrandingImage AddSize AllowRootDirInstall AllowSkipFiles AutoCloseWindow BGFont BGGradient BrandingText BringToFront Call CallInstDLL Caption ClearErrors CompletedText ComponentText CopyFiles CRCCheck CreateDirectory CreateFont CreateShortCut Delete DeleteINISec DeleteINIStr DeleteRegKey DeleteRegValue DetailPrint DetailsButtonText DirText DirVar DirVerify EnableWindow EnumRegKey EnumRegValue Exec ExecShell ExecWait Exch ExpandEnvStrings File FileBufSize FileClose FileErrorText FileOpen FileRead FileReadByte FileSeek FileWrite FileWriteByte FindClose FindFirst FindNext FindWindow FlushINI Function FunctionEnd GetCurInstType GetCurrentAddress GetDlgItem GetDLLVersion GetDLLVersionLocal GetErrorLevel GetFileTime GetFileTimeLocal GetFullPathName GetFunctionAddress GetInstDirError GetLabelAddress GetTempFileName Goto HideWindow ChangeUI CheckBitmap Icon IfAbort IfErrors IfFileExists IfRebootFlag IfSilent InitPluginsDir InstallButtonText InstallColors InstallDir InstallDirRegKey InstProgressFlags InstType InstTypeGetText InstTypeSetText IntCmp IntCmpU IntFmt IntOp IsWindow LangString LicenseBkColor LicenseData LicenseForceSelection LicenseLangString LicenseText LoadLanguageFile LogSet LogText MessageBox MiscButtonText Name OutFile Page PageCallbacks PageEx PageExEnd Pop Push Quit ReadEnvStr ReadINIStr ReadRegDWORD ReadRegStr Reboot RegDLL Rename ReserveFile Return RMDir SearchPath Section SectionEnd SectionGetFlags SectionGetInstTypes SectionGetSize SectionGetText SectionIn SectionSetFlags SectionSetInstTypes SectionSetSize SectionSetText SendMessage SetAutoClose SetBrandingImage SetCompress SetCompressor SetCompressorDictSize SetCtlColors SetCurInstType SetDatablockOptimize SetDateSave SetDetailsPrint SetDetailsView SetErrorLevel SetErrors SetFileAttributes SetFont SetOutPath SetOverwrite SetPluginUnload SetRebootFlag SetShellVarContext SetSilent ShowInstDetails ShowUninstDetails ShowWindow SilentInstall SilentUnInstall 
Sleep SpaceTexts StrCmp StrCpy StrLen SubCaption SubSection SubSectionEnd UninstallButtonText UninstallCaption UninstallIcon UninstallSubCaption UninstallText UninstPage UnRegDLL Var VIAddVersionKey VIProductVersion WindowIcon WriteINIStr WriteRegBin WriteRegDWORD WriteRegExpandStr WriteRegStr WriteUninstaller XPStyle' kwstr2 = 'all alwaysoff ARCHIVE auto both bzip2 components current custom details directory false FILE_ATTRIBUTE_ARCHIVE FILE_ATTRIBUTE_HIDDEN FILE_ATTRIBUTE_NORMAL FILE_ATTRIBUTE_OFFLINE FILE_ATTRIBUTE_READONLY FILE_ATTRIBUTE_SYSTEM FILE_ATTRIBUTE_TEMPORARY force grey HIDDEN hide IDABORT IDCANCEL IDIGNORE IDNO IDOK IDRETRY IDYES ifdiff ifnewer instfiles instfiles lastused leave left level license listonly lzma manual MB_ABORTRETRYIGNORE MB_DEFBUTTON1 MB_DEFBUTTON2 MB_DEFBUTTON3 MB_DEFBUTTON4 MB_ICONEXCLAMATION MB_ICONINFORMATION MB_ICONQUESTION MB_ICONSTOP MB_OK MB_OKCANCEL MB_RETRYCANCEL MB_RIGHT MB_SETFOREGROUND MB_TOPMOST MB_YESNO MB_YESNOCANCEL nevershow none NORMAL off OFFLINE on READONLY right RO show silent silentlog SYSTEM TEMPORARY text textonly true try uninstConfirm windows zlib' kwstr3 = 'MUI_ABORTWARNING MUI_ABORTWARNING_CANCEL_DEFAULT MUI_ABORTWARNING_TEXT MUI_BGCOLOR MUI_COMPONENTSPAGE_CHECKBITMAP MUI_COMPONENTSPAGE_NODESC MUI_COMPONENTSPAGE_SMALLDESC MUI_COMPONENTSPAGE_TEXT_COMPLIST MUI_COMPONENTSPAGE_TEXT_DESCRIPTION_INFO MUI_COMPONENTSPAGE_TEXT_DESCRIPTION_TITLE MUI_COMPONENTSPAGE_TEXT_INSTTYPE MUI_COMPONENTSPAGE_TEXT_TOP MUI_CUSTOMFUNCTION_ABORT MUI_CUSTOMFUNCTION_GUIINIT MUI_CUSTOMFUNCTION_UNABORT MUI_CUSTOMFUNCTION_UNGUIINIT MUI_DESCRIPTION_TEXT MUI_DIRECTORYPAGE_BGCOLOR MUI_DIRECTORYPAGE_TEXT_DESTINATION MUI_DIRECTORYPAGE_TEXT_TOP MUI_DIRECTORYPAGE_VARIABLE MUI_DIRECTORYPAGE_VERIFYONLEAVE MUI_FINISHPAGE_BUTTON MUI_FINISHPAGE_CANCEL_ENABLED MUI_FINISHPAGE_LINK MUI_FINISHPAGE_LINK_COLOR MUI_FINISHPAGE_LINK_LOCATION MUI_FINISHPAGE_NOAUTOCLOSE MUI_FINISHPAGE_NOREBOOTSUPPORT MUI_FINISHPAGE_REBOOTLATER_DEFAULT MUI_FINISHPAGE_RUN 
MUI_FINISHPAGE_RUN_FUNCTION MUI_FINISHPAGE_RUN_NOTCHECKED MUI_FINISHPAGE_RUN_PARAMETERS MUI_FINISHPAGE_RUN_TEXT MUI_FINISHPAGE_SHOWREADME MUI_FINISHPAGE_SHOWREADME_FUNCTION MUI_FINISHPAGE_SHOWREADME_NOTCHECKED MUI_FINISHPAGE_SHOWREADME_TEXT MUI_FINISHPAGE_TEXT MUI_FINISHPAGE_TEXT_LARGE MUI_FINISHPAGE_TEXT_REBOOT MUI_FINISHPAGE_TEXT_REBOOTLATER MUI_FINISHPAGE_TEXT_REBOOTNOW MUI_FINISHPAGE_TITLE MUI_FINISHPAGE_TITLE_3LINES MUI_FUNCTION_DESCRIPTION_BEGIN MUI_FUNCTION_DESCRIPTION_END MUI_HEADER_TEXT MUI_HEADER_TRANSPARENT_TEXT MUI_HEADERIMAGE MUI_HEADERIMAGE_BITMAP MUI_HEADERIMAGE_BITMAP_NOSTRETCH MUI_HEADERIMAGE_BITMAP_RTL MUI_HEADERIMAGE_BITMAP_RTL_NOSTRETCH MUI_HEADERIMAGE_RIGHT MUI_HEADERIMAGE_UNBITMAP MUI_HEADERIMAGE_UNBITMAP_NOSTRETCH MUI_HEADERIMAGE_UNBITMAP_RTL MUI_HEADERIMAGE_UNBITMAP_RTL_NOSTRETCH MUI_HWND MUI_ICON MUI_INSTALLCOLORS MUI_INSTALLOPTIONS_DISPLAY MUI_INSTALLOPTIONS_DISPLAY_RETURN MUI_INSTALLOPTIONS_EXTRACT MUI_INSTALLOPTIONS_EXTRACT_AS MUI_INSTALLOPTIONS_INITDIALOG MUI_INSTALLOPTIONS_READ MUI_INSTALLOPTIONS_SHOW MUI_INSTALLOPTIONS_SHOW_RETURN MUI_INSTALLOPTIONS_WRITE MUI_INSTFILESPAGE_ABORTHEADER_SUBTEXT MUI_INSTFILESPAGE_ABORTHEADER_TEXT MUI_INSTFILESPAGE_COLORS MUI_INSTFILESPAGE_FINISHHEADER_SUBTEXT MUI_INSTFILESPAGE_FINISHHEADER_TEXT MUI_INSTFILESPAGE_PROGRESSBAR MUI_LANGDLL_ALLLANGUAGES MUI_LANGDLL_ALWAYSSHOW MUI_LANGDLL_DISPLAY MUI_LANGDLL_INFO MUI_LANGDLL_REGISTRY_KEY MUI_LANGDLL_REGISTRY_ROOT MUI_LANGDLL_REGISTRY_VALUENAME MUI_LANGDLL_WINDOWTITLE MUI_LANGUAGE MUI_LICENSEPAGE_BGCOLOR MUI_LICENSEPAGE_BUTTON MUI_LICENSEPAGE_CHECKBOX MUI_LICENSEPAGE_CHECKBOX_TEXT MUI_LICENSEPAGE_RADIOBUTTONS MUI_LICENSEPAGE_RADIOBUTTONS_TEXT_ACCEPT MUI_LICENSEPAGE_RADIOBUTTONS_TEXT_DECLINE MUI_LICENSEPAGE_TEXT_BOTTOM MUI_LICENSEPAGE_TEXT_TOP MUI_PAGE_COMPONENTS MUI_PAGE_CUSTOMFUNCTION_LEAVE MUI_PAGE_CUSTOMFUNCTION_PRE MUI_PAGE_CUSTOMFUNCTION_SHOW MUI_PAGE_DIRECTORY MUI_PAGE_FINISH MUI_PAGE_HEADER_SUBTEXT MUI_PAGE_HEADER_TEXT MUI_PAGE_INSTFILES MUI_PAGE_LICENSE 
MUI_PAGE_STARTMENU MUI_PAGE_WELCOME MUI_RESERVEFILE_INSTALLOPTIONS MUI_RESERVEFILE_LANGDLL MUI_SPECIALINI MUI_STARTMENU_GETFOLDER MUI_STARTMENU_WRITE_BEGIN MUI_STARTMENU_WRITE_END MUI_STARTMENUPAGE_BGCOLOR MUI_STARTMENUPAGE_DEFAULTFOLDER MUI_STARTMENUPAGE_NODISABLE MUI_STARTMENUPAGE_REGISTRY_KEY MUI_STARTMENUPAGE_REGISTRY_ROOT MUI_STARTMENUPAGE_REGISTRY_VALUENAME MUI_STARTMENUPAGE_TEXT_CHECKBOX MUI_STARTMENUPAGE_TEXT_TOP MUI_UI MUI_UI_COMPONENTSPAGE_NODESC MUI_UI_COMPONENTSPAGE_SMALLDESC MUI_UI_HEADERIMAGE MUI_UI_HEADERIMAGE_RIGHT MUI_UNABORTWARNING MUI_UNABORTWARNING_CANCEL_DEFAULT MUI_UNABORTWARNING_TEXT MUI_UNCONFIRMPAGE_TEXT_LOCATION MUI_UNCONFIRMPAGE_TEXT_TOP MUI_UNFINISHPAGE_NOAUTOCLOSE MUI_UNFUNCTION_DESCRIPTION_BEGIN MUI_UNFUNCTION_DESCRIPTION_END MUI_UNGETLANGUAGE MUI_UNICON MUI_UNPAGE_COMPONENTS MUI_UNPAGE_CONFIRM MUI_UNPAGE_DIRECTORY MUI_UNPAGE_FINISH MUI_UNPAGE_INSTFILES MUI_UNPAGE_LICENSE MUI_UNPAGE_WELCOME MUI_UNWELCOMEFINISHPAGE_BITMAP MUI_UNWELCOMEFINISHPAGE_BITMAP_NOSTRETCH MUI_UNWELCOMEFINISHPAGE_INI MUI_WELCOMEFINISHPAGE_BITMAP MUI_WELCOMEFINISHPAGE_BITMAP_NOSTRETCH MUI_WELCOMEFINISHPAGE_CUSTOMFUNCTION_INIT MUI_WELCOMEFINISHPAGE_INI MUI_WELCOMEPAGE_TEXT MUI_WELCOMEPAGE_TITLE MUI_WELCOMEPAGE_TITLE_3LINES' bistr = 'addincludedir addplugindir AndIf cd define echo else endif error execute If ifdef ifmacrodef ifmacrondef ifndef include insertmacro macro macroend onGUIEnd onGUIInit onInit onInstFailed onInstSuccess onMouseOverSection onRebootFailed onSelChange onUserAbort onVerifyInstDir OrIf packhdr system undef verbose warning' instance = any('instance', ['\\$\\{.*?\\}', '\\$[A-Za-z0-9\\_]*']) define = any('define', ['\\![^\\n]*']) comment = any('comment', ['\\;[^\\n]*', '\\#[^\\n]*', '\\/\\*(.*?)\\*\\/']) return make_generic_c_patterns(((((kwstr1 + ' ') + kwstr2) + ' ') + kwstr3), bistr, instance=instance, define=define, comment=comment)
[ "def", "make_nsis_patterns", "(", ")", ":", "kwstr1", "=", "'Abort AddBrandingImage AddSize AllowRootDirInstall AllowSkipFiles AutoCloseWindow BGFont BGGradient BrandingText BringToFront Call CallInstDLL Caption ClearErrors CompletedText ComponentText CopyFiles CRCCheck CreateDirectory CreateFont Crea...
strongly inspired from idlelib .
train
false
31,101
def get_adjacent_distances(dist_matrix_header, dist_matrix, sample_ids, strict=False): filtered_idx = [] filtered_sids = [] for sid in sample_ids: try: idx = dist_matrix_header.index(sid) except ValueError: if strict: raise ValueError(('Sample ID (%s) is not present in distance matrix' % sid)) else: pass else: filtered_idx.append(idx) filtered_sids.append(sid) if (len(filtered_idx) < 2): raise ValueError(('At least two of your sample_ids must be present in the distance matrix. %d are present.' % len(filtered_idx))) distance_results = [] header_results = [] for i in range((len(filtered_idx) - 1)): distance_results.append(dist_matrix[filtered_idx[i]][filtered_idx[(i + 1)]]) header_results.append((filtered_sids[i], filtered_sids[(i + 1)])) return (distance_results, header_results)
[ "def", "get_adjacent_distances", "(", "dist_matrix_header", ",", "dist_matrix", ",", "sample_ids", ",", "strict", "=", "False", ")", ":", "filtered_idx", "=", "[", "]", "filtered_sids", "=", "[", "]", "for", "sid", "in", "sample_ids", ":", "try", ":", "idx",...
return the distances between the adjacent sample_ids as a list dist_matrix_header: distance matrix headers .
train
false
31,103
def load_workbook(filename, use_iterators=False): if isinstance(filename, file): if ('b' not in filename.mode): raise OpenModeError('File-object must be opened in binary mode') try: archive = ZipFile(filename, 'r', ZIP_DEFLATED) except (BadZipfile, RuntimeError, IOError, ValueError): raise InvalidFileException() wb = Workbook() if use_iterators: wb._set_optimized_read() try: _load_workbook(wb, archive, filename, use_iterators) except KeyError: raise InvalidFileException() finally: archive.close() return wb
[ "def", "load_workbook", "(", "filename", ",", "use_iterators", "=", "False", ")", ":", "if", "isinstance", "(", "filename", ",", "file", ")", ":", "if", "(", "'b'", "not", "in", "filename", ".", "mode", ")", ":", "raise", "OpenModeError", "(", "'File-obj...
open the given filename and return the workbook .
train
false
31,104
def post_mortem_excepthook(type, value, tb): clear_post_mortem() if IS_IPYKERNEL: from IPython.core.getipython import get_ipython ipython_shell = get_ipython() ipython_shell.showtraceback((type, value, tb)) p = pdb.Pdb(ipython_shell.colors) else: traceback.print_exception(type, value, tb, file=sys.stderr) p = pdb.Pdb() if (not (type == SyntaxError)): time.sleep(0.1) _print(('*' * 40)) _print('Entering post mortem debugging...') _print(('*' * 40)) p.send_initial_notification = False p.reset() frame = tb.tb_frame prev = frame while frame.f_back: prev = frame frame = frame.f_back frame = prev time.sleep(0.1) p.interaction(frame, tb)
[ "def", "post_mortem_excepthook", "(", "type", ",", "value", ",", "tb", ")", ":", "clear_post_mortem", "(", ")", "if", "IS_IPYKERNEL", ":", "from", "IPython", ".", "core", ".", "getipython", "import", "get_ipython", "ipython_shell", "=", "get_ipython", "(", ")"...
for post mortem exception handling .
train
false
31,105
def set_limit(limit, force=False, reset_after=False, ignore_errors=False): limit = (limit or 0) glimit = (_limit[0] or 0) if (limit != glimit): _limit[0] = limit for pool in _all_pools(): pool.resize(limit) return limit
[ "def", "set_limit", "(", "limit", ",", "force", "=", "False", ",", "reset_after", "=", "False", ",", "ignore_errors", "=", "False", ")", ":", "limit", "=", "(", "limit", "or", "0", ")", "glimit", "=", "(", "_limit", "[", "0", "]", "or", "0", ")", ...
set new connection pool limit .
train
false
31,106
@not_implemented_for('directed') @not_implemented_for('multigraph') def rich_club_coefficient(G, normalized=True, Q=100): if (G.number_of_selfloops() > 0): raise Exception('rich_club_coefficient is not implemented for graphs with self loops.') rc = _compute_rc(G) if normalized: R = G.copy(with_data=False) E = R.number_of_edges() nx.double_edge_swap(R, (Q * E), max_tries=((Q * E) * 10)) rcran = _compute_rc(R) rc = {k: (v / rcran[k]) for (k, v) in rc.items()} return rc
[ "@", "not_implemented_for", "(", "'directed'", ")", "@", "not_implemented_for", "(", "'multigraph'", ")", "def", "rich_club_coefficient", "(", "G", ",", "normalized", "=", "True", ",", "Q", "=", "100", ")", ":", "if", "(", "G", ".", "number_of_selfloops", "(...
returns the rich-club coefficient of the graph g .
train
false
31,107
def run_raw(command): proc = subprocess.Popen(command, shell=True, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, env=_make_env()) data = proc.stdout.read() proc.wait() while (data.endswith('\n') or data.endswith('\r')): data = data[:(-1)] if data: data = '\n'.join([l for l in data.splitlines() if l]) return data else: return ''
[ "def", "run_raw", "(", "command", ")", ":", "proc", "=", "subprocess", ".", "Popen", "(", "command", ",", "shell", "=", "True", ",", "stderr", "=", "subprocess", ".", "STDOUT", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "env", "=", "_make_env"...
runs the string command .
train
false
31,112
def filter_a(name, value): if (name in ('title', 'target')): return True if (name == 'href'): url_components = urlparse.urlsplit(value) if (url_components[0] in ['http', 'https']): return True logging.error(('Found invalid URL href: %s' % value)) return False
[ "def", "filter_a", "(", "name", ",", "value", ")", ":", "if", "(", "name", "in", "(", "'title'", ",", "'target'", ")", ")", ":", "return", "True", "if", "(", "name", "==", "'href'", ")", ":", "url_components", "=", "urlparse", ".", "urlsplit", "(", ...
returns whether the described attribute of an anchor (a) tag should be whitelisted .
train
false
31,113
def from_legacy_mapping(legacy_block_device_mapping, image_uuid='', root_device_name=None, no_root=False): new_bdms = [BlockDeviceDict.from_legacy(legacy_bdm) for legacy_bdm in legacy_block_device_mapping] if no_root: for bdm in new_bdms: bdm['boot_index'] = (-1) return new_bdms image_bdm = None volume_backed = False if ((not root_device_name) and (not image_uuid)): non_blank = [bdm for bdm in new_bdms if (bdm['source_type'] != 'blank')] if non_blank: non_blank[0]['boot_index'] = 0 else: for bdm in new_bdms: if ((bdm['source_type'] in ('volume', 'snapshot', 'image')) and (root_device_name is not None) and (strip_dev(bdm.get('device_name')) == strip_dev(root_device_name))): bdm['boot_index'] = 0 volume_backed = True elif (not bdm['no_device']): bdm['boot_index'] = (-1) else: bdm['boot_index'] = None if ((not volume_backed) and image_uuid): image_bdm = create_image_bdm(image_uuid, boot_index=0) return (([image_bdm] if image_bdm else []) + new_bdms)
[ "def", "from_legacy_mapping", "(", "legacy_block_device_mapping", ",", "image_uuid", "=", "''", ",", "root_device_name", "=", "None", ",", "no_root", "=", "False", ")", ":", "new_bdms", "=", "[", "BlockDeviceDict", ".", "from_legacy", "(", "legacy_bdm", ")", "fo...
transform a legacy list of block devices to the new data format .
train
false
31,114
def estimate_spectral_norm(A, its=20): from scipy.sparse.linalg import aslinearoperator A = aslinearoperator(A) (m, n) = A.shape matvec = (lambda x: A.matvec(x)) matveca = (lambda x: A.rmatvec(x)) if _is_real(A): return backend.idd_snorm(m, n, matveca, matvec, its=its) else: return backend.idz_snorm(m, n, matveca, matvec, its=its)
[ "def", "estimate_spectral_norm", "(", "A", ",", "its", "=", "20", ")", ":", "from", "scipy", ".", "sparse", ".", "linalg", "import", "aslinearoperator", "A", "=", "aslinearoperator", "(", "A", ")", "(", "m", ",", "n", ")", "=", "A", ".", "shape", "ma...
estimate spectral norm of a matrix by the randomized power method .
train
false
31,115
def p_translation_unit_1(t): pass
[ "def", "p_translation_unit_1", "(", "t", ")", ":", "pass" ]
translation_unit : external_declaration .
train
false
31,116
def test_forum_update_read_two_topics(database, user, topic, topic_moderator): forumsread = ForumsRead.query.filter((ForumsRead.user_id == user.id), (ForumsRead.forum_id == topic.forum_id)).first() forum = topic.forum with current_app.test_request_context(): login_user(user) topicsread = TopicsRead() topicsread.user_id = user.id topicsread.topic_id = topic.id topicsread.forum_id = topic.forum_id topicsread.last_read = datetime.utcnow() topicsread.save() assert (not forum.update_read(current_user, forumsread, topicsread))
[ "def", "test_forum_update_read_two_topics", "(", "database", ",", "user", ",", "topic", ",", "topic_moderator", ")", ":", "forumsread", "=", "ForumsRead", ".", "query", ".", "filter", "(", "(", "ForumsRead", ".", "user_id", "==", "user", ".", "id", ")", ",",...
test if the forumsread tracker will be updated if there are two topics and where one is unread and the other is read .
train
false
31,117
def action_event_finish(context, values): convert_datetimes(values, 'start_time', 'finish_time') session = get_session() with session.begin(): action = _action_get_by_request_id(context, values['instance_uuid'], values['request_id'], session) if (not action): raise exception.InstanceActionNotFound(request_id=values['request_id'], instance_uuid=values['instance_uuid']) event_ref = model_query(context, models.InstanceActionEvent, session=session).filter_by(action_id=action['id']).filter_by(event=values['event']).first() if (not event_ref): raise exception.InstanceActionEventNotFound(action_id=action['id'], event=values['event']) event_ref.update(values) if (values['result'].lower() == 'error'): action.update({'message': 'Error'}) return event_ref
[ "def", "action_event_finish", "(", "context", ",", "values", ")", ":", "convert_datetimes", "(", "values", ",", "'start_time'", ",", "'finish_time'", ")", "session", "=", "get_session", "(", ")", "with", "session", ".", "begin", "(", ")", ":", "action", "=",...
finish an event on an instance action .
train
false
31,119
@unbox(SeriesType) def unbox_series(typ, obj, c): index = c.pyapi.object_getattr_string(obj, '_index') values = c.pyapi.object_getattr_string(obj, '_values') series = make_series(c.context, c.builder, typ) series.index = c.unbox(typ.index, index).value series.values = c.unbox(typ.values, values).value return NativeValue(series._getvalue())
[ "@", "unbox", "(", "SeriesType", ")", "def", "unbox_series", "(", "typ", ",", "obj", ",", "c", ")", ":", "index", "=", "c", ".", "pyapi", ".", "object_getattr_string", "(", "obj", ",", "'_index'", ")", "values", "=", "c", ".", "pyapi", ".", "object_g...
convert a series object to a native structure .
train
false
31,120
def get_current_babel_locale(fallback='en-US-POSIX'): locale = get_babel_locale(locale_string=translation.get_language()) if (not locale): if fallback: locale = get_babel_locale(fallback) if (not locale): raise ValueError(('Failed to get current babel locale (lang=%s)' % (translation.get_language(),))) return locale
[ "def", "get_current_babel_locale", "(", "fallback", "=", "'en-US-POSIX'", ")", ":", "locale", "=", "get_babel_locale", "(", "locale_string", "=", "translation", ".", "get_language", "(", ")", ")", "if", "(", "not", "locale", ")", ":", "if", "fallback", ":", ...
get a babel locale based on the threads locale context .
train
false
31,122
def parse_date_pattern(name): prev = '' curr = '' rendered = '' for s in range(0, len(name)): curr = name[s] if (curr == '%'): pass elif ((curr in settings.date_regex()) and (prev == '%')): rendered += str(datetime.utcnow().strftime('%{0}'.format(curr))) else: rendered += curr logger.debug('Partially rendered name: {0}'.format(rendered)) prev = curr logger.debug('Fully rendered name: {0}'.format(rendered)) return rendered
[ "def", "parse_date_pattern", "(", "name", ")", ":", "prev", "=", "''", "curr", "=", "''", "rendered", "=", "''", "for", "s", "in", "range", "(", "0", ",", "len", "(", "name", ")", ")", ":", "curr", "=", "name", "[", "s", "]", "if", "(", "curr",...
scan and parse name for :py:func:time .
train
false
31,123
def cpython_only(test): return impl_detail(cpython=True)(test)
[ "def", "cpython_only", "(", "test", ")", ":", "return", "impl_detail", "(", "cpython", "=", "True", ")", "(", "test", ")" ]
decorator for tests only applicable on cpython .
train
false
31,124
def np_array_datetime64_compat(arr, *args, **kwargs): if (not _np_version_under1p11): if (hasattr(arr, '__iter__') and (not isinstance(arr, string_and_binary_types))): arr = [tz_replacer(s) for s in arr] else: arr = tz_replacer(arr) return np.array(arr, *args, **kwargs)
[ "def", "np_array_datetime64_compat", "(", "arr", ",", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "not", "_np_version_under1p11", ")", ":", "if", "(", "hasattr", "(", "arr", ",", "'__iter__'", ")", "and", "(", "not", "isinstance", "(", "arr", ...
provide compat for construction of an array of strings to a np .
train
false
31,125
def login_token_status(token): return get_token_status(token, 'login', 'LOGIN')
[ "def", "login_token_status", "(", "token", ")", ":", "return", "get_token_status", "(", "token", ",", "'login'", ",", "'LOGIN'", ")" ]
returns the expired status .
train
false
31,127
def no_nonsense(value): value = str(value).strip() if (value.lower() == 'none'): value = '' return (None, value)
[ "def", "no_nonsense", "(", "value", ")", ":", "value", "=", "str", "(", "value", ")", ".", "strip", "(", ")", "if", "(", "value", ".", "lower", "(", ")", "==", "'none'", ")", ":", "value", "=", "''", "return", "(", "None", ",", "value", ")" ]
strip and filter out none and none from strings .
train
false
31,128
def old_lease(mac, ip_address): pass
[ "def", "old_lease", "(", "mac", ",", "ip_address", ")", ":", "pass" ]
called when an old lease is recognized .
train
false
31,131
def lookupHostInfo(name, timeout=None): return getResolver().lookupHostInfo(name, timeout)
[ "def", "lookupHostInfo", "(", "name", ",", "timeout", "=", "None", ")", ":", "return", "getResolver", "(", ")", ".", "lookupHostInfo", "(", "name", ",", "timeout", ")" ]
perform a hinfo record lookup .
train
false
31,133
def _test_port(family, sock_type, port): assert (family in (socket.AF_INET,)), ('Invalid family value %s' % family) assert (sock_type in (socket.SOCK_DGRAM, socket.SOCK_STREAM)), ('Invalid sock_type value %s' % sock_type) assert (0 < port <= 65535), ('Invalid port value %s' % port) s = None try: s = socket.socket(family, sock_type) if (sock_type == socket.SOCK_STREAM): s.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, struct.pack('ii', 1, 0)) s.bind(('', port)) is_port_working = True except socket.error as e: logger.debug('Port test failed (port=%s, family=%s, type=%s): %s', port, family, sock_type, e) is_port_working = False finally: if s: s.close() return is_port_working
[ "def", "_test_port", "(", "family", ",", "sock_type", ",", "port", ")", ":", "assert", "(", "family", "in", "(", "socket", ".", "AF_INET", ",", ")", ")", ",", "(", "'Invalid family value %s'", "%", "family", ")", "assert", "(", "sock_type", "in", "(", ...
tests if a port is available .
train
false
31,134
def list_monitored_resource_descriptors(client, project_resource): request = client.projects().monitoredResourceDescriptors().list(name=project_resource) response = request.execute() print 'list_monitored_resource_descriptors response:\n{}'.format(pprint.pformat(response))
[ "def", "list_monitored_resource_descriptors", "(", "client", ",", "project_resource", ")", ":", "request", "=", "client", ".", "projects", "(", ")", ".", "monitoredResourceDescriptors", "(", ")", ".", "list", "(", "name", "=", "project_resource", ")", "response", ...
query the projects .
train
false
31,136
def _utf8_str(s): if isinstance(s, unicode): return s.encode('utf-8') else: return str(s)
[ "def", "_utf8_str", "(", "s", ")", ":", "if", "isinstance", "(", "s", ",", "unicode", ")", ":", "return", "s", ".", "encode", "(", "'utf-8'", ")", "else", ":", "return", "str", "(", "s", ")" ]
convert unicode to utf-8 .
train
false
31,137
def explore_message(c, uid): msgdict = c.fetch(uid, ['BODYSTRUCTURE', 'FLAGS']) while True: print print 'Flags:', flaglist = msgdict[uid]['FLAGS'] if flaglist: print ' '.join(flaglist) else: print 'none' display_structure(msgdict[uid]['BODYSTRUCTURE']) print reply = raw_input(('Message %s - type a part name, or "q" to quit: ' % uid)).strip() print if reply.lower().startswith('q'): break key = ('BODY[%s]' % reply) try: msgdict2 = c.fetch(uid, [key]) except c._imap.error: print ('Error - cannot fetch section %r' % reply) else: content = msgdict2[uid][key] if content: print banner print content.strip() print banner else: print '(No such section)'
[ "def", "explore_message", "(", "c", ",", "uid", ")", ":", "msgdict", "=", "c", ".", "fetch", "(", "uid", ",", "[", "'BODYSTRUCTURE'", ",", "'FLAGS'", "]", ")", "while", "True", ":", "print", "print", "'Flags:'", ",", "flaglist", "=", "msgdict", "[", ...
let the user view various parts of a given message .
train
false
31,138
@bdd.when(bdd.parsers.parse('I set up a fake editor returning "{text}"')) def set_up_editor(quteproc, httpbin, tmpdir, text): script = (tmpdir / 'script.py') script.write(textwrap.dedent("\n import sys\n\n with open(sys.argv[1], 'w', encoding='utf-8') as f:\n f.write({text!r})\n ".format(text=text))) editor = '"{}" "{}" {{}}'.format(sys.executable, script) quteproc.set_setting('general', 'editor', editor)
[ "@", "bdd", ".", "when", "(", "bdd", ".", "parsers", ".", "parse", "(", "'I set up a fake editor returning \"{text}\"'", ")", ")", "def", "set_up_editor", "(", "quteproc", ",", "httpbin", ",", "tmpdir", ",", "text", ")", ":", "script", "=", "(", "tmpdir", ...
set up general->editor to a small python script inserting a text .
train
false
31,139
def classmethod2display(class_, method_, descriptor_): return ('%s -> %s ( %s )' % (class_, method_, descriptor_))
[ "def", "classmethod2display", "(", "class_", ",", "method_", ",", "descriptor_", ")", ":", "return", "(", "'%s -> %s ( %s )'", "%", "(", "class_", ",", "method_", ",", "descriptor_", ")", ")" ]
convert two strings such as "lcom/mwr/example/sieve/addentryactivity;" and "oncreate" into a beautiful :) string to display xrefs: "lcom/mwr/example/sieve/addentryactivity; -> oncreate" .
train
false
31,140
def lowercase_attr_names(tag): tag.attrs = dict([(key.lower(), value) for (key, value) in iteritems(tag.attrs)])
[ "def", "lowercase_attr_names", "(", "tag", ")", ":", "tag", ".", "attrs", "=", "dict", "(", "[", "(", "key", ".", "lower", "(", ")", ",", "value", ")", "for", "(", "key", ",", "value", ")", "in", "iteritems", "(", "tag", ".", "attrs", ")", "]", ...
lower-case all attribute names of the provided beautifulsoup tag .
train
true
31,142
def MonkeyPatchPdb(pdb): def NewSetTrace(): 'Replacement for set_trace() that uses the original i/o streams.\n\n This is necessary because by the time the user code that might\n invoke pdb.set_trace() runs, the default sys.stdin and sys.stdout\n are redirected to the HTTP request and response streams instead,\n so that pdb will encounter garbage (or EOF) in its input, and its\n output will garble the HTTP response. Fortunately, sys.__stdin__\n and sys.__stderr__ retain references to the original streams --\n this is a standard Python feature. Also, fortunately, as of\n Python 2.5, the Pdb class lets you easily override stdin and\n stdout. The original set_trace() function does essentially the\n same thing as the code here except it instantiates Pdb() without\n arguments.\n ' p = pdb.Pdb(stdin=sys.__stdin__, stdout=sys.__stdout__) p.set_trace(sys._getframe().f_back) pdb.set_trace = NewSetTrace
[ "def", "MonkeyPatchPdb", "(", "pdb", ")", ":", "def", "NewSetTrace", "(", ")", ":", "p", "=", "pdb", ".", "Pdb", "(", "stdin", "=", "sys", ".", "__stdin__", ",", "stdout", "=", "sys", ".", "__stdout__", ")", "p", ".", "set_trace", "(", "sys", ".", ...
given a reference to the pdb module .
train
false
31,144
def verify_fun(lazy_obj, fun): if (not fun): raise salt.exceptions.SaltInvocationError('Must specify a function to run!\nex: manage.up') if (fun not in lazy_obj): raise salt.exceptions.CommandExecutionError(lazy_obj.missing_fun_string(fun))
[ "def", "verify_fun", "(", "lazy_obj", ",", "fun", ")", ":", "if", "(", "not", "fun", ")", ":", "raise", "salt", ".", "exceptions", ".", "SaltInvocationError", "(", "'Must specify a function to run!\\nex: manage.up'", ")", "if", "(", "fun", "not", "in", "lazy_o...
check that the function passed really exists .
train
true
31,145
def identified_stack(handler): @policy_enforce @six.wraps(handler) def handle_stack_method(controller, req, stack_name, stack_id, **kwargs): stack_identity = identifier.HeatIdentifier(req.context.tenant_id, stack_name, stack_id) return handler(controller, req, dict(stack_identity), **kwargs) return handle_stack_method
[ "def", "identified_stack", "(", "handler", ")", ":", "@", "policy_enforce", "@", "six", ".", "wraps", "(", "handler", ")", "def", "handle_stack_method", "(", "controller", ",", "req", ",", "stack_name", ",", "stack_id", ",", "**", "kwargs", ")", ":", "stac...
decorator that passes a stack identifier instead of path components .
train
false
31,146
def oo_openshift_env(hostvars): if (not issubclass(type(hostvars), dict)): raise errors.AnsibleFilterError('|failed expects hostvars is a dict') facts = {} regex = re.compile('^openshift_.*') for key in hostvars: if regex.match(key): facts[key] = hostvars[key] migrations = {'openshift_router_selector': 'openshift_hosted_router_selector', 'openshift_registry_selector': 'openshift_hosted_registry_selector'} for (old_fact, new_fact) in migrations.items(): if ((old_fact in facts) and (new_fact not in facts)): facts[new_fact] = facts[old_fact] return facts
[ "def", "oo_openshift_env", "(", "hostvars", ")", ":", "if", "(", "not", "issubclass", "(", "type", "(", "hostvars", ")", ",", "dict", ")", ")", ":", "raise", "errors", ".", "AnsibleFilterError", "(", "'|failed expects hostvars is a dict'", ")", "facts", "=", ...
return facts which begin with "openshift_" and translate legacy facts to their openshift_env counterparts .
train
false
31,147
def ancestor(synset1, synset2): (h1, h2) = (synset1.hypernyms(recursive=True), synset2.hypernyms(recursive=True)) for s in h1: if (s in h2): return s
[ "def", "ancestor", "(", "synset1", ",", "synset2", ")", ":", "(", "h1", ",", "h2", ")", "=", "(", "synset1", ".", "hypernyms", "(", "recursive", "=", "True", ")", ",", "synset2", ".", "hypernyms", "(", "recursive", "=", "True", ")", ")", "for", "s"...
returns the common ancestor of both synsets .
train
false
31,148
def _zip_links(links, linked_paths): if (len(links) != len(linked_paths)): raise ValueError(u'Expected to receive resolved Paths per Link. Got: {} and {}'.format(links, linked_paths)) return tuple((Path(link.path, dest.dependencies[0].stat) for (link, dest) in zip(links, linked_paths) if (len(dest.dependencies) > 0)))
[ "def", "_zip_links", "(", "links", ",", "linked_paths", ")", ":", "if", "(", "len", "(", "links", ")", "!=", "len", "(", "linked_paths", ")", ")", ":", "raise", "ValueError", "(", "u'Expected to receive resolved Paths per Link. Got: {} and {}'", ".", "format", "...
given a set of paths and a resolved collection per link in the paths .
train
false
31,149
def validateBGColor(color): if (type(color) == str): if (color[0] != '#'): color = ('#' + color) if (len(color) != 7): raise ValidationException(('%r is not a valid color. Expected HTML color syntax (i.e. #RRGGBB)' % color)) try: r = int(color[1:3], 16) g = int(color[3:5], 16) b = int(color[5:7], 16) return (r, g, b, 0) except ValueError: raise ValidationException(('%r is not a valid color. Expected HTML color syntax (i.e. #RRGGBB)' % color)) elif (type(color) == tuple): if (len(color) != 4): raise ValidationException(('%r is not a valid color. Expected a 4-tuple' % (color,))) return color
[ "def", "validateBGColor", "(", "color", ")", ":", "if", "(", "type", "(", "color", ")", "==", "str", ")", ":", "if", "(", "color", "[", "0", "]", "!=", "'#'", ")", ":", "color", "=", "(", "'#'", "+", "color", ")", "if", "(", "len", "(", "colo...
bg color must be an html color .
train
false
31,150
@_docker_client def _client_wrapper(attr, *args, **kwargs): catch_api_errors = kwargs.pop('catch_api_errors', True) if ('docker.client' not in __context__): raise CommandExecutionError('Docker service not running or not installed?') func = getattr(__context__['docker.client'], attr) if (func is None): raise SaltInvocationError("Invalid client action '{0}'".format(attr)) err = '' try: return func(*args, **kwargs) except docker.errors.APIError as exc: if catch_api_errors: raise CommandExecutionError('Error {0}: {1}'.format(exc.response.status_code, exc.explanation)) else: raise except Exception as exc: err = '{0}'.format(exc) msg = 'Unable to perform {0}'.format(attr) if err: msg += ': {0}'.format(err) raise CommandExecutionError(msg)
[ "@", "_docker_client", "def", "_client_wrapper", "(", "attr", ",", "*", "args", ",", "**", "kwargs", ")", ":", "catch_api_errors", "=", "kwargs", ".", "pop", "(", "'catch_api_errors'", ",", "True", ")", "if", "(", "'docker.client'", "not", "in", "__context__...
common functionality for getting information from a container .
train
false
31,151
def _clean_pkglist(pkgs): for (name, versions) in six.iteritems(pkgs): stripped = [v for v in versions if (v != '1')] if (not stripped): pkgs[name] = ['1'] elif (versions != stripped): pkgs[name] = stripped
[ "def", "_clean_pkglist", "(", "pkgs", ")", ":", "for", "(", "name", ",", "versions", ")", "in", "six", ".", "iteritems", "(", "pkgs", ")", ":", "stripped", "=", "[", "v", "for", "v", "in", "versions", "if", "(", "v", "!=", "'1'", ")", "]", "if", ...
go through package list and .
train
false
31,152
def get_current_instance_id(): return os.environ.get('INSTANCE_ID', None)
[ "def", "get_current_instance_id", "(", ")", ":", "return", "os", ".", "environ", ".", "get", "(", "'INSTANCE_ID'", ",", "None", ")" ]
returns the id of the current instance .
train
false
31,154
def extended_blank_lines(logical_line, blank_lines, blank_before, indent_level, previous_logical): if previous_logical.startswith(u'class '): if logical_line.startswith((u'def ', u'class ', u'@')): if (indent_level and (not blank_lines) and (not blank_before)): (yield (0, u'E309 expected 1 blank line after class declaration')) elif previous_logical.startswith(u'def '): if (blank_lines and pep8.DOCSTRING_REGEX.match(logical_line)): (yield (0, u'E303 too many blank lines ({0})'.format(blank_lines))) elif pep8.DOCSTRING_REGEX.match(previous_logical): if (indent_level and (not blank_lines) and (not blank_before) and logical_line.startswith(u'def ') and (u'(self' in logical_line)): (yield (0, u'E301 expected 1 blank line, found 0'))
[ "def", "extended_blank_lines", "(", "logical_line", ",", "blank_lines", ",", "blank_before", ",", "indent_level", ",", "previous_logical", ")", ":", "if", "previous_logical", ".", "startswith", "(", "u'class '", ")", ":", "if", "logical_line", ".", "startswith", "...
check for missing blank lines after class declaration .
train
false
31,155
def CheckServiceHealth(device, logger, callback): def _OnResponse(response): response_dict = www_util.ParseJSONResponse(response) if (response_dict.get('status') == 'alert'): logger.error(_FormatServiceHealthReport(response_dict)) else: logger.info('CheckServiceHealth passed.') callback() device.SendRequest('service_health', _OnResponse, 'GET')
[ "def", "CheckServiceHealth", "(", "device", ",", "logger", ",", "callback", ")", ":", "def", "_OnResponse", "(", "response", ")", ":", "response_dict", "=", "www_util", ".", "ParseJSONResponse", "(", "response", ")", "if", "(", "response_dict", ".", "get", "...
simple scenario which pings the service_health endpoint of the service .
train
false
31,156
def _fastq_convert_fasta(in_handle, out_handle, alphabet=None): from Bio.SeqIO.QualityIO import FastqGeneralIterator count = 0 for (title, seq, qual) in FastqGeneralIterator(in_handle): count += 1 out_handle.write(('>%s\n' % title)) for i in range(0, len(seq), 60): out_handle.write((seq[i:(i + 60)] + '\n')) return count
[ "def", "_fastq_convert_fasta", "(", "in_handle", ",", "out_handle", ",", "alphabet", "=", "None", ")", ":", "from", "Bio", ".", "SeqIO", ".", "QualityIO", "import", "FastqGeneralIterator", "count", "=", "0", "for", "(", "title", ",", "seq", ",", "qual", ")...
fast fastq to fasta conversion .
train
false
31,157
def _create_with_wait(snapshot, wait_timeout_secs, sleep_func=time.sleep): time_waited = 0 snapshot.update() while (snapshot.status != 'completed'): sleep_func(3) snapshot.update() time_waited += 3 if (wait_timeout_secs and (time_waited > wait_timeout_secs)): return False return True
[ "def", "_create_with_wait", "(", "snapshot", ",", "wait_timeout_secs", ",", "sleep_func", "=", "time", ".", "sleep", ")", ":", "time_waited", "=", "0", "snapshot", ".", "update", "(", ")", "while", "(", "snapshot", ".", "status", "!=", "'completed'", ")", ...
wait for the snapshot to be created .
train
false
31,158
def valid_filename(filename): for postfix in IGNORED_POSTFIXES: if filename.endswith(postfix): return False for directory in IGNORED_DIRECTORIES: if filename.startswith(directory): return False return (filename.endswith('.py') and (filename not in IGNORED_FILES))
[ "def", "valid_filename", "(", "filename", ")", ":", "for", "postfix", "in", "IGNORED_POSTFIXES", ":", "if", "filename", ".", "endswith", "(", "postfix", ")", ":", "return", "False", "for", "directory", "in", "IGNORED_DIRECTORIES", ":", "if", "filename", ".", ...
checks if a file is a python file and is not ignored .
train
false
31,159
def confidence_interval_continuous(point_estimate, stddev, sample_size, confidence=0.95, **kwargs): alpha = ppf(((confidence + 1) / 2), (sample_size - 1)) margin = (stddev / sqrt(sample_size)) return ((point_estimate - (alpha * margin)), (point_estimate + (alpha * margin)))
[ "def", "confidence_interval_continuous", "(", "point_estimate", ",", "stddev", ",", "sample_size", ",", "confidence", "=", "0.95", ",", "**", "kwargs", ")", ":", "alpha", "=", "ppf", "(", "(", "(", "confidence", "+", "1", ")", "/", "2", ")", ",", "(", ...
continuous confidence interval from sample size and standard error .
train
true
31,160
def test_gate(): h = HadamardGate(1) assert (h.min_qubits == 2) assert (h.nqubits == 1) i0 = Wild('i0') i1 = Wild('i1') h0_w1 = HadamardGate(i0) h0_w2 = HadamardGate(i0) h1_w1 = HadamardGate(i1) assert (h0_w1 == h0_w2) assert (h0_w1 != h1_w1) assert (h1_w1 != h0_w2) cnot_10_w1 = CNOT(i1, i0) cnot_10_w2 = CNOT(i1, i0) cnot_01_w1 = CNOT(i0, i1) assert (cnot_10_w1 == cnot_10_w2) assert (cnot_10_w1 != cnot_01_w1) assert (cnot_10_w2 != cnot_01_w1)
[ "def", "test_gate", "(", ")", ":", "h", "=", "HadamardGate", "(", "1", ")", "assert", "(", "h", ".", "min_qubits", "==", "2", ")", "assert", "(", "h", ".", "nqubits", "==", "1", ")", "i0", "=", "Wild", "(", "'i0'", ")", "i1", "=", "Wild", "(", ...
test a basic gate .
train
false
31,162
def _getgroups(uid): result = [] pwent = pwd.getpwuid(uid) result.append(pwent.pw_gid) for grent in grp.getgrall(): if (pwent.pw_name in grent.gr_mem): result.append(grent.gr_gid) return result
[ "def", "_getgroups", "(", "uid", ")", ":", "result", "=", "[", "]", "pwent", "=", "pwd", ".", "getpwuid", "(", "uid", ")", "result", ".", "append", "(", "pwent", ".", "pw_gid", ")", "for", "grent", "in", "grp", ".", "getgrall", "(", ")", ":", "if...
return the primary and supplementary groups for the given uid .
train
false
31,163
def document_load_reload_action(section, action_name, resource_name, event_emitter, load_model, service_model, include_signature=True): description = ('Calls :py:meth:`%s.Client.%s` to update the attributes of the %s resource. Note that the load and reload methods are the same method and can be used interchangeably.' % (get_service_module_name(service_model), xform_name(load_model.request.operation), resource_name)) example_resource_name = xform_name(resource_name) if (service_model.service_name == resource_name): example_resource_name = resource_name example_prefix = ('%s.%s' % (example_resource_name, action_name)) document_model_driven_method(section=section, method_name=action_name, operation_model=OperationModel({}, service_model), event_emitter=event_emitter, method_description=description, example_prefix=example_prefix, include_signature=include_signature)
[ "def", "document_load_reload_action", "(", "section", ",", "action_name", ",", "resource_name", ",", "event_emitter", ",", "load_model", ",", "service_model", ",", "include_signature", "=", "True", ")", ":", "description", "=", "(", "'Calls :py:meth:`%s.Client.%s` to u...
documents the resource load action .
train
false
31,164
def cookie_encode(data, key): msg = base64.b64encode(pickle.dumps(data, (-1))) sig = base64.b64encode(hmac.new(tob(key), msg).digest()) return (((tob('!') + sig) + tob('?')) + msg)
[ "def", "cookie_encode", "(", "data", ",", "key", ")", ":", "msg", "=", "base64", ".", "b64encode", "(", "pickle", ".", "dumps", "(", "data", ",", "(", "-", "1", ")", ")", ")", "sig", "=", "base64", ".", "b64encode", "(", "hmac", ".", "new", "(", ...
encode and sign a pickle-able object .
train
true
31,165
def vsepr_parse_user_answer(user_input): return json.loads(user_input)
[ "def", "vsepr_parse_user_answer", "(", "user_input", ")", ":", "return", "json", ".", "loads", "(", "user_input", ")" ]
user_input is json generated by vsepr .
train
false
31,166
def validate_user_access_to_subscribers(user_profile, stream): validate_user_access_to_subscribers_helper(user_profile, {'realm__domain': stream.realm.domain, 'realm_id': stream.realm_id, 'invite_only': stream.invite_only}, (lambda : subscribed_to_stream(user_profile, stream)))
[ "def", "validate_user_access_to_subscribers", "(", "user_profile", ",", "stream", ")", ":", "validate_user_access_to_subscribers_helper", "(", "user_profile", ",", "{", "'realm__domain'", ":", "stream", ".", "realm", ".", "domain", ",", "'realm_id'", ":", "stream", "....
validates whether the user can view the subscribers of a stream .
train
false
31,167
def copy_database_template(source, db_path): db_path_dir = os.path.dirname(db_path) if (not os.path.exists(db_path_dir)): os.makedirs(db_path_dir) if os.path.exists(source): shutil.copy(source, db_path) assert os.path.exists(db_path) elif source.lower().startswith(('http://', 'https://', 'ftp://')): download_to_file(source, db_path) else: raise Exception(('Failed to copy database template from source %s' % source))
[ "def", "copy_database_template", "(", "source", ",", "db_path", ")", ":", "db_path_dir", "=", "os", ".", "path", ".", "dirname", "(", "db_path", ")", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "db_path_dir", ")", ")", ":", "os", ".", "m...
copy a clean sqlite template database .
train
false
31,168
def is_item_in_course_tree(item): ancestor = item.get_parent() while ((ancestor is not None) and (ancestor.location.category != 'course')): ancestor = ancestor.get_parent() return (ancestor is not None)
[ "def", "is_item_in_course_tree", "(", "item", ")", ":", "ancestor", "=", "item", ".", "get_parent", "(", ")", "while", "(", "(", "ancestor", "is", "not", "None", ")", "and", "(", "ancestor", ".", "location", ".", "category", "!=", "'course'", ")", ")", ...
check that the item is in the course tree .
train
false
31,169
def local_git_branch(repo_url, branch_name): with lcd(LOGDIR): local('if [ -d letsencrypt ]; then rm -rf letsencrypt; fi') local(('git clone %s letsencrypt --branch %s --single-branch' % (repo_url, branch_name))) local('tar czf le.tar.gz letsencrypt')
[ "def", "local_git_branch", "(", "repo_url", ",", "branch_name", ")", ":", "with", "lcd", "(", "LOGDIR", ")", ":", "local", "(", "'if [ -d letsencrypt ]; then rm -rf letsencrypt; fi'", ")", "local", "(", "(", "'git clone %s letsencrypt --branch %s --single-branch'", "%", ...
clones branch <branch_name> of repo_url .
train
false
31,170
def get_duty_officers(at_time=None): duty_officers = [] if (not at_time): at_time = timezone.now() current_shifts = Shift.objects.filter(deleted=False, start__lt=at_time, end__gt=at_time) if current_shifts: duty_officers = [shift.user for shift in current_shifts] return duty_officers else: try: u = UserProfile.objects.get(fallback_alert_user=True) return [u.user] except UserProfile.DoesNotExist: return []
[ "def", "get_duty_officers", "(", "at_time", "=", "None", ")", ":", "duty_officers", "=", "[", "]", "if", "(", "not", "at_time", ")", ":", "at_time", "=", "timezone", ".", "now", "(", ")", "current_shifts", "=", "Shift", ".", "objects", ".", "filter", "...
returns a list of duty officers for a given time or now if none given .
train
false
31,172
def toggle_switch(name, active=True): (switch, created) = Switch.objects.get_or_create(name=name, defaults={'active': active}) if (not created): switch.active = (not switch.active) switch.save() return switch
[ "def", "toggle_switch", "(", "name", ",", "active", "=", "True", ")", ":", "(", "switch", ",", "created", ")", "=", "Switch", ".", "objects", ".", "get_or_create", "(", "name", "=", "name", ",", "defaults", "=", "{", "'active'", ":", "active", "}", "...
activate or deactivate a waffle switch .
train
false
31,174
def _agreement_context(request): request_path = request.META['PATH_INFO'] nocheck = filter((lambda x: request_path.startswith(x)), settings.POOTLE_LEGALPAGE_NOCHECK_PREFIXES) if (request.user.is_authenticated and (not nocheck) and LegalPage.objects.has_pending_agreement(request.user)): return True return False
[ "def", "_agreement_context", "(", "request", ")", ":", "request_path", "=", "request", ".", "META", "[", "'PATH_INFO'", "]", "nocheck", "=", "filter", "(", "(", "lambda", "x", ":", "request_path", ".", "startswith", "(", "x", ")", ")", ",", "settings", "...
returns whether the agreement box should be displayed or not .
train
false
31,175
def send_pm_if_empty_stream(sender, stream, stream_name, realm): if (sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated): return if ((not sender.is_bot) or (sender.bot_owner is None)): return if (sender.realm != realm): return if (stream is not None): num_subscribers = stream.num_subscribers() if (num_subscribers > 0): return last_reminder = sender.last_reminder waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD) if (last_reminder and ((timezone.now() - last_reminder) <= waitperiod)): return if (stream is None): error_msg = 'that stream does not yet exist. To create it, ' else: error_msg = 'there are no subscribers to that stream. To join it, ' content = ("Hi there! We thought you'd like to know that your bot **%s** just tried to send a message to stream `%s`, but %sclick the gear in the left-side stream list." % (sender.full_name, stream_name, error_msg)) message = internal_prep_message(realm, settings.NOTIFICATION_BOT, 'private', sender.bot_owner.email, '', content) do_send_messages([message]) sender.last_reminder = timezone.now() sender.save(update_fields=['last_reminder'])
[ "def", "send_pm_if_empty_stream", "(", "sender", ",", "stream", ",", "stream_name", ",", "realm", ")", ":", "if", "(", "sender", ".", "realm", ".", "is_zephyr_mirror_realm", "or", "sender", ".", "realm", ".", "deactivated", ")", ":", "return", "if", "(", "...
if a bot sends a message to a stream that doesnt exist or has no subscribers .
train
false
31,177
def window_specialization(typename): return Extension('zipline.lib._{name}window'.format(name=typename), ['zipline/lib/_{name}window.pyx'.format(name=typename)], depends=['zipline/lib/_windowtemplate.pxi'])
[ "def", "window_specialization", "(", "typename", ")", ":", "return", "Extension", "(", "'zipline.lib._{name}window'", ".", "format", "(", "name", "=", "typename", ")", ",", "[", "'zipline/lib/_{name}window.pyx'", ".", "format", "(", "name", "=", "typename", ")", ...
make an extension for an adjustedarraywindow specialization .
train
true
31,178
def test_parse_null_as_none(): initialize() yamlfile = '{\n "model": !obj:pylearn2.models.autoencoder.Autoencoder {\n\n "nvis" : 1024,\n "nhid" : 64,\n "act_enc" : Null,\n "act_dec" : null\n\n }\n }' load(yamlfile)
[ "def", "test_parse_null_as_none", "(", ")", ":", "initialize", "(", ")", "yamlfile", "=", "'{\\n \"model\": !obj:pylearn2.models.autoencoder.Autoencoder {\\n\\n \"nvis\" : 1024,\\n \"nhid\" : 64,\\n \"act_enc\" : Null,\\n ...
tests whether none may be passed via yaml kwarg null .
train
false
31,179
def _convert_coil_trans(coil_trans, dev_ctf_t, bti_dev_t): t = combine_transforms(invert_transform(dev_ctf_t), bti_dev_t, 'ctf_head', 'meg') t = np.dot(t['trans'], coil_trans) return t
[ "def", "_convert_coil_trans", "(", "coil_trans", ",", "dev_ctf_t", ",", "bti_dev_t", ")", ":", "t", "=", "combine_transforms", "(", "invert_transform", "(", "dev_ctf_t", ")", ",", "bti_dev_t", ",", "'ctf_head'", ",", "'meg'", ")", "t", "=", "np", ".", "dot",...
convert the coil trans .
train
false
31,180
def _config_to_python(v): if (v in (u'true', u'yes')): v = True elif (v in (u'false', u'no')): v = False else: try: v = int(v) except ValueError: pass return v
[ "def", "_config_to_python", "(", "v", ")", ":", "if", "(", "v", "in", "(", "u'true'", ",", "u'yes'", ")", ")", ":", "v", "=", "True", "elif", "(", "v", "in", "(", "u'false'", ",", "u'no'", ")", ")", ":", "v", "=", "False", "else", ":", "try", ...
convert a git config string into a python value .
train
false
31,181
def write_ceps(ceps, fn): (base_fn, ext) = os.path.splitext(fn) data_fn = (base_fn + '.ceps') np.save(data_fn, ceps) print ('Written %s' % data_fn)
[ "def", "write_ceps", "(", "ceps", ",", "fn", ")", ":", "(", "base_fn", ",", "ext", ")", "=", "os", ".", "path", ".", "splitext", "(", "fn", ")", "data_fn", "=", "(", "base_fn", "+", "'.ceps'", ")", "np", ".", "save", "(", "data_fn", ",", "ceps", ...
write the mfcc to separate files to speed up processing .
train
false
31,187
@cached_function def get_cython_cache_dir(): if ('CYTHON_CACHE_DIR' in os.environ): return os.environ['CYTHON_CACHE_DIR'] parent = None if (os.name == 'posix'): if (sys.platform == 'darwin'): parent = os.path.expanduser('~/Library/Caches') else: parent = os.environ.get('XDG_CACHE_HOME') if (parent and os.path.isdir(parent)): return os.path.join(parent, 'cython') return os.path.expanduser(os.path.join('~', '.cython'))
[ "@", "cached_function", "def", "get_cython_cache_dir", "(", ")", ":", "if", "(", "'CYTHON_CACHE_DIR'", "in", "os", ".", "environ", ")", ":", "return", "os", ".", "environ", "[", "'CYTHON_CACHE_DIR'", "]", "parent", "=", "None", "if", "(", "os", ".", "name"...
get the cython cache dir priority: 1 .
train
false
31,188
def call_each(seq): try: reduce((lambda _, y: y()), seq) except TypeError as e: if (text_type(e) != 'reduce() of empty sequence with no initial value'): raise
[ "def", "call_each", "(", "seq", ")", ":", "try", ":", "reduce", "(", "(", "lambda", "_", ",", "y", ":", "y", "(", ")", ")", ",", "seq", ")", "except", "TypeError", "as", "e", ":", "if", "(", "text_type", "(", "e", ")", "!=", "'reduce() of empty s...
calls each element of sequence to invoke the side effect .
train
true
31,189
def _Popen(command, output=False, directory='.', runas=None, env=(), exitcode=0, use_vt=False, loglevel=None): ret = None directory = os.path.abspath(directory) if isinstance(command, list): command = ' '.join(command) LOG.debug(u'Running {0}'.format(command)) if (not loglevel): loglevel = 'debug' ret = __salt__['cmd.run_all'](command, cwd=directory, output_loglevel=loglevel, runas=runas, env=env, use_vt=use_vt, python_shell=False) out = ((ret['stdout'] + '\n\n') + ret['stderr']) if ((exitcode is not None) and (ret['retcode'] != exitcode)): raise _BuildoutError(out) ret['output'] = out if output: ret = out return ret
[ "def", "_Popen", "(", "command", ",", "output", "=", "False", ",", "directory", "=", "'.'", ",", "runas", "=", "None", ",", "env", "=", "(", ")", ",", "exitcode", "=", "0", ",", "use_vt", "=", "False", ",", "loglevel", "=", "None", ")", ":", "ret...
run a command .
train
true
31,190
def get_default_options(): options = [] home = os.environ.get('HOME', '') if home: rcfile = os.path.join(home, RCFILE) try: options = open(rcfile).read().split() except IOError: pass return options
[ "def", "get_default_options", "(", ")", ":", "options", "=", "[", "]", "home", "=", "os", ".", "environ", ".", "get", "(", "'HOME'", ",", "''", ")", "if", "home", ":", "rcfile", "=", "os", ".", "path", ".", "join", "(", "home", ",", "RCFILE", ")"...
read config file and return list of options .
train
true
31,191
def index_alt(): s3_redirect_default(URL(f='person'))
[ "def", "index_alt", "(", ")", ":", "s3_redirect_default", "(", "URL", "(", "f", "=", "'person'", ")", ")" ]
module homepage for non-admin users when no cms content found .
train
false
31,192
def headers_to_dict(header_lines): headers = {} headers_list = [x for line in header_lines for x in line.split(': ', 1)] headers_dict = dict(zip(headers_list[0::2], headers_list[1::2])) for header in headers_dict: headers[header.lower()] = headers_dict[header] return headers
[ "def", "headers_to_dict", "(", "header_lines", ")", ":", "headers", "=", "{", "}", "headers_list", "=", "[", "x", "for", "line", "in", "header_lines", "for", "x", "in", "line", ".", "split", "(", "': '", ",", "1", ")", "]", "headers_dict", "=", "dict",...
convert the list of header lines into a dictionary .
train
false
31,193
def _sparse_series_to_coo(ss, row_levels=(0,), column_levels=(1,), sort_labels=False): import scipy.sparse if (ss.index.nlevels < 2): raise ValueError('to_coo requires MultiIndex with nlevels > 2') if (not ss.index.is_unique): raise ValueError('Duplicate index entries are not allowed in to_coo transformation.') row_levels = [ss.index._get_level_number(x) for x in row_levels] column_levels = [ss.index._get_level_number(x) for x in column_levels] (v, i, j, rows, columns) = _to_ijv(ss, row_levels=row_levels, column_levels=column_levels, sort_labels=sort_labels) sparse_matrix = scipy.sparse.coo_matrix((v, (i, j)), shape=(len(rows), len(columns))) return (sparse_matrix, rows, columns)
[ "def", "_sparse_series_to_coo", "(", "ss", ",", "row_levels", "=", "(", "0", ",", ")", ",", "column_levels", "=", "(", "1", ",", ")", ",", "sort_labels", "=", "False", ")", ":", "import", "scipy", ".", "sparse", "if", "(", "ss", ".", "index", ".", ...
convert a sparseseries to a scipy .
train
true
31,194
def DNSServiceCreateConnection(): _global_lock.acquire() try: sdRef = _DNSServiceCreateConnection() finally: _global_lock.release() return sdRef
[ "def", "DNSServiceCreateConnection", "(", ")", ":", "_global_lock", ".", "acquire", "(", ")", "try", ":", "sdRef", "=", "_DNSServiceCreateConnection", "(", ")", "finally", ":", "_global_lock", ".", "release", "(", ")", "return", "sdRef" ]
create a connection to the daemon allowing efficient registration of multiple individual records .
train
false
31,197
def request_feed(url, **kwargs): response = request_response(url, **kwargs) if (response is not None): return feedparser.parse(response.content)
[ "def", "request_feed", "(", "url", ",", "**", "kwargs", ")", ":", "response", "=", "request_response", "(", "url", ",", "**", "kwargs", ")", "if", "(", "response", "is", "not", "None", ")", ":", "return", "feedparser", ".", "parse", "(", "response", "....
wrapper for request_response .
train
false
31,198
def generateCertificateFiles(basename, organization, organizationalUnit): (pkey, req, cert) = generateCertificateObjects(organization, organizationalUnit) for (ext, obj, dumpFunc) in [('key', pkey, crypto.dump_privatekey), ('req', req, crypto.dump_certificate_request), ('cert', cert, crypto.dump_certificate)]: fName = os.extsep.join((basename, ext)).encode('utf-8') FilePath(fName).setContent(dumpFunc(crypto.FILETYPE_PEM, obj))
[ "def", "generateCertificateFiles", "(", "basename", ",", "organization", ",", "organizationalUnit", ")", ":", "(", "pkey", ",", "req", ",", "cert", ")", "=", "generateCertificateObjects", "(", "organization", ",", "organizationalUnit", ")", "for", "(", "ext", ",...
create certificate files key .
train
false
31,200
def _writeGifToFile(fp, images, durations, loops): frames = 0 previous = None for im in images: if (not previous): palette = getheader(im)[1] data = getdata(im) (imdes, data) = (data[0], data[1:]) header = getheaderAnim(im) appext = getAppExt(loops) graphext = getGraphicsControlExt(durations[0]) fp.write(header) fp.write(palette) fp.write(appext) fp.write(graphext) fp.write(imdes) for d in data: fp.write(d) else: data = getdata(im) (imdes, data) = (data[0], data[1:]) graphext = getGraphicsControlExt(durations[frames]) fp.write(graphext) fp.write(imdes) for d in data: fp.write(d) previous = im.copy() frames = (frames + 1) fp.write(';') return frames
[ "def", "_writeGifToFile", "(", "fp", ",", "images", ",", "durations", ",", "loops", ")", ":", "frames", "=", "0", "previous", "=", "None", "for", "im", "in", "images", ":", "if", "(", "not", "previous", ")", ":", "palette", "=", "getheader", "(", "im...
given a set of images writes the bytes to the specified stream .
train
false
31,201
def raise_msg_to_str(msg): if (not is_string_like(msg)): msg = '\n'.join(map(str, msg)) return msg
[ "def", "raise_msg_to_str", "(", "msg", ")", ":", "if", "(", "not", "is_string_like", "(", "msg", ")", ")", ":", "msg", "=", "'\\n'", ".", "join", "(", "map", "(", "str", ",", "msg", ")", ")", "return", "msg" ]
msg is a return arg from a raise .
train
true
31,202
def vbox(margin, spacing, *items): return box(QtWidgets.QVBoxLayout, margin, spacing, *items)
[ "def", "vbox", "(", "margin", ",", "spacing", ",", "*", "items", ")", ":", "return", "box", "(", "QtWidgets", ".", "QVBoxLayout", ",", "margin", ",", "spacing", ",", "*", "items", ")" ]
create a vboxlayout with the specified sizes and items .
train
false
31,203
def apply_all_middleware(request, **attrs): request = apply_view_middleware(apply_request_middleware(request)) for (key, value) in attrs.items(): setattr(request, key, value) return request
[ "def", "apply_all_middleware", "(", "request", ",", "**", "attrs", ")", ":", "request", "=", "apply_view_middleware", "(", "apply_request_middleware", "(", "request", ")", ")", "for", "(", "key", ",", "value", ")", "in", "attrs", ".", "items", "(", ")", ":...
apply all the process_request and process_view capable middleware configured into the given request .
train
false
31,204
@snippet def instantiate_client(_unused_client, _unused_to_delete): from google.cloud import logging client = logging.Client() credentials = object() from google.cloud import logging client = logging.Client(project='my-project', credentials=credentials)
[ "@", "snippet", "def", "instantiate_client", "(", "_unused_client", ",", "_unused_to_delete", ")", ":", "from", "google", ".", "cloud", "import", "logging", "client", "=", "logging", ".", "Client", "(", ")", "credentials", "=", "object", "(", ")", "from", "g...
instantiate client .
train
true
31,205
def mkpixmap(w, h, fmt, data): fmtinfo = _fmt_to_mac[fmt] rv = struct.pack('lHhhhhhhlllhhhhlll', (id(data) + MacOS.string_id_to_buffer), ((w * 2) + 32768), 0, 0, h, w, 0, 0, 0, (72 << 16), (72 << 16), fmtinfo[0], fmtinfo[1], fmtinfo[2], fmtinfo[3], 0, 0, 0) return Qd.RawBitMap(rv)
[ "def", "mkpixmap", "(", "w", ",", "h", ",", "fmt", ",", "data", ")", ":", "fmtinfo", "=", "_fmt_to_mac", "[", "fmt", "]", "rv", "=", "struct", ".", "pack", "(", "'lHhhhhhhlllhhhhlll'", ",", "(", "id", "(", "data", ")", "+", "MacOS", ".", "string_id...
kludge a pixmap together .
train
false
31,206
def pathname(): pname = u'' for __ in xrange(random.randint(2, 3)): for __ in xrange(random.randint(5, 10)): pname += random.choice(NAME_CHARS) pname += '/' return pname
[ "def", "pathname", "(", ")", ":", "pname", "=", "u''", "for", "__", "in", "xrange", "(", "random", ".", "randint", "(", "2", ",", "3", ")", ")", ":", "for", "__", "in", "xrange", "(", "random", ".", "randint", "(", "5", ",", "10", ")", ")", "...
fake a pathname .
train
false
31,207
def python_branch(): return _sys_version()[2]
[ "def", "python_branch", "(", ")", ":", "return", "_sys_version", "(", ")", "[", "2", "]" ]
returns a string identifying the python implementation branch .
train
false
31,208
def is_form_media_type(media_type): (base_media_type, params) = parse_header(media_type.encode(HTTP_HEADER_ENCODING)) return ((base_media_type == u'application/x-www-form-urlencoded') or (base_media_type == u'multipart/form-data'))
[ "def", "is_form_media_type", "(", "media_type", ")", ":", "(", "base_media_type", ",", "params", ")", "=", "parse_header", "(", "media_type", ".", "encode", "(", "HTTP_HEADER_ENCODING", ")", ")", "return", "(", "(", "base_media_type", "==", "u'application/x-www-fo...
return true if the media type is a valid form media type .
train
false
31,210
def _pad_message(text): block_size = AES.block_size padding_size = ((block_size - (len(text) % block_size)) or block_size) padding = (chr(padding_size) * padding_size) return (text + padding)
[ "def", "_pad_message", "(", "text", ")", ":", "block_size", "=", "AES", ".", "block_size", "padding_size", "=", "(", "(", "block_size", "-", "(", "len", "(", "text", ")", "%", "block_size", ")", ")", "or", "block_size", ")", "padding", "=", "(", "chr",...
return text padded out to a multiple of block_size bytes .
train
false
31,211
def _create_siv_cipher(factory, **kwargs): try: key = kwargs.pop('key') except KeyError as e: raise TypeError(('Missing parameter: ' + str(e))) nonce = kwargs.pop('nonce', None) return SivMode(factory, key, nonce, kwargs)
[ "def", "_create_siv_cipher", "(", "factory", ",", "**", "kwargs", ")", ":", "try", ":", "key", "=", "kwargs", ".", "pop", "(", "'key'", ")", "except", "KeyError", "as", "e", ":", "raise", "TypeError", "(", "(", "'Missing parameter: '", "+", "str", "(", ...
create a new block cipher .
train
false
31,212
def length_lt(value, arg): return (len(value) < int(arg))
[ "def", "length_lt", "(", "value", ",", "arg", ")", ":", "return", "(", "len", "(", "value", ")", "<", "int", "(", "arg", ")", ")" ]
returns a boolean of whether the values length is less than the argument .
train
false
31,214
def _interpret_emr_step_syslog(fs, matches): errors = [] result = {} for match in matches: path = match['path'] interpretation = _parse_step_syslog(_cat_log(fs, path)) result.update(interpretation) for error in (result.get('errors') or ()): if ('hadoop_error' in error): error['hadoop_error']['path'] = path _add_implied_task_id(error) errors.append(error) _add_implied_job_id(result) if errors: result['errors'] = errors return result
[ "def", "_interpret_emr_step_syslog", "(", "fs", ",", "matches", ")", ":", "errors", "=", "[", "]", "result", "=", "{", "}", "for", "match", "in", "matches", ":", "path", "=", "match", "[", "'path'", "]", "interpretation", "=", "_parse_step_syslog", "(", ...
extract information from step syslog (see :py:func:_parse_step_log()) .
train
false
31,215
def IQR(xs): cdf = Cdf(xs) return (cdf.Value(0.25), cdf.Value(0.75))
[ "def", "IQR", "(", "xs", ")", ":", "cdf", "=", "Cdf", "(", "xs", ")", "return", "(", "cdf", ".", "Value", "(", "0.25", ")", ",", "cdf", ".", "Value", "(", "0.75", ")", ")" ]
computes the interquartile of a sequence .
train
false
31,217
def get_file_original_name(name):
    """Recover the original filename from a stored name.

    Stored names have the form ``<prefix>_sep_<original>``; because the
    leading ``.*`` is greedy, everything after the *last* ``_sep_`` marker
    is taken as the original name.

    :param name: the stored filename string.
    :return: the original filename, or the string ``'Not valid'`` when no
        ``_sep_`` marker is present.
    """
    match = re.search('.*_sep_(.*)', name)
    if match is None:
        return 'Not valid'
    return match.group(1)
[ "def", "get_file_original_name", "(", "name", ")", ":", "re_match", "=", "re", ".", "findall", "(", "'.*_sep_(.*)'", ",", "name", ")", "if", "re_match", ":", "return", "re_match", "[", "0", "]", "else", ":", "return", "'Not valid'" ]
use this function to get the user's original filename .
train
false
31,218
def test_path_issues():
    """Regression test for pull request #684.

    Completing after ``from datetime import `` must yield a non-empty
    completion list.
    """
    src = 'from datetime import '
    completions = jedi.Script(src).completions()
    assert completions
[ "def", "test_path_issues", "(", ")", ":", "source", "=", "'from datetime import '", "assert", "jedi", ".", "Script", "(", "source", ")", ".", "completions", "(", ")" ]
see pull request #684 for details .
train
false
31,219
def mmodule(saltenv, fun, *args, **kwargs):
    """Load minion modules from *saltenv* and invoke the named function.

    :param saltenv: the salt environment whose minion modules to load.
    :param fun: dotted name of the function to call.
    :param args: positional arguments forwarded to the function.
    :param kwargs: keyword arguments forwarded to the function.
    :return: whatever the invoked minion function returns.
    """
    env_minion = _MMinion(saltenv)
    target = env_minion.functions[fun]
    return target(*args, **kwargs)
[ "def", "mmodule", "(", "saltenv", ",", "fun", ",", "*", "args", ",", "**", "kwargs", ")", ":", "mminion", "=", "_MMinion", "(", "saltenv", ")", "return", "mminion", ".", "functions", "[", "fun", "]", "(", "*", "args", ",", "**", "kwargs", ")" ]
loads minion modules from an environment so that they can be used in pillars for that environment . CLI example: .
train
true
31,220
def write_git_changelog():
    """Write a ``ChangeLog`` file based on the git log.

    Skipped entirely when the ``SKIP_WRITE_GIT_CHANGELOG`` environment
    variable is set. When no git directory is found, an empty ``ChangeLog``
    is created (truncating any existing one).
    """
    new_changelog = 'ChangeLog'
    git_dir = _get_git_directory()
    # Allow builds to opt out of changelog generation.
    if os.getenv('SKIP_WRITE_GIT_CHANGELOG'):
        return
    if git_dir:
        changelog = _run_shell_command('git --git-dir=%s log' % git_dir)
        mailmap = _parse_git_mailmap(git_dir)
        with open(new_changelog, 'w') as changelog_file:
            changelog_file.write(canonicalize_emails(changelog, mailmap))
    else:
        # No git metadata available: leave an empty (truncated) file.
        open(new_changelog, 'w').close()
[ "def", "write_git_changelog", "(", ")", ":", "new_changelog", "=", "'ChangeLog'", "git_dir", "=", "_get_git_directory", "(", ")", "if", "(", "not", "os", ".", "getenv", "(", "'SKIP_WRITE_GIT_CHANGELOG'", ")", ")", ":", "if", "git_dir", ":", "git_log_cmd", "=",...
write a changelog based on the git changelog .
train
false
31,221
def _set_status(m, comment=INVALID_RESPONSE, status=False, out=None):
    """Assign status data to the dict *m* (mutated in place) and return it
    encoded via ``_encode_status``.

    Fills in ``out``, ``status``, ``comment``, snapshot copies of the
    collected log messages (``logs`` / ``logs_by_level``), and two
    human-readable summaries (``outlog`` / ``outlog_by_level``).
    """
    m['out'] = out
    m['status'] = status
    m['comment'] = comment
    # Snapshot the collected logs so later logging does not alter m.
    m['logs'] = LOG.messages[:]
    m['logs_by_level'] = LOG.by_level.copy()
    (outlog, outlog_by_level) = ('', '')
    # Include raw command output only when it is a plain string.
    if (out and isinstance(out, string_types)):
        outlog += HR
        outlog += 'OUTPUT:\n'
        outlog += '{0}\n'.format(_encode_string(out))
        outlog += HR
    if m['logs']:
        outlog += HR
        outlog += 'Log summary:\n'
        outlog += HR
        outlog_by_level += HR
        outlog_by_level += 'Log summary by level:\n'
        outlog_by_level += HR
        # Chronological summary: one "LEVEL: message" entry per record.
        for (level, msg) in m['logs']:
            outlog += '\n{0}: {1}\n'.format(level.upper(), _encode_string(msg))
        # Per-level summary, in fixed severity order.
        for logger in ('error', 'warn', 'info', 'debug'):
            logs = m['logs_by_level'].get(logger, [])
            if logs:
                outlog_by_level += '\n{0}:\n'.format(logger.upper())
                # NOTE: this encodes the entries *in place*, mutating the
                # lists stored inside m['logs_by_level'] as a side effect.
                for (idx, log) in enumerate(logs[:]):
                    logs[idx] = _encode_string(log)
                outlog_by_level += '\n'.join(logs)
                outlog_by_level += '\n'
        outlog += HR
    m['outlog'] = outlog
    m['outlog_by_level'] = outlog_by_level
    return _encode_status(m)
[ "def", "_set_status", "(", "m", ",", "comment", "=", "INVALID_RESPONSE", ",", "status", "=", "False", ",", "out", "=", "None", ")", ":", "m", "[", "'out'", "]", "=", "out", "m", "[", "'status'", "]", "=", "status", "m", "[", "'logs'", "]", "=", "...
assign status data to a dict .
train
true
31,222
def _setlabel(object_alias, index):
    """Set the Finder label of the object identified by *object_alias*.

    Sends a 'core'/'setd' AppleEvent to the Finder that sets the label
    index ('labi') property of the aliased object.

    :param object_alias: alias identifying the Finder object.
    :param index: the label index to assign.
    :raises Error: if the Finder reply contains an 'errn' error code.
    :return: the label index that was set.
    """
    finder = _getfinder()
    args = {}
    attrs = {}
    # AppleEvent class/id for a "set data" request.
    _code = 'core'
    _subcode = 'setd'
    # Object specifier chain: the 'labi' property of the aliased object.
    aeobj_0 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='alis', seld=object_alias, fr=None)
    aeobj_1 = aetypes.ObjectSpecifier(want=aetypes.Type('prop'), form='prop', seld=aetypes.Type('labi'), fr=aeobj_0)
    args['----'] = aeobj_1
    args['data'] = index
    (_reply, args, attrs) = finder.send(_code, _subcode, args, attrs)
    if ('errn' in args):
        # Finder reported an error; decode and raise it (Python 2 syntax).
        raise Error, aetools.decodeerror(args)
    return index
[ "def", "_setlabel", "(", "object_alias", ",", "index", ")", ":", "finder", "=", "_getfinder", "(", ")", "args", "=", "{", "}", "attrs", "=", "{", "}", "_code", "=", "'core'", "_subcode", "=", "'setd'", "aeobj_0", "=", "aetypes", ".", "ObjectSpecifier", ...
label: set the label for the object .
train
false
31,223
def prepare_private_key():
    """``before_request`` handler propagating view-only keys via the referrer.

    On GET requests that do not already carry a ``view_only`` query
    parameter, look for a ``view_only`` key in the referrer's query string;
    if one is found and the session is not authenticated, redirect
    (307, preserving the method) to the current URL with that key added.

    Returns ``None`` (fall through to the normal handler) in every other
    case.
    """
    # Only GET requests are eligible for key propagation.
    if request.method != 'GET':
        return
    # The request already carries its own view-only key; nothing to do.
    if request.args.get('view_only', ''):
        return
    scheme = None
    key = None
    if request.referrer:
        # Parse the referrer once and reuse the result (the original code
        # re-parsed request.referrer a second time for the query string).
        referrer_parsed = urlparse.urlparse(request.referrer)
        scheme = referrer_parsed.scheme
        key_values = urlparse.parse_qs(referrer_parsed.query).get('view_only')
        if key_values:
            key = key_values[0]
    if key and not session.is_authenticated:
        new_url = add_key_to_url(request.url, scheme, key)
        return redirect(new_url, code=http.TEMPORARY_REDIRECT)
[ "def", "prepare_private_key", "(", ")", ":", "if", "(", "request", ".", "method", "!=", "'GET'", ")", ":", "return", "if", "request", ".", "args", ".", "get", "(", "'view_only'", ",", "''", ")", ":", "return", "if", "request", ".", "referrer", ":", "...
before_request handler that checks the referer header to see if the user is requesting from a view-only link .
train
false
31,224
def ValidateCombinedSourceReferencesString(source_refs):
    """Validate a newline-separated list of source references.

    :param source_refs: the combined source-references string.
    :raises validation.ValidationError: if the total size exceeds
        ``SOURCE_REFERENCES_MAX_SIZE`` or any individual reference fails
        ``ValidateSourceReference``.
    """
    total_size = len(source_refs)
    if total_size > SOURCE_REFERENCES_MAX_SIZE:
        raise validation.ValidationError(
            'Total source reference(s) size exceeds the limit: %d > %d'
            % (total_size, SOURCE_REFERENCES_MAX_SIZE))
    for ref in source_refs.splitlines():
        ValidateSourceReference(ref.strip())
[ "def", "ValidateCombinedSourceReferencesString", "(", "source_refs", ")", ":", "if", "(", "len", "(", "source_refs", ")", ">", "SOURCE_REFERENCES_MAX_SIZE", ")", ":", "raise", "validation", ".", "ValidationError", "(", "(", "'Total source reference(s) size exceeds the lim...
determines if source_refs contains a valid list of source references .
train
false
31,225
@pytest.mark.parametrize('parallel', [True, False])
def test_conversion(parallel, read_basic):
    """Column dtype inference on mixed data.

    Columns of all-numeric values become float ('f') or int ('i');
    any column containing a non-numeric token falls back to string
    ('S' or 'U').
    """
    sample = '\nA B C D E\n1 a 3 4 5\n2. 1 9 10 -5.3e4\n4 2 -12 .4 six\n'
    tbl = read_basic(sample, parallel=parallel)
    assert_equal(tbl['A'].dtype.kind, 'f')   # mix of int and float -> float
    assert tbl['B'].dtype.kind in ('S', 'U')  # 'a' forces string
    assert_equal(tbl['C'].dtype.kind, 'i')   # all integers
    assert_equal(tbl['D'].dtype.kind, 'f')   # floats present
    assert tbl['E'].dtype.kind in ('S', 'U')  # 'six' forces string
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'parallel'", ",", "[", "True", ",", "False", "]", ")", "def", "test_conversion", "(", "parallel", ",", "read_basic", ")", ":", "text", "=", "'\\nA B C D E\\n1 a 3 4 5\\n2. 1 9 10 -5.3e4\\n4 2 -12 .4 six\\n'", "...
the reader should try to convert each column to ints .
train
false
31,226
def split_ranges(mask):
    """Generate (start, end) tuples covering every run of truthy values in mask.

    Each yielded pair is a half-open index range ``[start, end)`` such that
    every truthy element of ``mask`` falls inside exactly one yielded range
    and every falsy element falls outside all of them. Ranges are yielded
    in increasing order, and no empty range is ever produced (an empty
    mask yields nothing).

    :param mask: sequence of boolean-ish values.
    :yields: ``(start, end)`` tuples of ints.
    """
    start = None  # index where the current truthy run began, or None
    for pos, val in enumerate(mask):
        if val:
            if start is None:
                start = pos
        elif start is not None:
            # The run ended just before pos; emit it.
            yield (start, pos)
            start = None
    if start is not None:
        # Mask ended inside a run; close it at len(mask).
        yield (start, len(mask))
[ "def", "split_ranges", "(", "mask", ")", ":", "ranges", "=", "[", "(", "0", ",", "len", "(", "mask", ")", ")", "]", "for", "(", "pos", ",", "val", ")", "in", "enumerate", "(", "mask", ")", ":", "if", "(", "not", "val", ")", ":", "r", "=", "...
generates tuples of ranges which cover all true value in mask .
train
false
31,229
def revoke_privilege(database, privilege, username, **client_args):
    """Revoke a privilege on a database from a user.

    :param database: name of the database.
    :param privilege: the privilege to revoke.
    :param username: the user losing the privilege.
    :param client_args: forwarded to ``_client`` to build the connection.
    :return: always True.
    """
    client = _client(**client_args)
    client.revoke_privilege(privilege, database, username)
    return True
[ "def", "revoke_privilege", "(", "database", ",", "privilege", ",", "username", ",", "**", "client_args", ")", ":", "client", "=", "_client", "(", "**", "client_args", ")", "client", ".", "revoke_privilege", "(", "privilege", ",", "database", ",", "username", ...
revoke a privilege on a database from a user .
train
true
31,230
def unify_button_widths(*buttons):
    """Make all passed buttons the same width.

    Sets each button's ``'width'`` to the length of the longest
    ``'text'`` among them. Mutates the button mappings in place;
    returns nothing. A call with no buttons is a no-op.

    :param buttons: button mappings supporting ``['text']`` read and
        ``['width']`` assignment.
    """
    # max() with default=0 replaces the manual accumulation loop and
    # keeps the empty-call case safe.
    widest = max((len(btn['text']) for btn in buttons), default=0)
    for btn in buttons:
        btn['width'] = widest
[ "def", "unify_button_widths", "(", "*", "buttons", ")", ":", "wid", "=", "0", "for", "btn", "in", "buttons", ":", "wid", "=", "max", "(", "wid", ",", "len", "(", "btn", "[", "'text'", "]", ")", ")", "for", "btn", "in", "buttons", ":", "btn", "[",...
make buttons passed in all have the same width .
train
false