id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
7,138
def request_lastfm(method, **kwargs): kwargs['method'] = method kwargs.setdefault('api_key', API_KEY) kwargs.setdefault('format', 'json') logger.debug('Calling Last.FM method: %s', method) logger.debug('Last.FM call parameters: %s', kwargs) data = request.request_json(ENTRY_POINT, timeout=TIMEOUT, params=kwargs, lock=lastfm_lock) if (not data): logger.error('Error calling Last.FM method: %s', method) return if ('error' in data): logger.error('Last.FM returned an error: %s', data['message']) return return data
[ "def", "request_lastfm", "(", "method", ",", "**", "kwargs", ")", ":", "kwargs", "[", "'method'", "]", "=", "method", "kwargs", ".", "setdefault", "(", "'api_key'", ",", "API_KEY", ")", "kwargs", ".", "setdefault", "(", "'format'", ",", "'json'", ")", "logger", ".", "debug", "(", "'Calling Last.FM method: %s'", ",", "method", ")", "logger", ".", "debug", "(", "'Last.FM call parameters: %s'", ",", "kwargs", ")", "data", "=", "request", ".", "request_json", "(", "ENTRY_POINT", ",", "timeout", "=", "TIMEOUT", ",", "params", "=", "kwargs", ",", "lock", "=", "lastfm_lock", ")", "if", "(", "not", "data", ")", ":", "logger", ".", "error", "(", "'Error calling Last.FM method: %s'", ",", "method", ")", "return", "if", "(", "'error'", "in", "data", ")", ":", "logger", ".", "error", "(", "'Last.FM returned an error: %s'", ",", "data", "[", "'message'", "]", ")", "return", "return", "data" ]
call a last .
train
false
7,140
def my_callback(ax, ch_idx): ax.plot(freqs, psds[ch_idx], color='red') ax.set_xlabel = 'Frequency (Hz)' ax.set_ylabel = 'Power (dB)'
[ "def", "my_callback", "(", "ax", ",", "ch_idx", ")", ":", "ax", ".", "plot", "(", "freqs", ",", "psds", "[", "ch_idx", "]", ",", "color", "=", "'red'", ")", "ax", ".", "set_xlabel", "=", "'Frequency (Hz)'", "ax", ".", "set_ylabel", "=", "'Power (dB)'" ]
this block of code is executed once you click on one of the channel axes in the plot .
train
false
7,141
def build_image_encoder(tparams, options): opt_ret = dict() trng = RandomStreams(1234) im = tensor.matrix('im', dtype='float32') images = get_layer('ff')[1](tparams, im, options, prefix='ff_image', activ='linear') images = l2norm(images) return (trng, [im], images)
[ "def", "build_image_encoder", "(", "tparams", ",", "options", ")", ":", "opt_ret", "=", "dict", "(", ")", "trng", "=", "RandomStreams", "(", "1234", ")", "im", "=", "tensor", ".", "matrix", "(", "'im'", ",", "dtype", "=", "'float32'", ")", "images", "=", "get_layer", "(", "'ff'", ")", "[", "1", "]", "(", "tparams", ",", "im", ",", "options", ",", "prefix", "=", "'ff_image'", ",", "activ", "=", "'linear'", ")", "images", "=", "l2norm", "(", "images", ")", "return", "(", "trng", ",", "[", "im", "]", ",", "images", ")" ]
encoder only .
train
false
7,142
def application_start(context, view): context.app.set_view(view) view.show() view.raise_() runtask = qtutils.RunTask(parent=view) init_update_task(view, runtask, context.model) fsmonitor.current().start() msg_timer = QtCore.QTimer() msg_timer.setSingleShot(True) msg_timer.timeout.connect(_send_msg) msg_timer.start(0) result = context.app.exec_() fsmonitor.current().stop() QtCore.QThreadPool.globalInstance().waitForDone() return result
[ "def", "application_start", "(", "context", ",", "view", ")", ":", "context", ".", "app", ".", "set_view", "(", "view", ")", "view", ".", "show", "(", ")", "view", ".", "raise_", "(", ")", "runtask", "=", "qtutils", ".", "RunTask", "(", "parent", "=", "view", ")", "init_update_task", "(", "view", ",", "runtask", ",", "context", ".", "model", ")", "fsmonitor", ".", "current", "(", ")", ".", "start", "(", ")", "msg_timer", "=", "QtCore", ".", "QTimer", "(", ")", "msg_timer", ".", "setSingleShot", "(", "True", ")", "msg_timer", ".", "timeout", ".", "connect", "(", "_send_msg", ")", "msg_timer", ".", "start", "(", "0", ")", "result", "=", "context", ".", "app", ".", "exec_", "(", ")", "fsmonitor", ".", "current", "(", ")", ".", "stop", "(", ")", "QtCore", ".", "QThreadPool", ".", "globalInstance", "(", ")", ".", "waitForDone", "(", ")", "return", "result" ]
show the gui and start the main event loop .
train
false
7,143
def __routes_doctest(): pass
[ "def", "__routes_doctest", "(", ")", ":", "pass" ]
dummy function for doctesting routes .
train
false
7,145
def create_folder_structure(depth=2, sibling=2, parent=None): if ((depth > 0) and (sibling > 0)): depth_range = list(range(1, (depth + 1))) depth_range.reverse() for d in depth_range: for s in range(1, (sibling + 1)): name = ('folder: %s -- %s' % (str(d), str(s))) folder = Folder(name=name, parent=parent) folder.save() create_folder_structure(depth=(d - 1), sibling=sibling, parent=folder)
[ "def", "create_folder_structure", "(", "depth", "=", "2", ",", "sibling", "=", "2", ",", "parent", "=", "None", ")", ":", "if", "(", "(", "depth", ">", "0", ")", "and", "(", "sibling", ">", "0", ")", ")", ":", "depth_range", "=", "list", "(", "range", "(", "1", ",", "(", "depth", "+", "1", ")", ")", ")", "depth_range", ".", "reverse", "(", ")", "for", "d", "in", "depth_range", ":", "for", "s", "in", "range", "(", "1", ",", "(", "sibling", "+", "1", ")", ")", ":", "name", "=", "(", "'folder: %s -- %s'", "%", "(", "str", "(", "d", ")", ",", "str", "(", "s", ")", ")", ")", "folder", "=", "Folder", "(", "name", "=", "name", ",", "parent", "=", "parent", ")", "folder", ".", "save", "(", ")", "create_folder_structure", "(", "depth", "=", "(", "d", "-", "1", ")", ",", "sibling", "=", "sibling", ",", "parent", "=", "folder", ")" ]
this method creates a folder structure of the specified depth .
train
false
7,148
def create_output(): if args.output: if (args.output == '-'): out = FileOutput(fd=stdout) else: out = check_file_output(args.output, args.force) elif args.stdout: out = FileOutput(fd=stdout) else: http = namedpipe = None if (not args.player): console.exit('The default player (VLC) does not seem to be installed. You must specify the path to a player executable with --player.') if args.player_fifo: pipename = 'livestreamerpipe-{0}'.format(os.getpid()) console.logger.info('Creating pipe {0}', pipename) try: namedpipe = NamedPipe(pipename) except IOError as err: console.exit('Failed to create pipe: {0}', err) elif args.player_http: http = create_http_server() console.logger.info('Starting player: {0}', args.player) out = PlayerOutput(args.player, args=args.player_args, quiet=(not args.verbose_player), kill=(not args.player_no_close), namedpipe=namedpipe, http=http) return out
[ "def", "create_output", "(", ")", ":", "if", "args", ".", "output", ":", "if", "(", "args", ".", "output", "==", "'-'", ")", ":", "out", "=", "FileOutput", "(", "fd", "=", "stdout", ")", "else", ":", "out", "=", "check_file_output", "(", "args", ".", "output", ",", "args", ".", "force", ")", "elif", "args", ".", "stdout", ":", "out", "=", "FileOutput", "(", "fd", "=", "stdout", ")", "else", ":", "http", "=", "namedpipe", "=", "None", "if", "(", "not", "args", ".", "player", ")", ":", "console", ".", "exit", "(", "'The default player (VLC) does not seem to be installed. You must specify the path to a player executable with --player.'", ")", "if", "args", ".", "player_fifo", ":", "pipename", "=", "'livestreamerpipe-{0}'", ".", "format", "(", "os", ".", "getpid", "(", ")", ")", "console", ".", "logger", ".", "info", "(", "'Creating pipe {0}'", ",", "pipename", ")", "try", ":", "namedpipe", "=", "NamedPipe", "(", "pipename", ")", "except", "IOError", "as", "err", ":", "console", ".", "exit", "(", "'Failed to create pipe: {0}'", ",", "err", ")", "elif", "args", ".", "player_http", ":", "http", "=", "create_http_server", "(", ")", "console", ".", "logger", ".", "info", "(", "'Starting player: {0}'", ",", "args", ".", "player", ")", "out", "=", "PlayerOutput", "(", "args", ".", "player", ",", "args", "=", "args", ".", "player_args", ",", "quiet", "=", "(", "not", "args", ".", "verbose_player", ")", ",", "kill", "=", "(", "not", "args", ".", "player_no_close", ")", ",", "namedpipe", "=", "namedpipe", ",", "http", "=", "http", ")", "return", "out" ]
return an :class:~prompt_toolkit .
train
false
7,149
def stft_norm2(X): X2 = (X * X.conj()).real norms2 = (((2.0 * X2.sum(axis=2).sum(axis=1)) - np.sum(X2[:, 0, :], axis=1)) - np.sum(X2[:, (-1), :], axis=1)) return norms2
[ "def", "stft_norm2", "(", "X", ")", ":", "X2", "=", "(", "X", "*", "X", ".", "conj", "(", ")", ")", ".", "real", "norms2", "=", "(", "(", "(", "2.0", "*", "X2", ".", "sum", "(", "axis", "=", "2", ")", ".", "sum", "(", "axis", "=", "1", ")", ")", "-", "np", ".", "sum", "(", "X2", "[", ":", ",", "0", ",", ":", "]", ",", "axis", "=", "1", ")", ")", "-", "np", ".", "sum", "(", "X2", "[", ":", ",", "(", "-", "1", ")", ",", ":", "]", ",", "axis", "=", "1", ")", ")", "return", "norms2" ]
compute l2 norm of stft transform .
train
false
7,150
def solidity_library_symbol(library_name): length = min(len(library_name), 36) library_piece = library_name[:length] hold_piece = ('_' * (36 - length)) return '__{library}{hold}__'.format(library=library_piece, hold=hold_piece)
[ "def", "solidity_library_symbol", "(", "library_name", ")", ":", "length", "=", "min", "(", "len", "(", "library_name", ")", ",", "36", ")", "library_piece", "=", "library_name", "[", ":", "length", "]", "hold_piece", "=", "(", "'_'", "*", "(", "36", "-", "length", ")", ")", "return", "'__{library}{hold}__'", ".", "format", "(", "library", "=", "library_piece", ",", "hold", "=", "hold_piece", ")" ]
return the symbol used in the bytecode to represent the library_name .
train
true
7,151
def generate_totp_secret(length=10): assert (type(length) in six.integer_types) return base64.b32encode(os.urandom(length)).decode('ascii')
[ "def", "generate_totp_secret", "(", "length", "=", "10", ")", ":", "assert", "(", "type", "(", "length", ")", "in", "six", ".", "integer_types", ")", "return", "base64", ".", "b32encode", "(", "os", ".", "urandom", "(", "length", ")", ")", ".", "decode", "(", "'ascii'", ")" ]
generates a new base32 encoded .
train
false
7,152
def histograms(img, nbins): return _histograms.histograms(img, nbins)
[ "def", "histograms", "(", "img", ",", "nbins", ")", ":", "return", "_histograms", ".", "histograms", "(", "img", ",", "nbins", ")" ]
calculate the channel histograms of the current image .
train
false
7,154
def serialize_value(field_type, value): try: return app.data.serializers[field_type](value) except (KeyError, ValueError, TypeError, InvalidId): return value
[ "def", "serialize_value", "(", "field_type", ",", "value", ")", ":", "try", ":", "return", "app", ".", "data", ".", "serializers", "[", "field_type", "]", "(", "value", ")", "except", "(", "KeyError", ",", "ValueError", ",", "TypeError", ",", "InvalidId", ")", ":", "return", "value" ]
serialize value of a given type .
train
false
7,156
@decorator def close_open_connections(fn, *args, **kw): try: fn(*args, **kw) finally: testing_reaper.close_all()
[ "@", "decorator", "def", "close_open_connections", "(", "fn", ",", "*", "args", ",", "**", "kw", ")", ":", "try", ":", "fn", "(", "*", "args", ",", "**", "kw", ")", "finally", ":", "testing_reaper", ".", "close_all", "(", ")" ]
decorator that closes all connections after fn execution .
train
false
7,157
def fromqpixmap(im): from . import ImageQt if (not ImageQt.qt_is_installed): raise ImportError('Qt bindings are not installed') return ImageQt.fromqpixmap(im)
[ "def", "fromqpixmap", "(", "im", ")", ":", "from", ".", "import", "ImageQt", "if", "(", "not", "ImageQt", ".", "qt_is_installed", ")", ":", "raise", "ImportError", "(", "'Qt bindings are not installed'", ")", "return", "ImageQt", ".", "fromqpixmap", "(", "im", ")" ]
creates an image instance from a qpixmap image .
train
false
7,158
def is_score_higher(earned1, possible1, earned2, possible2): (is_higher, _, _) = compare_scores(earned1, possible1, earned2, possible2) return is_higher
[ "def", "is_score_higher", "(", "earned1", ",", "possible1", ",", "earned2", ",", "possible2", ")", ":", "(", "is_higher", ",", "_", ",", "_", ")", "=", "compare_scores", "(", "earned1", ",", "possible1", ",", "earned2", ",", "possible2", ")", "return", "is_higher" ]
returns whether the 2nd set of scores is higher than the first .
train
false
7,159
def _object_reducer(o, names=('id', 'name', 'path', 'httpMethod', 'statusCode', 'Created', 'Deleted', 'Updated', 'Flushed', 'Associated', 'Disassociated')): result = {} if isinstance(o, dict): for (k, v) in six.iteritems(o): if isinstance(v, dict): reduced = (v if (k == 'variables') else _object_reducer(v, names)) if (reduced or _name_matches(k, names)): result[k] = reduced elif isinstance(v, list): newlist = [] for val in v: reduced = _object_reducer(val, names) if (reduced or _name_matches(k, names)): newlist.append(reduced) if newlist: result[k] = newlist elif _name_matches(k, names): result[k] = v return result
[ "def", "_object_reducer", "(", "o", ",", "names", "=", "(", "'id'", ",", "'name'", ",", "'path'", ",", "'httpMethod'", ",", "'statusCode'", ",", "'Created'", ",", "'Deleted'", ",", "'Updated'", ",", "'Flushed'", ",", "'Associated'", ",", "'Disassociated'", ")", ")", ":", "result", "=", "{", "}", "if", "isinstance", "(", "o", ",", "dict", ")", ":", "for", "(", "k", ",", "v", ")", "in", "six", ".", "iteritems", "(", "o", ")", ":", "if", "isinstance", "(", "v", ",", "dict", ")", ":", "reduced", "=", "(", "v", "if", "(", "k", "==", "'variables'", ")", "else", "_object_reducer", "(", "v", ",", "names", ")", ")", "if", "(", "reduced", "or", "_name_matches", "(", "k", ",", "names", ")", ")", ":", "result", "[", "k", "]", "=", "reduced", "elif", "isinstance", "(", "v", ",", "list", ")", ":", "newlist", "=", "[", "]", "for", "val", "in", "v", ":", "reduced", "=", "_object_reducer", "(", "val", ",", "names", ")", "if", "(", "reduced", "or", "_name_matches", "(", "k", ",", "names", ")", ")", ":", "newlist", ".", "append", "(", "reduced", ")", "if", "newlist", ":", "result", "[", "k", "]", "=", "newlist", "elif", "_name_matches", "(", "k", ",", "names", ")", ":", "result", "[", "k", "]", "=", "v", "return", "result" ]
helper function to reduce the amount of information that will be kept in the change log for api gw related return values .
train
true
7,160
def skip_if_no_package(*args, **kwargs): from nose import SkipTest package_check(exc_failed_import=SkipTest, exc_failed_check=SkipTest, *args, **kwargs)
[ "def", "skip_if_no_package", "(", "*", "args", ",", "**", "kwargs", ")", ":", "from", "nose", "import", "SkipTest", "package_check", "(", "exc_failed_import", "=", "SkipTest", ",", "exc_failed_check", "=", "SkipTest", ",", "*", "args", ",", "**", "kwargs", ")" ]
raise skiptest if package_check fails parameters *args positional parameters passed to package_check *kwargs keyword parameters passed to package_check .
train
false
7,161
def get_release_group_image_list(releasegroupid): return _caa_request(releasegroupid, entitytype='release-group')
[ "def", "get_release_group_image_list", "(", "releasegroupid", ")", ":", "return", "_caa_request", "(", "releasegroupid", ",", "entitytype", "=", "'release-group'", ")" ]
get the list of cover art associated with a release group .
train
false
7,162
def test_pretty_class(): class C: pass class D(object, ): pass assert (pretty(C) == str(C)) assert (pretty(D) == str(D))
[ "def", "test_pretty_class", "(", ")", ":", "class", "C", ":", "pass", "class", "D", "(", "object", ",", ")", ":", "pass", "assert", "(", "pretty", "(", "C", ")", "==", "str", "(", "C", ")", ")", "assert", "(", "pretty", "(", "D", ")", "==", "str", "(", "D", ")", ")" ]
test that the printer dispatcher correctly handles classes .
train
false
7,163
def primitive_delete(course, num): tabs = course.tabs validate_args(num, tabs[num].get('type', '')) del tabs[num] modulestore().update_item(course, ModuleStoreEnum.UserID.primitive_command)
[ "def", "primitive_delete", "(", "course", ",", "num", ")", ":", "tabs", "=", "course", ".", "tabs", "validate_args", "(", "num", ",", "tabs", "[", "num", "]", ".", "get", "(", "'type'", ",", "''", ")", ")", "del", "tabs", "[", "num", "]", "modulestore", "(", ")", ".", "update_item", "(", "course", ",", "ModuleStoreEnum", ".", "UserID", ".", "primitive_command", ")" ]
deletes the given tab number .
train
false
7,164
def migrate_log(logs): logs_count = logs.count() count = 0 for log in logs: count += 1 node = (log.params.get('node') or log.params.get('project')) params_node = Node.load(node) if params_node.is_registration: log.params['node'] = get_registered_from(params_node) log.params['registration'] = params_node._id else: log.params['registration'] = RegistrationApproval.load(log.params['registration_approval_id'])._get_registration()._id log.save() logger.info('{}/{} Finished migrating log {}: registration action {}. params[node]={} and params[registration]={}'.format(count, logs_count, log._id, log.action, log.params['node'], log.params['registration']))
[ "def", "migrate_log", "(", "logs", ")", ":", "logs_count", "=", "logs", ".", "count", "(", ")", "count", "=", "0", "for", "log", "in", "logs", ":", "count", "+=", "1", "node", "=", "(", "log", ".", "params", ".", "get", "(", "'node'", ")", "or", "log", ".", "params", ".", "get", "(", "'project'", ")", ")", "params_node", "=", "Node", ".", "load", "(", "node", ")", "if", "params_node", ".", "is_registration", ":", "log", ".", "params", "[", "'node'", "]", "=", "get_registered_from", "(", "params_node", ")", "log", ".", "params", "[", "'registration'", "]", "=", "params_node", ".", "_id", "else", ":", "log", ".", "params", "[", "'registration'", "]", "=", "RegistrationApproval", ".", "load", "(", "log", ".", "params", "[", "'registration_approval_id'", "]", ")", ".", "_get_registration", "(", ")", ".", "_id", "log", ".", "save", "(", ")", "logger", ".", "info", "(", "'{}/{} Finished migrating log {}: registration action {}. params[node]={} and params[registration]={}'", ".", "format", "(", "count", ",", "logs_count", ",", "log", ".", "_id", ",", "log", ".", "action", ",", "log", ".", "params", "[", "'node'", "]", ",", "log", ".", "params", "[", "'registration'", "]", ")", ")" ]
migrates registration logs to set params[node] to registered_from and params[registration] to the registration .
train
false
7,165
def libvlc_audio_set_callbacks(mp, play, pause, resume, flush, drain, opaque): f = (_Cfunctions.get('libvlc_audio_set_callbacks', None) or _Cfunction('libvlc_audio_set_callbacks', ((1,), (1,), (1,), (1,), (1,), (1,), (1,)), None, None, MediaPlayer, AudioPlayCb, AudioPauseCb, AudioResumeCb, AudioFlushCb, AudioDrainCb, ctypes.c_void_p)) return f(mp, play, pause, resume, flush, drain, opaque)
[ "def", "libvlc_audio_set_callbacks", "(", "mp", ",", "play", ",", "pause", ",", "resume", ",", "flush", ",", "drain", ",", "opaque", ")", ":", "f", "=", "(", "_Cfunctions", ".", "get", "(", "'libvlc_audio_set_callbacks'", ",", "None", ")", "or", "_Cfunction", "(", "'libvlc_audio_set_callbacks'", ",", "(", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ",", "(", "1", ",", ")", ")", ",", "None", ",", "None", ",", "MediaPlayer", ",", "AudioPlayCb", ",", "AudioPauseCb", ",", "AudioResumeCb", ",", "AudioFlushCb", ",", "AudioDrainCb", ",", "ctypes", ".", "c_void_p", ")", ")", "return", "f", "(", "mp", ",", "play", ",", "pause", ",", "resume", ",", "flush", ",", "drain", ",", "opaque", ")" ]
set callbacks and private data for decoded audio .
train
true
7,166
def make_small_graph(graph_description, create_using=None): ltype = graph_description[0] name = graph_description[1] n = graph_description[2] G = empty_graph(n, create_using) nodes = G.nodes() if (ltype == 'adjacencylist'): adjlist = graph_description[3] if (len(adjlist) != n): raise NetworkXError('invalid graph_description') G.add_edges_from([((u - 1), v) for v in nodes for u in adjlist[v]]) elif (ltype == 'edgelist'): edgelist = graph_description[3] for e in edgelist: v1 = (e[0] - 1) v2 = (e[1] - 1) if ((v1 < 0) or (v1 > (n - 1)) or (v2 < 0) or (v2 > (n - 1))): raise NetworkXError('invalid graph_description') else: G.add_edge(v1, v2) G.name = name return G
[ "def", "make_small_graph", "(", "graph_description", ",", "create_using", "=", "None", ")", ":", "ltype", "=", "graph_description", "[", "0", "]", "name", "=", "graph_description", "[", "1", "]", "n", "=", "graph_description", "[", "2", "]", "G", "=", "empty_graph", "(", "n", ",", "create_using", ")", "nodes", "=", "G", ".", "nodes", "(", ")", "if", "(", "ltype", "==", "'adjacencylist'", ")", ":", "adjlist", "=", "graph_description", "[", "3", "]", "if", "(", "len", "(", "adjlist", ")", "!=", "n", ")", ":", "raise", "NetworkXError", "(", "'invalid graph_description'", ")", "G", ".", "add_edges_from", "(", "[", "(", "(", "u", "-", "1", ")", ",", "v", ")", "for", "v", "in", "nodes", "for", "u", "in", "adjlist", "[", "v", "]", "]", ")", "elif", "(", "ltype", "==", "'edgelist'", ")", ":", "edgelist", "=", "graph_description", "[", "3", "]", "for", "e", "in", "edgelist", ":", "v1", "=", "(", "e", "[", "0", "]", "-", "1", ")", "v2", "=", "(", "e", "[", "1", "]", "-", "1", ")", "if", "(", "(", "v1", "<", "0", ")", "or", "(", "v1", ">", "(", "n", "-", "1", ")", ")", "or", "(", "v2", "<", "0", ")", "or", "(", "v2", ">", "(", "n", "-", "1", ")", ")", ")", ":", "raise", "NetworkXError", "(", "'invalid graph_description'", ")", "else", ":", "G", ".", "add_edge", "(", "v1", ",", "v2", ")", "G", ".", "name", "=", "name", "return", "G" ]
return the small graph described by graph_description .
train
false
7,167
def is_python_source(filename): return (os.path.splitext(filename)[1][1:] in PY_SOURCE_EXTS)
[ "def", "is_python_source", "(", "filename", ")", ":", "return", "(", "os", ".", "path", ".", "splitext", "(", "filename", ")", "[", "1", "]", "[", "1", ":", "]", "in", "PY_SOURCE_EXTS", ")" ]
rtype: bool return: true if the filename is a python source file .
train
false
7,169
def test_rgb_to_hsl_part_3(): pass
[ "def", "test_rgb_to_hsl_part_3", "(", ")", ":", "pass" ]
test rgb to hsl color function .
train
false
7,170
def truncated_json_dumps(obj, max_length, key, ensure_ascii=False): orig = json.dumps(obj, ensure_ascii=ensure_ascii) diff = (len(orig) - max_length) if (diff < 0): return orig dupe = json.loads(orig) if (len(dupe[key]) < diff): raise TruncationException("Can't truncate enough to satisfy `max_length`.") dupe[key] = dupe[key][:(- diff)] return json.dumps(dupe, ensure_ascii=ensure_ascii)
[ "def", "truncated_json_dumps", "(", "obj", ",", "max_length", ",", "key", ",", "ensure_ascii", "=", "False", ")", ":", "orig", "=", "json", ".", "dumps", "(", "obj", ",", "ensure_ascii", "=", "ensure_ascii", ")", "diff", "=", "(", "len", "(", "orig", ")", "-", "max_length", ")", "if", "(", "diff", "<", "0", ")", ":", "return", "orig", "dupe", "=", "json", ".", "loads", "(", "orig", ")", "if", "(", "len", "(", "dupe", "[", "key", "]", ")", "<", "diff", ")", ":", "raise", "TruncationException", "(", "\"Can't truncate enough to satisfy `max_length`.\"", ")", "dupe", "[", "key", "]", "=", "dupe", "[", "key", "]", "[", ":", "(", "-", "diff", ")", "]", "return", "json", ".", "dumps", "(", "dupe", ",", "ensure_ascii", "=", "ensure_ascii", ")" ]
dump an object to json .
train
false
7,174
def get_devmm(name): name = os.path.expanduser(name) if (is_chrdev(name) or is_blkdev(name)): stat_structure = os.stat(name) return (os.major(stat_structure.st_rdev), os.minor(stat_structure.st_rdev)) else: return (0, 0)
[ "def", "get_devmm", "(", "name", ")", ":", "name", "=", "os", ".", "path", ".", "expanduser", "(", "name", ")", "if", "(", "is_chrdev", "(", "name", ")", "or", "is_blkdev", "(", "name", ")", ")", ":", "stat_structure", "=", "os", ".", "stat", "(", "name", ")", "return", "(", "os", ".", "major", "(", "stat_structure", ".", "st_rdev", ")", ",", "os", ".", "minor", "(", "stat_structure", ".", "st_rdev", ")", ")", "else", ":", "return", "(", "0", ",", "0", ")" ]
get major/minor info from a device cli example: .
train
true
7,175
def search_list(list_provided): for i in range((len(list_provided) - 1)): if ((not (list_provided[i] in list_provided[(i + 1):])) and (not (list_provided[i] in list_provided[:i]))): 'If the same number is not present before or after in the list then\n return the number' return str(list_provided[i]) break
[ "def", "search_list", "(", "list_provided", ")", ":", "for", "i", "in", "range", "(", "(", "len", "(", "list_provided", ")", "-", "1", ")", ")", ":", "if", "(", "(", "not", "(", "list_provided", "[", "i", "]", "in", "list_provided", "[", "(", "i", "+", "1", ")", ":", "]", ")", ")", "and", "(", "not", "(", "list_provided", "[", "i", "]", "in", "list_provided", "[", ":", "i", "]", ")", ")", ")", ":", "return", "str", "(", "list_provided", "[", "i", "]", ")", "break" ]
search list provided for characters that are represented only once .
train
false
7,176
def possible_deprecation_warning(config): if (cli_command != LEAUTO): return if config.no_self_upgrade: return if ('CERTBOT_AUTO' not in os.environ): logger.warning('You are running with an old copy of letsencrypt-auto that does not receive updates, and is less reliable than more recent versions. We recommend upgrading to the latest certbot-auto script, or using native OS packages.') logger.debug('Deprecation warning circumstances: %s / %s', sys.argv[0], os.environ)
[ "def", "possible_deprecation_warning", "(", "config", ")", ":", "if", "(", "cli_command", "!=", "LEAUTO", ")", ":", "return", "if", "config", ".", "no_self_upgrade", ":", "return", "if", "(", "'CERTBOT_AUTO'", "not", "in", "os", ".", "environ", ")", ":", "logger", ".", "warning", "(", "'You are running with an old copy of letsencrypt-auto that does not receive updates, and is less reliable than more recent versions. We recommend upgrading to the latest certbot-auto script, or using native OS packages.'", ")", "logger", ".", "debug", "(", "'Deprecation warning circumstances: %s / %s'", ",", "sys", ".", "argv", "[", "0", "]", ",", "os", ".", "environ", ")" ]
a deprecation warning for users with the old .
train
false
7,179
def start_ipython(argv=None, **kwargs): from IPython.terminal.ipapp import launch_new_instance return launch_new_instance(argv=argv, **kwargs)
[ "def", "start_ipython", "(", "argv", "=", "None", ",", "**", "kwargs", ")", ":", "from", "IPython", ".", "terminal", ".", "ipapp", "import", "launch_new_instance", "return", "launch_new_instance", "(", "argv", "=", "argv", ",", "**", "kwargs", ")" ]
launch a normal ipython instance ipython .
train
false
7,180
def _compute_regularization(alpha, l1_ratio, regularization): alpha_H = 0.0 alpha_W = 0.0 if (regularization in ('both', 'components')): alpha_H = float(alpha) if (regularization in ('both', 'transformation')): alpha_W = float(alpha) l1_reg_W = (alpha_W * l1_ratio) l1_reg_H = (alpha_H * l1_ratio) l2_reg_W = (alpha_W * (1.0 - l1_ratio)) l2_reg_H = (alpha_H * (1.0 - l1_ratio)) return (l1_reg_W, l1_reg_H, l2_reg_W, l2_reg_H)
[ "def", "_compute_regularization", "(", "alpha", ",", "l1_ratio", ",", "regularization", ")", ":", "alpha_H", "=", "0.0", "alpha_W", "=", "0.0", "if", "(", "regularization", "in", "(", "'both'", ",", "'components'", ")", ")", ":", "alpha_H", "=", "float", "(", "alpha", ")", "if", "(", "regularization", "in", "(", "'both'", ",", "'transformation'", ")", ")", ":", "alpha_W", "=", "float", "(", "alpha", ")", "l1_reg_W", "=", "(", "alpha_W", "*", "l1_ratio", ")", "l1_reg_H", "=", "(", "alpha_H", "*", "l1_ratio", ")", "l2_reg_W", "=", "(", "alpha_W", "*", "(", "1.0", "-", "l1_ratio", ")", ")", "l2_reg_H", "=", "(", "alpha_H", "*", "(", "1.0", "-", "l1_ratio", ")", ")", "return", "(", "l1_reg_W", ",", "l1_reg_H", ",", "l2_reg_W", ",", "l2_reg_H", ")" ]
compute l1 and l2 regularization coefficients for w and h .
train
false
7,181
def topic_coherence(lists, services=['ca', 'cp', 'cv', 'npmi', 'uci', 'umass']): url = u'http://palmetto.aksw.org/palmetto-webapp/service/{}?words={}' reqs = [url.format(s, '%20'.join(top[:10])) for s in services for top in lists] pool = multiprocessing.Pool() coherences = pool.map(get_request, reqs) pool.close() pool.terminate() pool.join() del pool args = [(j, s, top) for s in services for (j, top) in enumerate(lists)] ans = {} for ((j, s, t), tc) in zip(args, coherences): ans[(j, s)] = tc return ans
[ "def", "topic_coherence", "(", "lists", ",", "services", "=", "[", "'ca'", ",", "'cp'", ",", "'cv'", ",", "'npmi'", ",", "'uci'", ",", "'umass'", "]", ")", ":", "url", "=", "u'http://palmetto.aksw.org/palmetto-webapp/service/{}?words={}'", "reqs", "=", "[", "url", ".", "format", "(", "s", ",", "'%20'", ".", "join", "(", "top", "[", ":", "10", "]", ")", ")", "for", "s", "in", "services", "for", "top", "in", "lists", "]", "pool", "=", "multiprocessing", ".", "Pool", "(", ")", "coherences", "=", "pool", ".", "map", "(", "get_request", ",", "reqs", ")", "pool", ".", "close", "(", ")", "pool", ".", "terminate", "(", ")", "pool", ".", "join", "(", ")", "del", "pool", "args", "=", "[", "(", "j", ",", "s", ",", "top", ")", "for", "s", "in", "services", "for", "(", "j", ",", "top", ")", "in", "enumerate", "(", "lists", ")", "]", "ans", "=", "{", "}", "for", "(", "(", "j", ",", "s", ",", "t", ")", ",", "tc", ")", "in", "zip", "(", "args", ",", "coherences", ")", ":", "ans", "[", "(", "j", ",", "s", ")", "]", "=", "tc", "return", "ans" ]
requests the topic coherence from aksw palmetto arguments lists : list of lists a list of lists with one list of top words for each topic .
train
false
7,182
def _retrieve_course(course_key): course = modulestore().get_course(course_key, depth=0) if (course is None): raise CourseNotFoundError return course
[ "def", "_retrieve_course", "(", "course_key", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ",", "depth", "=", "0", ")", "if", "(", "course", "is", "None", ")", ":", "raise", "CourseNotFoundError", "return", "course" ]
retrieves the course for the given course key .
train
false
7,183
def parse_scope(scope): assert scope, 'scope is required' if isinstance(scope, six.string_types): scope_list = scope.split(',') elif isinstance(scope, (list, tuple)): scope_list = list(scope) else: raise ValueError(('unrecognized type for scope %r' % scope)) return scope_list
[ "def", "parse_scope", "(", "scope", ")", ":", "assert", "scope", ",", "'scope is required'", "if", "isinstance", "(", "scope", ",", "six", ".", "string_types", ")", ":", "scope_list", "=", "scope", ".", "split", "(", "','", ")", "elif", "isinstance", "(", "scope", ",", "(", "list", ",", "tuple", ")", ")", ":", "scope_list", "=", "list", "(", "scope", ")", "else", ":", "raise", "ValueError", "(", "(", "'unrecognized type for scope %r'", "%", "scope", ")", ")", "return", "scope_list" ]
turns email .
train
false
7,184
def test_allknn_sample_wt_fit(): allknn = AllKNN(random_state=RND_SEED) assert_raises(RuntimeError, allknn.sample, X, Y)
[ "def", "test_allknn_sample_wt_fit", "(", ")", ":", "allknn", "=", "AllKNN", "(", "random_state", "=", "RND_SEED", ")", "assert_raises", "(", "RuntimeError", ",", "allknn", ".", "sample", ",", "X", ",", "Y", ")" ]
test either if an error is raised when sample is called before fitting .
train
false
7,187
def p4BranchesInGit(branchesAreInRemotes=True): branches = {} cmdline = 'git rev-parse --symbolic ' if branchesAreInRemotes: cmdline += '--remotes' else: cmdline += '--branches' for line in read_pipe_lines(cmdline): line = line.strip() if (not line.startswith('p4/')): continue if (line == 'p4/HEAD'): continue branch = line[len('p4/'):] branches[branch] = parseRevision(line) return branches
[ "def", "p4BranchesInGit", "(", "branchesAreInRemotes", "=", "True", ")", ":", "branches", "=", "{", "}", "cmdline", "=", "'git rev-parse --symbolic '", "if", "branchesAreInRemotes", ":", "cmdline", "+=", "'--remotes'", "else", ":", "cmdline", "+=", "'--branches'", "for", "line", "in", "read_pipe_lines", "(", "cmdline", ")", ":", "line", "=", "line", ".", "strip", "(", ")", "if", "(", "not", "line", ".", "startswith", "(", "'p4/'", ")", ")", ":", "continue", "if", "(", "line", "==", "'p4/HEAD'", ")", ":", "continue", "branch", "=", "line", "[", "len", "(", "'p4/'", ")", ":", "]", "branches", "[", "branch", "]", "=", "parseRevision", "(", "line", ")", "return", "branches" ]
find all the branches whose names start with "p4/" .
train
false
7,188
def _prepare_create_request(cluster): return messages_v2_pb2.CreateClusterRequest(parent=cluster._instance.name, cluster_id=cluster.cluster_id, cluster=data_v2_pb2.Cluster(serve_nodes=cluster.serve_nodes))
[ "def", "_prepare_create_request", "(", "cluster", ")", ":", "return", "messages_v2_pb2", ".", "CreateClusterRequest", "(", "parent", "=", "cluster", ".", "_instance", ".", "name", ",", "cluster_id", "=", "cluster", ".", "cluster_id", ",", "cluster", "=", "data_v2_pb2", ".", "Cluster", "(", "serve_nodes", "=", "cluster", ".", "serve_nodes", ")", ")" ]
creates a protobuf request for a createinstance request .
train
false
7,189
def follow_redirects(link, sites=None): def follow(url): return ((sites == None) or (urlparse.urlparse(url).hostname in sites)) class RedirectHandler(urllib2.HTTPRedirectHandler, ): def __init__(self): self.last_url = None def redirect_request(self, req, fp, code, msg, hdrs, newurl): self.last_url = newurl if (not follow(newurl)): return None r = urllib2.HTTPRedirectHandler.redirect_request(self, req, fp, code, msg, hdrs, newurl) r.get_method = (lambda : 'HEAD') return r if (not follow(link)): return link redirect_handler = RedirectHandler() opener = urllib2.build_opener(redirect_handler) req = urllib2.Request(link) req.get_method = (lambda : 'HEAD') try: with contextlib.closing(opener.open(req, timeout=1)) as site: return site.url except: return (redirect_handler.last_url if redirect_handler.last_url else link)
[ "def", "follow_redirects", "(", "link", ",", "sites", "=", "None", ")", ":", "def", "follow", "(", "url", ")", ":", "return", "(", "(", "sites", "==", "None", ")", "or", "(", "urlparse", ".", "urlparse", "(", "url", ")", ".", "hostname", "in", "sites", ")", ")", "class", "RedirectHandler", "(", "urllib2", ".", "HTTPRedirectHandler", ",", ")", ":", "def", "__init__", "(", "self", ")", ":", "self", ".", "last_url", "=", "None", "def", "redirect_request", "(", "self", ",", "req", ",", "fp", ",", "code", ",", "msg", ",", "hdrs", ",", "newurl", ")", ":", "self", ".", "last_url", "=", "newurl", "if", "(", "not", "follow", "(", "newurl", ")", ")", ":", "return", "None", "r", "=", "urllib2", ".", "HTTPRedirectHandler", ".", "redirect_request", "(", "self", ",", "req", ",", "fp", ",", "code", ",", "msg", ",", "hdrs", ",", "newurl", ")", "r", ".", "get_method", "=", "(", "lambda", ":", "'HEAD'", ")", "return", "r", "if", "(", "not", "follow", "(", "link", ")", ")", ":", "return", "link", "redirect_handler", "=", "RedirectHandler", "(", ")", "opener", "=", "urllib2", ".", "build_opener", "(", "redirect_handler", ")", "req", "=", "urllib2", ".", "Request", "(", "link", ")", "req", ".", "get_method", "=", "(", "lambda", ":", "'HEAD'", ")", "try", ":", "with", "contextlib", ".", "closing", "(", "opener", ".", "open", "(", "req", ",", "timeout", "=", "1", ")", ")", "as", "site", ":", "return", "site", ".", "url", "except", ":", "return", "(", "redirect_handler", ".", "last_url", "if", "redirect_handler", ".", "last_url", "else", "link", ")" ]
follow directs for the link as long as the redirects are on the given sites and return the resolved link .
train
false
7,190
def element(cxel_str): _xml = xml(cxel_str) return parse_xml(_xml)
[ "def", "element", "(", "cxel_str", ")", ":", "_xml", "=", "xml", "(", "cxel_str", ")", "return", "parse_xml", "(", "_xml", ")" ]
return an oxml element parsed from the xml generated from *cxel_str* .
train
false
7,191
def splitport(host): global _portprog if (_portprog is None): import re _portprog = re.compile('^(.*):([0-9]*)$') match = _portprog.match(host) if match: (host, port) = match.groups() if port: return (host, port) return (host, None)
[ "def", "splitport", "(", "host", ")", ":", "global", "_portprog", "if", "(", "_portprog", "is", "None", ")", ":", "import", "re", "_portprog", "=", "re", ".", "compile", "(", "'^(.*):([0-9]*)$'", ")", "match", "=", "_portprog", ".", "match", "(", "host", ")", "if", "match", ":", "(", "host", ",", "port", ")", "=", "match", ".", "groups", "(", ")", "if", "port", ":", "return", "(", "host", ",", "port", ")", "return", "(", "host", ",", "None", ")" ]
splitport --> host .
train
false
7,192
def make_shared(shape): raise NotImplementedError('TODO: implement the function')
[ "def", "make_shared", "(", "shape", ")", ":", "raise", "NotImplementedError", "(", "'TODO: implement the function'", ")" ]
returns a theano shared variable containing a tensor of the specified shape .
train
false
7,193
def has_studio_write_access(user, course_key): return bool((STUDIO_EDIT_CONTENT & get_user_permissions(user, course_key)))
[ "def", "has_studio_write_access", "(", "user", ",", "course_key", ")", ":", "return", "bool", "(", "(", "STUDIO_EDIT_CONTENT", "&", "get_user_permissions", "(", "user", ",", "course_key", ")", ")", ")" ]
return true if user has studio write access to the given course .
train
false
7,194
def service_list(): r = salt.utils.http.query((DETAILS['url'] + 'service/list'), decode_type='json', decode=True) return r['dict']
[ "def", "service_list", "(", ")", ":", "r", "=", "salt", ".", "utils", ".", "http", ".", "query", "(", "(", "DETAILS", "[", "'url'", "]", "+", "'service/list'", ")", ",", "decode_type", "=", "'json'", ",", "decode", "=", "True", ")", "return", "r", "[", "'dict'", "]" ]
return a list of available services cli example: .
train
true
7,195
def get_first_defined(data, keys, default_value=None): for key in keys: try: return data[key] except KeyError: pass return default_value
[ "def", "get_first_defined", "(", "data", ",", "keys", ",", "default_value", "=", "None", ")", ":", "for", "key", "in", "keys", ":", "try", ":", "return", "data", "[", "key", "]", "except", "KeyError", ":", "pass", "return", "default_value" ]
get the first defined key in data .
train
true
7,196
def select_db_reader_mode(f): return IMPL.select_db_reader_mode(f)
[ "def", "select_db_reader_mode", "(", "f", ")", ":", "return", "IMPL", ".", "select_db_reader_mode", "(", "f", ")" ]
decorator to select synchronous or asynchronous reader mode .
train
false
7,197
def _generate_min_degree(gamma, average_degree, max_degree, tolerance, max_iters): min_deg_top = max_degree min_deg_bot = 1 min_deg_mid = (((min_deg_top - min_deg_bot) / 2) + min_deg_bot) itrs = 0 mid_avg_deg = 0 while (abs((mid_avg_deg - average_degree)) > tolerance): if (itrs > max_iters): raise nx.ExceededMaxIterations('Could not match average_degree') mid_avg_deg = 0 for x in range(int(min_deg_mid), (max_degree + 1)): mid_avg_deg += ((x ** ((- gamma) + 1)) / zeta(gamma, min_deg_mid, tolerance)) if (mid_avg_deg > average_degree): min_deg_top = min_deg_mid min_deg_mid = (((min_deg_top - min_deg_bot) / 2) + min_deg_bot) else: min_deg_bot = min_deg_mid min_deg_mid = (((min_deg_top - min_deg_bot) / 2) + min_deg_bot) itrs += 1 return round(min_deg_mid)
[ "def", "_generate_min_degree", "(", "gamma", ",", "average_degree", ",", "max_degree", ",", "tolerance", ",", "max_iters", ")", ":", "min_deg_top", "=", "max_degree", "min_deg_bot", "=", "1", "min_deg_mid", "=", "(", "(", "(", "min_deg_top", "-", "min_deg_bot", ")", "/", "2", ")", "+", "min_deg_bot", ")", "itrs", "=", "0", "mid_avg_deg", "=", "0", "while", "(", "abs", "(", "(", "mid_avg_deg", "-", "average_degree", ")", ")", ">", "tolerance", ")", ":", "if", "(", "itrs", ">", "max_iters", ")", ":", "raise", "nx", ".", "ExceededMaxIterations", "(", "'Could not match average_degree'", ")", "mid_avg_deg", "=", "0", "for", "x", "in", "range", "(", "int", "(", "min_deg_mid", ")", ",", "(", "max_degree", "+", "1", ")", ")", ":", "mid_avg_deg", "+=", "(", "(", "x", "**", "(", "(", "-", "gamma", ")", "+", "1", ")", ")", "/", "zeta", "(", "gamma", ",", "min_deg_mid", ",", "tolerance", ")", ")", "if", "(", "mid_avg_deg", ">", "average_degree", ")", ":", "min_deg_top", "=", "min_deg_mid", "min_deg_mid", "=", "(", "(", "(", "min_deg_top", "-", "min_deg_bot", ")", "/", "2", ")", "+", "min_deg_bot", ")", "else", ":", "min_deg_bot", "=", "min_deg_mid", "min_deg_mid", "=", "(", "(", "(", "min_deg_top", "-", "min_deg_bot", ")", "/", "2", ")", "+", "min_deg_bot", ")", "itrs", "+=", "1", "return", "round", "(", "min_deg_mid", ")" ]
returns a minimum degree from the given average degree .
train
false
7,198
def dmp_primitive(f, u, K): (cont, v) = (dmp_content(f, u, K), (u - 1)) if (dmp_zero_p(f, u) or dmp_one_p(cont, v, K)): return (cont, f) else: return (cont, [dmp_quo(c, cont, v, K) for c in f])
[ "def", "dmp_primitive", "(", "f", ",", "u", ",", "K", ")", ":", "(", "cont", ",", "v", ")", "=", "(", "dmp_content", "(", "f", ",", "u", ",", "K", ")", ",", "(", "u", "-", "1", ")", ")", "if", "(", "dmp_zero_p", "(", "f", ",", "u", ")", "or", "dmp_one_p", "(", "cont", ",", "v", ",", "K", ")", ")", ":", "return", "(", "cont", ",", "f", ")", "else", ":", "return", "(", "cont", ",", "[", "dmp_quo", "(", "c", ",", "cont", ",", "v", ",", "K", ")", "for", "c", "in", "f", "]", ")" ]
returns multivariate content and a primitive polynomial .
train
false
7,199
def _install_from_path(path): if (not os.path.exists(path)): msg = 'File not found: {0}'.format(path) raise SaltInvocationError(msg) cmd = 'installer -pkg "{0}" -target /'.format(path) return salt.utils.mac_utils.execute_return_success(cmd)
[ "def", "_install_from_path", "(", "path", ")", ":", "if", "(", "not", "os", ".", "path", ".", "exists", "(", "path", ")", ")", ":", "msg", "=", "'File not found: {0}'", ".", "format", "(", "path", ")", "raise", "SaltInvocationError", "(", "msg", ")", "cmd", "=", "'installer -pkg \"{0}\" -target /'", ".", "format", "(", "path", ")", "return", "salt", ".", "utils", ".", "mac_utils", ".", "execute_return_success", "(", "cmd", ")" ]
internal function to install a package from the given path .
train
true
7,200
def get_dynamic_link_map(for_delete=False): if ((getattr(frappe.local, 'dynamic_link_map', None) == None) or frappe.flags.in_test): dynamic_link_map = {} for df in get_dynamic_links(): meta = frappe.get_meta(df.parent) if meta.issingle: dynamic_link_map.setdefault(meta.name, []).append(df) else: links = frappe.db.sql_list('select distinct {options} from `tab{parent}`'.format(**df)) for doctype in links: dynamic_link_map.setdefault(doctype, []).append(df) frappe.local.dynamic_link_map = dynamic_link_map return frappe.local.dynamic_link_map
[ "def", "get_dynamic_link_map", "(", "for_delete", "=", "False", ")", ":", "if", "(", "(", "getattr", "(", "frappe", ".", "local", ",", "'dynamic_link_map'", ",", "None", ")", "==", "None", ")", "or", "frappe", ".", "flags", ".", "in_test", ")", ":", "dynamic_link_map", "=", "{", "}", "for", "df", "in", "get_dynamic_links", "(", ")", ":", "meta", "=", "frappe", ".", "get_meta", "(", "df", ".", "parent", ")", "if", "meta", ".", "issingle", ":", "dynamic_link_map", ".", "setdefault", "(", "meta", ".", "name", ",", "[", "]", ")", ".", "append", "(", "df", ")", "else", ":", "links", "=", "frappe", ".", "db", ".", "sql_list", "(", "'select distinct {options} from `tab{parent}`'", ".", "format", "(", "**", "df", ")", ")", "for", "doctype", "in", "links", ":", "dynamic_link_map", ".", "setdefault", "(", "doctype", ",", "[", "]", ")", ".", "append", "(", "df", ")", "frappe", ".", "local", ".", "dynamic_link_map", "=", "dynamic_link_map", "return", "frappe", ".", "local", ".", "dynamic_link_map" ]
build a map of all dynamically linked tables .
train
false
7,201
def svd(a, name=None): return tsqr(a, name, compute_svd=True)
[ "def", "svd", "(", "a", ",", "name", "=", "None", ")", ":", "return", "tsqr", "(", "a", ",", "name", ",", "compute_svd", "=", "True", ")" ]
this function performs the svd on cpu .
train
false
7,202
def createURLs(): urls = [] for x in range(0, randint(20, 100)): name = 'DEBUG_API' if (randint(0, 5) == 5): name = '' urls.append((name + ''.join(sample(string.ascii_letters, randint(10, 20))))) return urls
[ "def", "createURLs", "(", ")", ":", "urls", "=", "[", "]", "for", "x", "in", "range", "(", "0", ",", "randint", "(", "20", ",", "100", ")", ")", ":", "name", "=", "'DEBUG_API'", "if", "(", "randint", "(", "0", ",", "5", ")", "==", "5", ")", ":", "name", "=", "''", "urls", ".", "append", "(", "(", "name", "+", "''", ".", "join", "(", "sample", "(", "string", ".", "ascii_letters", ",", "randint", "(", "10", ",", "20", ")", ")", ")", ")", ")", "return", "urls" ]
create some urls .
train
false
7,203
def data_to_n(data): if (data[0] <= 62): return (data[0], data[1:]) if (data[1] <= 62): return ((((data[1] << 12) + (data[2] << 6)) + data[3]), data[4:]) return (((((((data[2] << 30) + (data[3] << 24)) + (data[4] << 18)) + (data[5] << 12)) + (data[6] << 6)) + data[7]), data[8:])
[ "def", "data_to_n", "(", "data", ")", ":", "if", "(", "data", "[", "0", "]", "<=", "62", ")", ":", "return", "(", "data", "[", "0", "]", ",", "data", "[", "1", ":", "]", ")", "if", "(", "data", "[", "1", "]", "<=", "62", ")", ":", "return", "(", "(", "(", "(", "data", "[", "1", "]", "<<", "12", ")", "+", "(", "data", "[", "2", "]", "<<", "6", ")", ")", "+", "data", "[", "3", "]", ")", ",", "data", "[", "4", ":", "]", ")", "return", "(", "(", "(", "(", "(", "(", "(", "data", "[", "2", "]", "<<", "30", ")", "+", "(", "data", "[", "3", "]", "<<", "24", ")", ")", "+", "(", "data", "[", "4", "]", "<<", "18", ")", ")", "+", "(", "data", "[", "5", "]", "<<", "12", ")", ")", "+", "(", "data", "[", "6", "]", "<<", "6", ")", ")", "+", "data", "[", "7", "]", ")", ",", "data", "[", "8", ":", "]", ")" ]
read initial one- .
train
false
7,204
def leakage(Cls, *args, **kwargs): mem = [] for i in range(100): Cls(*args, **kwargs) mem.append(info.getMemoryUsage()) if ((mem[i] - mem[0]) > THRESHOLD): break proportion = (i / 99.0) return round(((mem[i] - mem[0]) / proportion), 1)
[ "def", "leakage", "(", "Cls", ",", "*", "args", ",", "**", "kwargs", ")", ":", "mem", "=", "[", "]", "for", "i", "in", "range", "(", "100", ")", ":", "Cls", "(", "*", "args", ",", "**", "kwargs", ")", "mem", ".", "append", "(", "info", ".", "getMemoryUsage", "(", ")", ")", "if", "(", "(", "mem", "[", "i", "]", "-", "mem", "[", "0", "]", ")", ">", "THRESHOLD", ")", ":", "break", "proportion", "=", "(", "i", "/", "99.0", ")", "return", "round", "(", "(", "(", "mem", "[", "i", "]", "-", "mem", "[", "0", "]", ")", "/", "proportion", ")", ",", "1", ")" ]
make up to 100 instances of cls .
train
false
7,205
def _prep_headers_to_info(headers, server_type): meta = {} sysmeta = {} other = {} for (key, val) in dict(headers).items(): lkey = key.lower() if is_user_meta(server_type, lkey): meta[strip_user_meta_prefix(server_type, lkey)] = val elif is_sys_meta(server_type, lkey): sysmeta[strip_sys_meta_prefix(server_type, lkey)] = val else: other[lkey] = val return (other, meta, sysmeta)
[ "def", "_prep_headers_to_info", "(", "headers", ",", "server_type", ")", ":", "meta", "=", "{", "}", "sysmeta", "=", "{", "}", "other", "=", "{", "}", "for", "(", "key", ",", "val", ")", "in", "dict", "(", "headers", ")", ".", "items", "(", ")", ":", "lkey", "=", "key", ".", "lower", "(", ")", "if", "is_user_meta", "(", "server_type", ",", "lkey", ")", ":", "meta", "[", "strip_user_meta_prefix", "(", "server_type", ",", "lkey", ")", "]", "=", "val", "elif", "is_sys_meta", "(", "server_type", ",", "lkey", ")", ":", "sysmeta", "[", "strip_sys_meta_prefix", "(", "server_type", ",", "lkey", ")", "]", "=", "val", "else", ":", "other", "[", "lkey", "]", "=", "val", "return", "(", "other", ",", "meta", ",", "sysmeta", ")" ]
helper method that iterates once over a dict of headers .
train
false
7,206
def _build_obs_freq_mat(acc_rep_mat): total = float(sum(acc_rep_mat.values())) obs_freq_mat = ObservedFrequencyMatrix(alphabet=acc_rep_mat.alphabet, build_later=1) for i in acc_rep_mat: obs_freq_mat[i] = (acc_rep_mat[i] / total) return obs_freq_mat
[ "def", "_build_obs_freq_mat", "(", "acc_rep_mat", ")", ":", "total", "=", "float", "(", "sum", "(", "acc_rep_mat", ".", "values", "(", ")", ")", ")", "obs_freq_mat", "=", "ObservedFrequencyMatrix", "(", "alphabet", "=", "acc_rep_mat", ".", "alphabet", ",", "build_later", "=", "1", ")", "for", "i", "in", "acc_rep_mat", ":", "obs_freq_mat", "[", "i", "]", "=", "(", "acc_rep_mat", "[", "i", "]", "/", "total", ")", "return", "obs_freq_mat" ]
build_obs_freq_mat: build the observed frequency matrix .
train
false
7,209
@before.all def start_video_server(): video_source_dir = '{}/data/video'.format(settings.TEST_ROOT) video_server = VideoSourceHttpService(port_num=settings.VIDEO_SOURCE_PORT) video_server.config['root_dir'] = video_source_dir world.video_source = video_server
[ "@", "before", ".", "all", "def", "start_video_server", "(", ")", ":", "video_source_dir", "=", "'{}/data/video'", ".", "format", "(", "settings", ".", "TEST_ROOT", ")", "video_server", "=", "VideoSourceHttpService", "(", "port_num", "=", "settings", ".", "VIDEO_SOURCE_PORT", ")", "video_server", ".", "config", "[", "'root_dir'", "]", "=", "video_source_dir", "world", ".", "video_source", "=", "video_server" ]
serve the html5 video sources from a local port .
train
false
7,210
def clean_fsbackend(opts): for backend in ('git', 'hg', 'svn'): if (backend in opts['fileserver_backend']): env_cache = os.path.join(opts['cachedir'], '{0}fs'.format(backend), 'envs.p') if os.path.isfile(env_cache): log.debug('Clearing {0}fs env cache'.format(backend)) try: os.remove(env_cache) except OSError as exc: log.critical('Unable to clear env cache file {0}: {1}'.format(env_cache, exc)) file_lists_dir = os.path.join(opts['cachedir'], 'file_lists', '{0}fs'.format(backend)) try: file_lists_caches = os.listdir(file_lists_dir) except OSError: continue for file_lists_cache in fnmatch.filter(file_lists_caches, '*.p'): cache_file = os.path.join(file_lists_dir, file_lists_cache) try: os.remove(cache_file) except OSError as exc: log.critical('Unable to file_lists cache file {0}: {1}'.format(cache_file, exc))
[ "def", "clean_fsbackend", "(", "opts", ")", ":", "for", "backend", "in", "(", "'git'", ",", "'hg'", ",", "'svn'", ")", ":", "if", "(", "backend", "in", "opts", "[", "'fileserver_backend'", "]", ")", ":", "env_cache", "=", "os", ".", "path", ".", "join", "(", "opts", "[", "'cachedir'", "]", ",", "'{0}fs'", ".", "format", "(", "backend", ")", ",", "'envs.p'", ")", "if", "os", ".", "path", ".", "isfile", "(", "env_cache", ")", ":", "log", ".", "debug", "(", "'Clearing {0}fs env cache'", ".", "format", "(", "backend", ")", ")", "try", ":", "os", ".", "remove", "(", "env_cache", ")", "except", "OSError", "as", "exc", ":", "log", ".", "critical", "(", "'Unable to clear env cache file {0}: {1}'", ".", "format", "(", "env_cache", ",", "exc", ")", ")", "file_lists_dir", "=", "os", ".", "path", ".", "join", "(", "opts", "[", "'cachedir'", "]", ",", "'file_lists'", ",", "'{0}fs'", ".", "format", "(", "backend", ")", ")", "try", ":", "file_lists_caches", "=", "os", ".", "listdir", "(", "file_lists_dir", ")", "except", "OSError", ":", "continue", "for", "file_lists_cache", "in", "fnmatch", ".", "filter", "(", "file_lists_caches", ",", "'*.p'", ")", ":", "cache_file", "=", "os", ".", "path", ".", "join", "(", "file_lists_dir", ",", "file_lists_cache", ")", "try", ":", "os", ".", "remove", "(", "cache_file", ")", "except", "OSError", "as", "exc", ":", "log", ".", "critical", "(", "'Unable to file_lists cache file {0}: {1}'", ".", "format", "(", "cache_file", ",", "exc", ")", ")" ]
clean out the old fileserver backends .
train
true
7,213
@register.simple_tag def get_gravatar(email, size=80, rating='g', default=None, protocol=PROTOCOL): gravatar_protocols = {'http': 'http://www', 'https': 'https://secure'} url = ('%s.gravatar.com/avatar/%s' % (gravatar_protocols[protocol], md5(email.strip().lower().encode('utf-8')).hexdigest())) options = {'s': size, 'r': rating} if default: options['d'] = default url = ('%s?%s' % (url, urlencode(options))) return url.replace('&', '&amp;')
[ "@", "register", ".", "simple_tag", "def", "get_gravatar", "(", "email", ",", "size", "=", "80", ",", "rating", "=", "'g'", ",", "default", "=", "None", ",", "protocol", "=", "PROTOCOL", ")", ":", "gravatar_protocols", "=", "{", "'http'", ":", "'http://www'", ",", "'https'", ":", "'https://secure'", "}", "url", "=", "(", "'%s.gravatar.com/avatar/%s'", "%", "(", "gravatar_protocols", "[", "protocol", "]", ",", "md5", "(", "email", ".", "strip", "(", ")", ".", "lower", "(", ")", ".", "encode", "(", "'utf-8'", ")", ")", ".", "hexdigest", "(", ")", ")", ")", "options", "=", "{", "'s'", ":", "size", ",", "'r'", ":", "rating", "}", "if", "default", ":", "options", "[", "'d'", "]", "=", "default", "url", "=", "(", "'%s?%s'", "%", "(", "url", ",", "urlencode", "(", "options", ")", ")", ")", "return", "url", ".", "replace", "(", "'&'", ",", "'&amp;'", ")" ]
return url for a gravatar .
train
true
7,215
def addIntrospection(xmlrpc): xmlrpc.putSubHandler('system', XMLRPCIntrospection(xmlrpc))
[ "def", "addIntrospection", "(", "xmlrpc", ")", ":", "xmlrpc", ".", "putSubHandler", "(", "'system'", ",", "XMLRPCIntrospection", "(", "xmlrpc", ")", ")" ]
add introspection support to an xmlrpc server .
train
false
7,216
def compute_node_get_all_by_host(context, host): return IMPL.compute_node_get_all_by_host(context, host)
[ "def", "compute_node_get_all_by_host", "(", "context", ",", "host", ")", ":", "return", "IMPL", ".", "compute_node_get_all_by_host", "(", "context", ",", "host", ")" ]
get compute nodes by host name .
train
false
7,217
def _pdfjs_version(): try: (pdfjs_file, file_path) = pdfjs.get_pdfjs_res_and_path('build/pdf.js') except pdfjs.PDFJSNotFound: return 'no' else: pdfjs_file = pdfjs_file.decode('utf-8') version_re = re.compile("^(PDFJS\\.version|var pdfjsVersion) = '([^']+)';$", re.MULTILINE) match = version_re.search(pdfjs_file) if (not match): pdfjs_version = 'unknown' else: pdfjs_version = match.group(2) if (file_path is None): file_path = 'bundled' return '{} ({})'.format(pdfjs_version, file_path)
[ "def", "_pdfjs_version", "(", ")", ":", "try", ":", "(", "pdfjs_file", ",", "file_path", ")", "=", "pdfjs", ".", "get_pdfjs_res_and_path", "(", "'build/pdf.js'", ")", "except", "pdfjs", ".", "PDFJSNotFound", ":", "return", "'no'", "else", ":", "pdfjs_file", "=", "pdfjs_file", ".", "decode", "(", "'utf-8'", ")", "version_re", "=", "re", ".", "compile", "(", "\"^(PDFJS\\\\.version|var pdfjsVersion) = '([^']+)';$\"", ",", "re", ".", "MULTILINE", ")", "match", "=", "version_re", ".", "search", "(", "pdfjs_file", ")", "if", "(", "not", "match", ")", ":", "pdfjs_version", "=", "'unknown'", "else", ":", "pdfjs_version", "=", "match", ".", "group", "(", "2", ")", "if", "(", "file_path", "is", "None", ")", ":", "file_path", "=", "'bundled'", "return", "'{} ({})'", ".", "format", "(", "pdfjs_version", ",", "file_path", ")" ]
get the pdf .
train
false
7,218
def auto_kwargs(function): supported = introspect.arguments(function) @wraps(function) def call_function(*args, **kwargs): return function(*args, **{key: value for (key, value) in kwargs.items() if (key in supported)}) return call_function
[ "def", "auto_kwargs", "(", "function", ")", ":", "supported", "=", "introspect", ".", "arguments", "(", "function", ")", "@", "wraps", "(", "function", ")", "def", "call_function", "(", "*", "args", ",", "**", "kwargs", ")", ":", "return", "function", "(", "*", "args", ",", "**", "{", "key", ":", "value", "for", "(", "key", ",", "value", ")", "in", "kwargs", ".", "items", "(", ")", "if", "(", "key", "in", "supported", ")", "}", ")", "return", "call_function" ]
modifies the provided function to support kwargs by only passing along kwargs for parameters it accepts .
train
true
7,220
def binscope(sample, signature): _check_challenge(signature) binscope_path = [config['binscope']['file']] target = [(config['MobSF']['samples'] + sample)] out_type = ['/Red', '/v'] output = ['/l', (target[0] + '_binscope')] checks = ['/Checks', 'ATLVersionCheck', '/Checks', 'ATLVulnCheck', '/Checks', 'AppContainerCheck', '/Checks', 'CompilerVersionCheck', '/Checks', 'DBCheck', '/Checks', 'DefaultGSCookieCheck', '/Checks', 'ExecutableImportsCheck', '/Checks', 'FunctionPointersCheck', '/Checks', 'GSCheck', '/Checks', 'GSFriendlyInitCheck', '/Checks', 'GSFunctionSafeBuffersCheck', '/Checks', 'HighEntropyVACheck', '/Checks', 'NXCheck', '/Checks', 'RSA32Check', '/Checks', 'SafeSEHCheck', '/Checks', 'SharedSectionCheck', '/Checks', 'VB6Check', '/Checks', 'WXCheck'] params = ((((binscope_path + target) + out_type) + output) + checks) p = subprocess.Popen(subprocess.list2cmdline(params)) p.wait() f = open(output[1]) return f.read()
[ "def", "binscope", "(", "sample", ",", "signature", ")", ":", "_check_challenge", "(", "signature", ")", "binscope_path", "=", "[", "config", "[", "'binscope'", "]", "[", "'file'", "]", "]", "target", "=", "[", "(", "config", "[", "'MobSF'", "]", "[", "'samples'", "]", "+", "sample", ")", "]", "out_type", "=", "[", "'/Red'", ",", "'/v'", "]", "output", "=", "[", "'/l'", ",", "(", "target", "[", "0", "]", "+", "'_binscope'", ")", "]", "checks", "=", "[", "'/Checks'", ",", "'ATLVersionCheck'", ",", "'/Checks'", ",", "'ATLVulnCheck'", ",", "'/Checks'", ",", "'AppContainerCheck'", ",", "'/Checks'", ",", "'CompilerVersionCheck'", ",", "'/Checks'", ",", "'DBCheck'", ",", "'/Checks'", ",", "'DefaultGSCookieCheck'", ",", "'/Checks'", ",", "'ExecutableImportsCheck'", ",", "'/Checks'", ",", "'FunctionPointersCheck'", ",", "'/Checks'", ",", "'GSCheck'", ",", "'/Checks'", ",", "'GSFriendlyInitCheck'", ",", "'/Checks'", ",", "'GSFunctionSafeBuffersCheck'", ",", "'/Checks'", ",", "'HighEntropyVACheck'", ",", "'/Checks'", ",", "'NXCheck'", ",", "'/Checks'", ",", "'RSA32Check'", ",", "'/Checks'", ",", "'SafeSEHCheck'", ",", "'/Checks'", ",", "'SharedSectionCheck'", ",", "'/Checks'", ",", "'VB6Check'", ",", "'/Checks'", ",", "'WXCheck'", "]", "params", "=", "(", "(", "(", "(", "binscope_path", "+", "target", ")", "+", "out_type", ")", "+", "output", ")", "+", "checks", ")", "p", "=", "subprocess", ".", "Popen", "(", "subprocess", ".", "list2cmdline", "(", "params", ")", ")", "p", ".", "wait", "(", ")", "f", "=", "open", "(", "output", "[", "1", "]", ")", "return", "f", ".", "read", "(", ")" ]
run binscope against an sample file .
train
false
7,221
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
set the application wide proxy factory .
train
false
7,222
def port_get_tag(port): cmd = 'ovs-vsctl get port {0} tag'.format(port) result = __salt__['cmd.run_all'](cmd) retcode = result['retcode'] stdout = result['stdout'] return _stdout_list_split(retcode, stdout)
[ "def", "port_get_tag", "(", "port", ")", ":", "cmd", "=", "'ovs-vsctl get port {0} tag'", ".", "format", "(", "port", ")", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ")", "retcode", "=", "result", "[", "'retcode'", "]", "stdout", "=", "result", "[", "'stdout'", "]", "return", "_stdout_list_split", "(", "retcode", ",", "stdout", ")" ]
lists tags of the port .
train
true
7,223
def save_minions(jid, minions, syndic_id=None): cb_ = _get_connection() try: jid_doc = cb_.get(str(jid)) except couchbase.exceptions.NotFoundError: log.warning('Could not write job cache file for jid: {0}'.format(jid)) return False if ('minions' in jid_doc.value): jid_doc.value['minions'] = sorted(set((jid_doc.value['minions'] + minions))) else: jid_doc.value['minions'] = minions cb_.replace(str(jid), jid_doc.value, cas=jid_doc.cas, ttl=_get_ttl())
[ "def", "save_minions", "(", "jid", ",", "minions", ",", "syndic_id", "=", "None", ")", ":", "cb_", "=", "_get_connection", "(", ")", "try", ":", "jid_doc", "=", "cb_", ".", "get", "(", "str", "(", "jid", ")", ")", "except", "couchbase", ".", "exceptions", ".", "NotFoundError", ":", "log", ".", "warning", "(", "'Could not write job cache file for jid: {0}'", ".", "format", "(", "jid", ")", ")", "return", "False", "if", "(", "'minions'", "in", "jid_doc", ".", "value", ")", ":", "jid_doc", ".", "value", "[", "'minions'", "]", "=", "sorted", "(", "set", "(", "(", "jid_doc", ".", "value", "[", "'minions'", "]", "+", "minions", ")", ")", ")", "else", ":", "jid_doc", ".", "value", "[", "'minions'", "]", "=", "minions", "cb_", ".", "replace", "(", "str", "(", "jid", ")", ",", "jid_doc", ".", "value", ",", "cas", "=", "jid_doc", ".", "cas", ",", "ttl", "=", "_get_ttl", "(", ")", ")" ]
included for api consistency .
train
true
7,225
def mimetype(): return Rebulk().rules(Mimetype)
[ "def", "mimetype", "(", ")", ":", "return", "Rebulk", "(", ")", ".", "rules", "(", "Mimetype", ")" ]
function to generate a blaze serialization format mimetype put into a dictionary of headers for consumption by requests .
train
false
7,226
def str642int(string): if (not ((type(string) is types.ListType) or (type(string) is types.StringType))): raise TypeError('You must pass a string or a list') integer = 0 for byte in string: integer *= 64 if (type(byte) is types.StringType): byte = ord(byte) integer += from64(byte) return integer
[ "def", "str642int", "(", "string", ")", ":", "if", "(", "not", "(", "(", "type", "(", "string", ")", "is", "types", ".", "ListType", ")", "or", "(", "type", "(", "string", ")", "is", "types", ".", "StringType", ")", ")", ")", ":", "raise", "TypeError", "(", "'You must pass a string or a list'", ")", "integer", "=", "0", "for", "byte", "in", "string", ":", "integer", "*=", "64", "if", "(", "type", "(", "byte", ")", "is", "types", ".", "StringType", ")", ":", "byte", "=", "ord", "(", "byte", ")", "integer", "+=", "from64", "(", "byte", ")", "return", "integer" ]
converts a base64 encoded string into an integer .
train
false
7,227
def save_environment(directory, cluster, package_source): environment_variables = get_trial_environment(cluster, package_source) environment_strings = list() for environment_variable in environment_variables: environment_strings.append('export {name}={value};\n'.format(name=environment_variable, value=shell_quote(environment_variables[environment_variable]))) environment = ''.join(environment_strings) print 'The following variables describe the cluster:' print environment env_file = directory.child('environment.env') env_file.setContent(environment) print 'The variables are also saved in {}'.format(env_file.path) print 'Be sure to preserve the required files.'
[ "def", "save_environment", "(", "directory", ",", "cluster", ",", "package_source", ")", ":", "environment_variables", "=", "get_trial_environment", "(", "cluster", ",", "package_source", ")", "environment_strings", "=", "list", "(", ")", "for", "environment_variable", "in", "environment_variables", ":", "environment_strings", ".", "append", "(", "'export {name}={value};\\n'", ".", "format", "(", "name", "=", "environment_variable", ",", "value", "=", "shell_quote", "(", "environment_variables", "[", "environment_variable", "]", ")", ")", ")", "environment", "=", "''", ".", "join", "(", "environment_strings", ")", "print", "'The following variables describe the cluster:'", "print", "environment", "env_file", "=", "directory", ".", "child", "(", "'environment.env'", ")", "env_file", ".", "setContent", "(", "environment", ")", "print", "'The variables are also saved in {}'", ".", "format", "(", "env_file", ".", "path", ")", "print", "'Be sure to preserve the required files.'" ]
report environment variables describing the cluster .
train
false
7,228
def is_string_list(value, min=None, max=None): if isinstance(value, string_type): raise VdtTypeError(value) return [is_string(mem) for mem in is_list(value, min, max)]
[ "def", "is_string_list", "(", "value", ",", "min", "=", "None", ",", "max", "=", "None", ")", ":", "if", "isinstance", "(", "value", ",", "string_type", ")", ":", "raise", "VdtTypeError", "(", "value", ")", "return", "[", "is_string", "(", "mem", ")", "for", "mem", "in", "is_list", "(", "value", ",", "min", ",", "max", ")", "]" ]
check that the value is a list of strings .
train
true
7,229
def dir_find(preferred_path=None, suffix=None, raise_if_missing=True): search_paths = _get_search_paths(preferred_path, suffix) for f in search_paths: if os.path.isdir(f): return f if raise_if_missing: raise MissingDataSource(search_paths) else: return False
[ "def", "dir_find", "(", "preferred_path", "=", "None", ",", "suffix", "=", "None", ",", "raise_if_missing", "=", "True", ")", ":", "search_paths", "=", "_get_search_paths", "(", "preferred_path", ",", "suffix", ")", "for", "f", "in", "search_paths", ":", "if", "os", ".", "path", ".", "isdir", "(", "f", ")", ":", "return", "f", "if", "raise_if_missing", ":", "raise", "MissingDataSource", "(", "search_paths", ")", "else", ":", "return", "False" ]
return the path to the user configuration files .
train
false
7,231
def list_url_unsafe_chars(name): reserved_chars = '' for i in name: if (i in URL_RESERVED_CHARS): reserved_chars += i return reserved_chars
[ "def", "list_url_unsafe_chars", "(", "name", ")", ":", "reserved_chars", "=", "''", "for", "i", "in", "name", ":", "if", "(", "i", "in", "URL_RESERVED_CHARS", ")", ":", "reserved_chars", "+=", "i", "return", "reserved_chars" ]
return a list of the reserved characters .
train
false
7,232
def _ensureDir(dir_name): try: os.makedirs(dir_name) except OSError as why: if ((why.errno != EEXIST) or (not os.path.isdir(dir_name))): raise
[ "def", "_ensureDir", "(", "dir_name", ")", ":", "try", ":", "os", ".", "makedirs", "(", "dir_name", ")", "except", "OSError", "as", "why", ":", "if", "(", "(", "why", ".", "errno", "!=", "EEXIST", ")", "or", "(", "not", "os", ".", "path", ".", "isdir", "(", "dir_name", ")", ")", ")", ":", "raise" ]
create dir_name as a directory if it does not exist .
train
true
7,235
def setup_fast_link_builders(top_env): acquire_temp_place = "df | grep tmpfs | awk '{print $5, $6}'" p = subprocess.Popen(acquire_temp_place, env=os.environ, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, universal_newlines=True) (stdout, stderr) = p.communicate() if p.returncode: console.warning('you have link on tmp enabled, but it is not fullfilled to make it.') return if (not stdout): console.warning('you have link on tmp enabled, but there is no tmpfs to make it.') return global linking_tmp_dir (usage, linking_tmp_dir) = tuple(stdout.splitlines(False)[0].split()) usage = int(usage.replace('%', '')) if (usage > 90): console.warning(('you have link on tmp enabled, but there is not enough space on %s to make it.' % linking_tmp_dir)) return console.info('building in link on tmpfs mode') setup_fast_link_sharelib_builder(top_env) setup_fast_link_prog_builder(top_env)
[ "def", "setup_fast_link_builders", "(", "top_env", ")", ":", "acquire_temp_place", "=", "\"df | grep tmpfs | awk '{print $5, $6}'\"", "p", "=", "subprocess", ".", "Popen", "(", "acquire_temp_place", ",", "env", "=", "os", ".", "environ", ",", "stdout", "=", "subprocess", ".", "PIPE", ",", "stderr", "=", "subprocess", ".", "PIPE", ",", "shell", "=", "True", ",", "universal_newlines", "=", "True", ")", "(", "stdout", ",", "stderr", ")", "=", "p", ".", "communicate", "(", ")", "if", "p", ".", "returncode", ":", "console", ".", "warning", "(", "'you have link on tmp enabled, but it is not fullfilled to make it.'", ")", "return", "if", "(", "not", "stdout", ")", ":", "console", ".", "warning", "(", "'you have link on tmp enabled, but there is no tmpfs to make it.'", ")", "return", "global", "linking_tmp_dir", "(", "usage", ",", "linking_tmp_dir", ")", "=", "tuple", "(", "stdout", ".", "splitlines", "(", "False", ")", "[", "0", "]", ".", "split", "(", ")", ")", "usage", "=", "int", "(", "usage", ".", "replace", "(", "'%'", ",", "''", ")", ")", "if", "(", "usage", ">", "90", ")", ":", "console", ".", "warning", "(", "(", "'you have link on tmp enabled, but there is not enough space on %s to make it.'", "%", "linking_tmp_dir", ")", ")", "return", "console", ".", "info", "(", "'building in link on tmpfs mode'", ")", "setup_fast_link_sharelib_builder", "(", "top_env", ")", "setup_fast_link_prog_builder", "(", "top_env", ")" ]
creates fast link builders - program and sharedlibrary .
train
false
7,236
def test_scharr_h_vertical(): (i, j) = np.mgrid[(-5):6, (-5):6] image = (j >= 0).astype(float) result = filters.scharr_h(image) assert_allclose(result, 0)
[ "def", "test_scharr_h_vertical", "(", ")", ":", "(", "i", ",", "j", ")", "=", "np", ".", "mgrid", "[", "(", "-", "5", ")", ":", "6", ",", "(", "-", "5", ")", ":", "6", "]", "image", "=", "(", "j", ">=", "0", ")", ".", "astype", "(", "float", ")", "result", "=", "filters", ".", "scharr_h", "(", "image", ")", "assert_allclose", "(", "result", ",", "0", ")" ]
horizontal scharr on a vertical edge should be zero .
train
false
7,238
def _render_filenames(path, dest, saltenv, template, **kw): if (not template): return (path, dest) if (template not in salt.utils.templates.TEMPLATE_REGISTRY): raise CommandExecutionError('Attempted to render file paths with unavailable engine {0}'.format(template)) kwargs = {} kwargs['salt'] = __salt__ if (('pillarenv' in kw) or ('pillar' in kw)): pillarenv = kw.get('pillarenv', __opts__.get('pillarenv')) kwargs['pillar'] = _gather_pillar(pillarenv, kw.get('pillar')) else: kwargs['pillar'] = __pillar__ kwargs['grains'] = __grains__ kwargs['opts'] = __opts__ kwargs['saltenv'] = saltenv def _render(contents): '\n Render :param:`contents` into a literal pathname by writing it to a\n temp file, rendering that file, and returning the result.\n ' tmp_path_fn = salt.utils.files.mkstemp() with salt.utils.fopen(tmp_path_fn, 'w+') as fp_: fp_.write(contents) data = salt.utils.templates.TEMPLATE_REGISTRY[template](tmp_path_fn, to_str=True, **kwargs) salt.utils.safe_rm(tmp_path_fn) if (not data['result']): raise CommandExecutionError('Failed to render file path with error: {0}'.format(data['data'])) else: return data['data'] path = _render(path) dest = _render(dest) return (path, dest)
[ "def", "_render_filenames", "(", "path", ",", "dest", ",", "saltenv", ",", "template", ",", "**", "kw", ")", ":", "if", "(", "not", "template", ")", ":", "return", "(", "path", ",", "dest", ")", "if", "(", "template", "not", "in", "salt", ".", "utils", ".", "templates", ".", "TEMPLATE_REGISTRY", ")", ":", "raise", "CommandExecutionError", "(", "'Attempted to render file paths with unavailable engine {0}'", ".", "format", "(", "template", ")", ")", "kwargs", "=", "{", "}", "kwargs", "[", "'salt'", "]", "=", "__salt__", "if", "(", "(", "'pillarenv'", "in", "kw", ")", "or", "(", "'pillar'", "in", "kw", ")", ")", ":", "pillarenv", "=", "kw", ".", "get", "(", "'pillarenv'", ",", "__opts__", ".", "get", "(", "'pillarenv'", ")", ")", "kwargs", "[", "'pillar'", "]", "=", "_gather_pillar", "(", "pillarenv", ",", "kw", ".", "get", "(", "'pillar'", ")", ")", "else", ":", "kwargs", "[", "'pillar'", "]", "=", "__pillar__", "kwargs", "[", "'grains'", "]", "=", "__grains__", "kwargs", "[", "'opts'", "]", "=", "__opts__", "kwargs", "[", "'saltenv'", "]", "=", "saltenv", "def", "_render", "(", "contents", ")", ":", "tmp_path_fn", "=", "salt", ".", "utils", ".", "files", ".", "mkstemp", "(", ")", "with", "salt", ".", "utils", ".", "fopen", "(", "tmp_path_fn", ",", "'w+'", ")", "as", "fp_", ":", "fp_", ".", "write", "(", "contents", ")", "data", "=", "salt", ".", "utils", ".", "templates", ".", "TEMPLATE_REGISTRY", "[", "template", "]", "(", "tmp_path_fn", ",", "to_str", "=", "True", ",", "**", "kwargs", ")", "salt", ".", "utils", ".", "safe_rm", "(", "tmp_path_fn", ")", "if", "(", "not", "data", "[", "'result'", "]", ")", ":", "raise", "CommandExecutionError", "(", "'Failed to render file path with error: {0}'", ".", "format", "(", "data", "[", "'data'", "]", ")", ")", "else", ":", "return", "data", "[", "'data'", "]", "path", "=", "_render", "(", "path", ")", "dest", "=", "_render", "(", "dest", ")", "return", "(", "path", ",", "dest", ")" ]
process markup in the .
train
false
7,239
def test_wmape(): skip_if_no_sklearn() trainer = yaml_parse.load(test_yaml) trainer.main_loop() X = trainer.model.get_input_space().make_theano_batch() Y = trainer.model.fprop(X) f = function([X], Y, allow_input_downcast=True) y_hat = f(trainer.dataset.X) wmape_num_exp = abs((trainer.dataset.y - y_hat)).sum() wmape_den_exp = abs(trainer.dataset.y).sum() exp_array = np.asarray([wmape_num_exp, wmape_den_exp]) wmape_num_real = trainer.model.monitor.channels['train_wmape_num'].val_record wmape_den_real = trainer.model.monitor.channels['train_wmape_den'].val_record real_array = np.asarray([wmape_num_real[(-1)], wmape_den_real[(-1)]]) assert_allclose(exp_array, real_array)
[ "def", "test_wmape", "(", ")", ":", "skip_if_no_sklearn", "(", ")", "trainer", "=", "yaml_parse", ".", "load", "(", "test_yaml", ")", "trainer", ".", "main_loop", "(", ")", "X", "=", "trainer", ".", "model", ".", "get_input_space", "(", ")", ".", "make_theano_batch", "(", ")", "Y", "=", "trainer", ".", "model", ".", "fprop", "(", "X", ")", "f", "=", "function", "(", "[", "X", "]", ",", "Y", ",", "allow_input_downcast", "=", "True", ")", "y_hat", "=", "f", "(", "trainer", ".", "dataset", ".", "X", ")", "wmape_num_exp", "=", "abs", "(", "(", "trainer", ".", "dataset", ".", "y", "-", "y_hat", ")", ")", ".", "sum", "(", ")", "wmape_den_exp", "=", "abs", "(", "trainer", ".", "dataset", ".", "y", ")", ".", "sum", "(", ")", "exp_array", "=", "np", ".", "asarray", "(", "[", "wmape_num_exp", ",", "wmape_den_exp", "]", ")", "wmape_num_real", "=", "trainer", ".", "model", ".", "monitor", ".", "channels", "[", "'train_wmape_num'", "]", ".", "val_record", "wmape_den_real", "=", "trainer", ".", "model", ".", "monitor", ".", "channels", "[", "'train_wmape_den'", "]", ".", "val_record", "real_array", "=", "np", ".", "asarray", "(", "[", "wmape_num_real", "[", "(", "-", "1", ")", "]", ",", "wmape_den_real", "[", "(", "-", "1", ")", "]", "]", ")", "assert_allclose", "(", "exp_array", ",", "real_array", ")" ]
test wmapechannel .
train
false
7,240
@login_required @require_http_methods(['GET', 'POST']) def delete_avatar(request, group_slug): prof = get_object_or_404(GroupProfile, slug=group_slug) if (not _user_can_edit(request.user, prof)): raise PermissionDenied if (request.method == 'POST'): if prof.avatar: prof.avatar.delete() return HttpResponseRedirect(prof.get_absolute_url()) return render(request, 'groups/confirm_avatar_delete.html', {'profile': prof})
[ "@", "login_required", "@", "require_http_methods", "(", "[", "'GET'", ",", "'POST'", "]", ")", "def", "delete_avatar", "(", "request", ",", "group_slug", ")", ":", "prof", "=", "get_object_or_404", "(", "GroupProfile", ",", "slug", "=", "group_slug", ")", "if", "(", "not", "_user_can_edit", "(", "request", ".", "user", ",", "prof", ")", ")", ":", "raise", "PermissionDenied", "if", "(", "request", ".", "method", "==", "'POST'", ")", ":", "if", "prof", ".", "avatar", ":", "prof", ".", "avatar", ".", "delete", "(", ")", "return", "HttpResponseRedirect", "(", "prof", ".", "get_absolute_url", "(", ")", ")", "return", "render", "(", "request", ",", "'groups/confirm_avatar_delete.html'", ",", "{", "'profile'", ":", "prof", "}", ")" ]
delete user avatar .
train
false
7,241
def track_movement(): table = s3db.inv_track_item s3db.configure('inv_track_item', create=False, deletable=False, editable=False, listadd=False) def prep(r): if r.interactive: if ('viewing' in get_vars): (dummy, item_id) = get_vars.viewing.split('.') if (item_id != 'None'): query = ((table.send_inv_item_id == item_id) | (table.recv_inv_item_id == item_id)) r.resource.add_filter(query) return True s3.prep = prep output = s3_rest_controller('inv', 'track_item', rheader=s3db.inv_rheader) if ('add_btn' in output): del output['add_btn'] return output
[ "def", "track_movement", "(", ")", ":", "table", "=", "s3db", ".", "inv_track_item", "s3db", ".", "configure", "(", "'inv_track_item'", ",", "create", "=", "False", ",", "deletable", "=", "False", ",", "editable", "=", "False", ",", "listadd", "=", "False", ")", "def", "prep", "(", "r", ")", ":", "if", "r", ".", "interactive", ":", "if", "(", "'viewing'", "in", "get_vars", ")", ":", "(", "dummy", ",", "item_id", ")", "=", "get_vars", ".", "viewing", ".", "split", "(", "'.'", ")", "if", "(", "item_id", "!=", "'None'", ")", ":", "query", "=", "(", "(", "table", ".", "send_inv_item_id", "==", "item_id", ")", "|", "(", "table", ".", "recv_inv_item_id", "==", "item_id", ")", ")", "r", ".", "resource", ".", "add_filter", "(", "query", ")", "return", "True", "s3", ".", "prep", "=", "prep", "output", "=", "s3_rest_controller", "(", "'inv'", ",", "'track_item'", ",", "rheader", "=", "s3db", ".", "inv_rheader", ")", "if", "(", "'add_btn'", "in", "output", ")", ":", "del", "output", "[", "'add_btn'", "]", "return", "output" ]
rest controller .
train
false
7,242
def create_simple_html_directive(name, pre, post, has_content=True, match_titles=False): node_class = type(name.replace('-', '_'), (nodes.General, nodes.Element), {}) def visit_html(self, node): self.body.append(pre) def depart_html(self, node): self.body.append(post) def run_directive(self): node = node_class() if has_content: text = self.content self.state.nested_parse(text, self.content_offset, node, match_titles=match_titles) self.state.document.settings.record_dependencies.add(__file__) return [node] directive_class = type((name.title() + 'Directive'), (Directive,), {'has_content': has_content, 'run': run_directive}) def setup(app): app.add_node(node_class, html=(visit_html, depart_html)) app.add_directive(name, directive_class) return (node_class, directive_class, setup)
[ "def", "create_simple_html_directive", "(", "name", ",", "pre", ",", "post", ",", "has_content", "=", "True", ",", "match_titles", "=", "False", ")", ":", "node_class", "=", "type", "(", "name", ".", "replace", "(", "'-'", ",", "'_'", ")", ",", "(", "nodes", ".", "General", ",", "nodes", ".", "Element", ")", ",", "{", "}", ")", "def", "visit_html", "(", "self", ",", "node", ")", ":", "self", ".", "body", ".", "append", "(", "pre", ")", "def", "depart_html", "(", "self", ",", "node", ")", ":", "self", ".", "body", ".", "append", "(", "post", ")", "def", "run_directive", "(", "self", ")", ":", "node", "=", "node_class", "(", ")", "if", "has_content", ":", "text", "=", "self", ".", "content", "self", ".", "state", ".", "nested_parse", "(", "text", ",", "self", ".", "content_offset", ",", "node", ",", "match_titles", "=", "match_titles", ")", "self", ".", "state", ".", "document", ".", "settings", ".", "record_dependencies", ".", "add", "(", "__file__", ")", "return", "[", "node", "]", "directive_class", "=", "type", "(", "(", "name", ".", "title", "(", ")", "+", "'Directive'", ")", ",", "(", "Directive", ",", ")", ",", "{", "'has_content'", ":", "has_content", ",", "'run'", ":", "run_directive", "}", ")", "def", "setup", "(", "app", ")", ":", "app", ".", "add_node", "(", "node_class", ",", "html", "=", "(", "visit_html", ",", "depart_html", ")", ")", "app", ".", "add_directive", "(", "name", ",", "directive_class", ")", "return", "(", "node_class", ",", "directive_class", ",", "setup", ")" ]
creates a node class .
train
false
7,244
def assert_wolfe(s, phi, derphi, c1=0.0001, c2=0.9, err_msg=''): phi1 = phi(s) phi0 = phi(0) derphi0 = derphi(0) derphi1 = derphi(s) msg = ("s = %s; phi(0) = %s; phi(s) = %s; phi'(0) = %s; phi'(s) = %s; %s" % (s, phi0, phi1, derphi0, derphi1, err_msg)) assert_((phi1 <= (phi0 + ((c1 * s) * derphi0))), ('Wolfe 1 failed: ' + msg)) assert_((abs(derphi1) <= abs((c2 * derphi0))), ('Wolfe 2 failed: ' + msg))
[ "def", "assert_wolfe", "(", "s", ",", "phi", ",", "derphi", ",", "c1", "=", "0.0001", ",", "c2", "=", "0.9", ",", "err_msg", "=", "''", ")", ":", "phi1", "=", "phi", "(", "s", ")", "phi0", "=", "phi", "(", "0", ")", "derphi0", "=", "derphi", "(", "0", ")", "derphi1", "=", "derphi", "(", "s", ")", "msg", "=", "(", "\"s = %s; phi(0) = %s; phi(s) = %s; phi'(0) = %s; phi'(s) = %s; %s\"", "%", "(", "s", ",", "phi0", ",", "phi1", ",", "derphi0", ",", "derphi1", ",", "err_msg", ")", ")", "assert_", "(", "(", "phi1", "<=", "(", "phi0", "+", "(", "(", "c1", "*", "s", ")", "*", "derphi0", ")", ")", ")", ",", "(", "'Wolfe 1 failed: '", "+", "msg", ")", ")", "assert_", "(", "(", "abs", "(", "derphi1", ")", "<=", "abs", "(", "(", "c2", "*", "derphi0", ")", ")", ")", ",", "(", "'Wolfe 2 failed: '", "+", "msg", ")", ")" ]
check that strong wolfe conditions apply .
train
false
7,245
def retention_policy_alter(database, name, duration, replication, default=False, user=None, password=None, host=None, port=None): client = _client(user=user, password=password, host=host, port=port) client.alter_retention_policy(name, database, duration, replication, default) return True
[ "def", "retention_policy_alter", "(", "database", ",", "name", ",", "duration", ",", "replication", ",", "default", "=", "False", ",", "user", "=", "None", ",", "password", "=", "None", ",", "host", "=", "None", ",", "port", "=", "None", ")", ":", "client", "=", "_client", "(", "user", "=", "user", ",", "password", "=", "password", ",", "host", "=", "host", ",", "port", "=", "port", ")", "client", ".", "alter_retention_policy", "(", "name", ",", "database", ",", "duration", ",", "replication", ",", "default", ")", "return", "True" ]
modify an existing retention policy .
train
true
7,246
def allow_ports(ports, proto='tcp', direction='in'): results = [] ports = set(ports) ports = list(ports) proto = proto.upper() direction = direction.upper() _validate_direction_and_proto(direction, proto) ports_csv = ','.join(map(str, ports)) directions = build_directions(direction) for direction in directions: result = __salt__['file.replace']('/etc/csf/csf.conf', pattern='^{0}_{1}(\\ +)?\\=(\\ +)?".*"$'.format(proto, direction), repl='{0}_{1} = "{2}"'.format(proto, direction, ports_csv)) results.append(result) return results
[ "def", "allow_ports", "(", "ports", ",", "proto", "=", "'tcp'", ",", "direction", "=", "'in'", ")", ":", "results", "=", "[", "]", "ports", "=", "set", "(", "ports", ")", "ports", "=", "list", "(", "ports", ")", "proto", "=", "proto", ".", "upper", "(", ")", "direction", "=", "direction", ".", "upper", "(", ")", "_validate_direction_and_proto", "(", "direction", ",", "proto", ")", "ports_csv", "=", "','", ".", "join", "(", "map", "(", "str", ",", "ports", ")", ")", "directions", "=", "build_directions", "(", "direction", ")", "for", "direction", "in", "directions", ":", "result", "=", "__salt__", "[", "'file.replace'", "]", "(", "'/etc/csf/csf.conf'", ",", "pattern", "=", "'^{0}_{1}(\\\\ +)?\\\\=(\\\\ +)?\".*\"$'", ".", "format", "(", "proto", ",", "direction", ")", ",", "repl", "=", "'{0}_{1} = \"{2}\"'", ".", "format", "(", "proto", ",", "direction", ",", "ports_csv", ")", ")", "results", ".", "append", "(", "result", ")", "return", "results" ]
fully replace the incoming or outgoing ports line in the csf .
train
true
7,247
def modifyReject(LowLayerCompatibility_presence=0, HighLayerCompatibility_presence=0): a = TpPd(pd=3) b = MessageType(mesType=19) c = BearerCapability() d = Cause() packet = (((a / b) / c) / d) if (LowLayerCompatibility_presence is 1): e = LowLayerCompatibilityHdr(ieiLLC=124, eightBitLLC=0) packet = (packet / e) if (HighLayerCompatibility_presence is 1): f = HighLayerCompatibilityHdr(ieiHLC=125, eightBitHLC=0) packet = (packet / f) return packet
[ "def", "modifyReject", "(", "LowLayerCompatibility_presence", "=", "0", ",", "HighLayerCompatibility_presence", "=", "0", ")", ":", "a", "=", "TpPd", "(", "pd", "=", "3", ")", "b", "=", "MessageType", "(", "mesType", "=", "19", ")", "c", "=", "BearerCapability", "(", ")", "d", "=", "Cause", "(", ")", "packet", "=", "(", "(", "(", "a", "/", "b", ")", "/", "c", ")", "/", "d", ")", "if", "(", "LowLayerCompatibility_presence", "is", "1", ")", ":", "e", "=", "LowLayerCompatibilityHdr", "(", "ieiLLC", "=", "124", ",", "eightBitLLC", "=", "0", ")", "packet", "=", "(", "packet", "/", "e", ")", "if", "(", "HighLayerCompatibility_presence", "is", "1", ")", ":", "f", "=", "HighLayerCompatibilityHdr", "(", "ieiHLC", "=", "125", ",", "eightBitHLC", "=", "0", ")", "packet", "=", "(", "packet", "/", "f", ")", "return", "packet" ]
modify reject section 9 .
train
true
7,249
def addrow(row1, row2, K): return [(element1 + element2) for (element1, element2) in zip(row1, row2)]
[ "def", "addrow", "(", "row1", ",", "row2", ",", "K", ")", ":", "return", "[", "(", "element1", "+", "element2", ")", "for", "(", "element1", ",", "element2", ")", "in", "zip", "(", "row1", ",", "row2", ")", "]" ]
adds two rows of a matrix element-wise .
train
false
7,251
def elementtree_indent(elem, level=0): i = ('\n' + (level * ' ')) if len(elem): if ((not elem.text) or (not elem.text.strip())): elem.text = (i + ' ') for elem in elem: elementtree_indent(elem, (level + 1)) if ((not elem.tail) or (not elem.tail.strip())): elem.tail = i elif (level and ((not elem.tail) or (not elem.tail.strip()))): elem.tail = i
[ "def", "elementtree_indent", "(", "elem", ",", "level", "=", "0", ")", ":", "i", "=", "(", "'\\n'", "+", "(", "level", "*", "' '", ")", ")", "if", "len", "(", "elem", ")", ":", "if", "(", "(", "not", "elem", ".", "text", ")", "or", "(", "not", "elem", ".", "text", ".", "strip", "(", ")", ")", ")", ":", "elem", ".", "text", "=", "(", "i", "+", "' '", ")", "for", "elem", "in", "elem", ":", "elementtree_indent", "(", "elem", ",", "(", "level", "+", "1", ")", ")", "if", "(", "(", "not", "elem", ".", "tail", ")", "or", "(", "not", "elem", ".", "tail", ".", "strip", "(", ")", ")", ")", ":", "elem", ".", "tail", "=", "i", "elif", "(", "level", "and", "(", "(", "not", "elem", ".", "tail", ")", "or", "(", "not", "elem", ".", "tail", ".", "strip", "(", ")", ")", ")", ")", ":", "elem", ".", "tail", "=", "i" ]
recursive function to indent an elementtree .
train
false
7,252
def backends(user): return user_backends_data(user, get_helper('AUTHENTICATION_BACKENDS'), get_helper('STORAGE', do_import=True))
[ "def", "backends", "(", "user", ")", ":", "return", "user_backends_data", "(", "user", ",", "get_helper", "(", "'AUTHENTICATION_BACKENDS'", ")", ",", "get_helper", "(", "'STORAGE'", ",", "do_import", "=", "True", ")", ")" ]
load social auth current user data to context under the key backends .
train
false
7,253
@must_be_valid_project @must_not_be_registration @must_have_permission(WRITE) def project_reorder_components(node, **kwargs): NodeRelation = apps.get_model('osf.NodeRelation') new_node_guids = [each.split(':') for each in request.get_json().get('new_list', [])] new_node_relation_ids = [(NodeRelation.load(id_).id if (type_ == 'pointer') else node.node_relations.get(child__guids___id=id_).id) for (id_, type_) in new_node_guids] node_relations = node.node_relations.select_related('child').all() valid_node_relation_ids = [each.id for each in node_relations if (not each.child.is_deleted)] deleted_node_relation_ids = [each.id for each in node_relations if each.child.is_deleted] if ((len(valid_node_relation_ids) == len(new_node_guids)) and (set(valid_node_relation_ids) == set(new_node_relation_ids))): node.set_noderelation_order((new_node_relation_ids + deleted_node_relation_ids)) node.save() return {} logger.error('Got invalid node list in reorder components') raise HTTPError(http.BAD_REQUEST)
[ "@", "must_be_valid_project", "@", "must_not_be_registration", "@", "must_have_permission", "(", "WRITE", ")", "def", "project_reorder_components", "(", "node", ",", "**", "kwargs", ")", ":", "NodeRelation", "=", "apps", ".", "get_model", "(", "'osf.NodeRelation'", ")", "new_node_guids", "=", "[", "each", ".", "split", "(", "':'", ")", "for", "each", "in", "request", ".", "get_json", "(", ")", ".", "get", "(", "'new_list'", ",", "[", "]", ")", "]", "new_node_relation_ids", "=", "[", "(", "NodeRelation", ".", "load", "(", "id_", ")", ".", "id", "if", "(", "type_", "==", "'pointer'", ")", "else", "node", ".", "node_relations", ".", "get", "(", "child__guids___id", "=", "id_", ")", ".", "id", ")", "for", "(", "id_", ",", "type_", ")", "in", "new_node_guids", "]", "node_relations", "=", "node", ".", "node_relations", ".", "select_related", "(", "'child'", ")", ".", "all", "(", ")", "valid_node_relation_ids", "=", "[", "each", ".", "id", "for", "each", "in", "node_relations", "if", "(", "not", "each", ".", "child", ".", "is_deleted", ")", "]", "deleted_node_relation_ids", "=", "[", "each", ".", "id", "for", "each", "in", "node_relations", "if", "each", ".", "child", ".", "is_deleted", "]", "if", "(", "(", "len", "(", "valid_node_relation_ids", ")", "==", "len", "(", "new_node_guids", ")", ")", "and", "(", "set", "(", "valid_node_relation_ids", ")", "==", "set", "(", "new_node_relation_ids", ")", ")", ")", ":", "node", ".", "set_noderelation_order", "(", "(", "new_node_relation_ids", "+", "deleted_node_relation_ids", ")", ")", "node", ".", "save", "(", ")", "return", "{", "}", "logger", ".", "error", "(", "'Got invalid node list in reorder components'", ")", "raise", "HTTPError", "(", "http", ".", "BAD_REQUEST", ")" ]
reorders the components in a projects component list .
train
false
7,254
def resources_from_path(instance, path): if ('.' in path): path = path.split('.') else: path = [path] seen = set() nextlevel = set([instance]) first_time = True while nextlevel: thislevel = nextlevel nextlevel = set() if path: relation = path.pop(0) else: relation = None for resource in thislevel: if (resource in seen): continue if first_time: first_time = False else: (yield resource) seen.add(resource) if (relation is not None): if is_like_list(resource, relation): update = nextlevel.update else: update = nextlevel.add update(getattr(resource, relation))
[ "def", "resources_from_path", "(", "instance", ",", "path", ")", ":", "if", "(", "'.'", "in", "path", ")", ":", "path", "=", "path", ".", "split", "(", "'.'", ")", "else", ":", "path", "=", "[", "path", "]", "seen", "=", "set", "(", ")", "nextlevel", "=", "set", "(", "[", "instance", "]", ")", "first_time", "=", "True", "while", "nextlevel", ":", "thislevel", "=", "nextlevel", "nextlevel", "=", "set", "(", ")", "if", "path", ":", "relation", "=", "path", ".", "pop", "(", "0", ")", "else", ":", "relation", "=", "None", "for", "resource", "in", "thislevel", ":", "if", "(", "resource", "in", "seen", ")", ":", "continue", "if", "first_time", ":", "first_time", "=", "False", "else", ":", "(", "yield", "resource", ")", "seen", ".", "add", "(", "resource", ")", "if", "(", "relation", "is", "not", "None", ")", ":", "if", "is_like_list", "(", "resource", ",", "relation", ")", ":", "update", "=", "nextlevel", ".", "update", "else", ":", "update", "=", "nextlevel", ".", "add", "update", "(", "getattr", "(", "resource", ",", "relation", ")", ")" ]
returns an iterable of all resources along the given relationship path for the specified instance of the model .
train
false
7,256
def prompt_and_delete_repo(repo_dir, no_input=False): if no_input: ok_to_delete = True else: question = u"You've cloned {} before. Is it okay to delete and re-clone it?".format(repo_dir) ok_to_delete = read_user_yes_no(question, u'yes') if ok_to_delete: rmtree(repo_dir) else: sys.exit()
[ "def", "prompt_and_delete_repo", "(", "repo_dir", ",", "no_input", "=", "False", ")", ":", "if", "no_input", ":", "ok_to_delete", "=", "True", "else", ":", "question", "=", "u\"You've cloned {} before. Is it okay to delete and re-clone it?\"", ".", "format", "(", "repo_dir", ")", "ok_to_delete", "=", "read_user_yes_no", "(", "question", ",", "u'yes'", ")", "if", "ok_to_delete", ":", "rmtree", "(", "repo_dir", ")", "else", ":", "sys", ".", "exit", "(", ")" ]
ask the user whether its okay to delete the previously-cloned repo .
train
false
7,260
def set_owner(obj_name, principal, obj_type='file'): sid = get_sid(principal) flags = Flags() new_privs = set() luid = win32security.LookupPrivilegeValue('', 'SeTakeOwnershipPrivilege') new_privs.add((luid, win32con.SE_PRIVILEGE_ENABLED)) luid = win32security.LookupPrivilegeValue('', 'SeRestorePrivilege') new_privs.add((luid, win32con.SE_PRIVILEGE_ENABLED)) p_handle = win32api.GetCurrentProcess() t_handle = win32security.OpenProcessToken(p_handle, (win32security.TOKEN_ALL_ACCESS | win32con.TOKEN_ADJUST_PRIVILEGES)) win32security.AdjustTokenPrivileges(t_handle, 0, new_privs) try: win32security.SetNamedSecurityInfo(obj_name, flags.obj_type[obj_type], flags.element['owner'], sid, None, None, None) except pywintypes.error as exc: log.debug('Failed to make {0} the owner: {1}'.format(principal, exc[2])) raise CommandExecutionError('Failed to set owner: {0}'.format(exc[2])) return True
[ "def", "set_owner", "(", "obj_name", ",", "principal", ",", "obj_type", "=", "'file'", ")", ":", "sid", "=", "get_sid", "(", "principal", ")", "flags", "=", "Flags", "(", ")", "new_privs", "=", "set", "(", ")", "luid", "=", "win32security", ".", "LookupPrivilegeValue", "(", "''", ",", "'SeTakeOwnershipPrivilege'", ")", "new_privs", ".", "add", "(", "(", "luid", ",", "win32con", ".", "SE_PRIVILEGE_ENABLED", ")", ")", "luid", "=", "win32security", ".", "LookupPrivilegeValue", "(", "''", ",", "'SeRestorePrivilege'", ")", "new_privs", ".", "add", "(", "(", "luid", ",", "win32con", ".", "SE_PRIVILEGE_ENABLED", ")", ")", "p_handle", "=", "win32api", ".", "GetCurrentProcess", "(", ")", "t_handle", "=", "win32security", ".", "OpenProcessToken", "(", "p_handle", ",", "(", "win32security", ".", "TOKEN_ALL_ACCESS", "|", "win32con", ".", "TOKEN_ADJUST_PRIVILEGES", ")", ")", "win32security", ".", "AdjustTokenPrivileges", "(", "t_handle", ",", "0", ",", "new_privs", ")", "try", ":", "win32security", ".", "SetNamedSecurityInfo", "(", "obj_name", ",", "flags", ".", "obj_type", "[", "obj_type", "]", ",", "flags", ".", "element", "[", "'owner'", "]", ",", "sid", ",", "None", ",", "None", ",", "None", ")", "except", "pywintypes", ".", "error", "as", "exc", ":", "log", ".", "debug", "(", "'Failed to make {0} the owner: {1}'", ".", "format", "(", "principal", ",", "exc", "[", "2", "]", ")", ")", "raise", "CommandExecutionError", "(", "'Failed to set owner: {0}'", ".", "format", "(", "exc", "[", "2", "]", ")", ")", "return", "True" ]
set the owner of an object .
train
false
7,261
def object_type_repr(obj): if (obj is None): return 'None' elif (obj is Ellipsis): return 'Ellipsis' if (obj.__class__.__module__ in ('__builtin__', 'builtins')): name = obj.__class__.__name__ else: name = ((obj.__class__.__module__ + '.') + obj.__class__.__name__) return ('%s object' % name)
[ "def", "object_type_repr", "(", "obj", ")", ":", "if", "(", "obj", "is", "None", ")", ":", "return", "'None'", "elif", "(", "obj", "is", "Ellipsis", ")", ":", "return", "'Ellipsis'", "if", "(", "obj", ".", "__class__", ".", "__module__", "in", "(", "'__builtin__'", ",", "'builtins'", ")", ")", ":", "name", "=", "obj", ".", "__class__", ".", "__name__", "else", ":", "name", "=", "(", "(", "obj", ".", "__class__", ".", "__module__", "+", "'.'", ")", "+", "obj", ".", "__class__", ".", "__name__", ")", "return", "(", "'%s object'", "%", "name", ")" ]
returns the name of the objects type .
train
true
7,263
def get_module_cache(init_args=None): return cmodule.get_module_cache(config.compiledir, init_args=init_args)
[ "def", "get_module_cache", "(", "init_args", "=", "None", ")", ":", "return", "cmodule", ".", "get_module_cache", "(", "config", ".", "compiledir", ",", "init_args", "=", "init_args", ")" ]
create a new module_cache with the pairs in this dictionary parameters init_args if not none .
train
false
7,264
def validate_and_normalize_ip(ip): new_ip = ip.lower() if is_valid_ipv4(new_ip): return new_ip elif is_valid_ipv6(new_ip): return expand_ipv6(new_ip) else: raise ValueError(('Invalid ip %s' % ip))
[ "def", "validate_and_normalize_ip", "(", "ip", ")", ":", "new_ip", "=", "ip", ".", "lower", "(", ")", "if", "is_valid_ipv4", "(", "new_ip", ")", ":", "return", "new_ip", "elif", "is_valid_ipv6", "(", "new_ip", ")", ":", "return", "expand_ipv6", "(", "new_ip", ")", "else", ":", "raise", "ValueError", "(", "(", "'Invalid ip %s'", "%", "ip", ")", ")" ]
return normalized ip if the ip is a valid ip .
train
false
7,266
def prepare_input_source(source, base=''): if (type(source) in _StringTypes): source = xmlreader.InputSource(source) elif hasattr(source, 'read'): f = source source = xmlreader.InputSource() source.setByteStream(f) if hasattr(f, 'name'): source.setSystemId(f.name) if (source.getByteStream() is None): sysid = source.getSystemId() basehead = os.path.dirname(os.path.normpath(base)) sysidfilename = os.path.join(basehead, sysid) if os.path.isfile(sysidfilename): source.setSystemId(sysidfilename) f = open(sysidfilename, 'rb') else: source.setSystemId(urlparse.urljoin(base, sysid)) f = urllib.urlopen(source.getSystemId()) source.setByteStream(f) return source
[ "def", "prepare_input_source", "(", "source", ",", "base", "=", "''", ")", ":", "if", "(", "type", "(", "source", ")", "in", "_StringTypes", ")", ":", "source", "=", "xmlreader", ".", "InputSource", "(", "source", ")", "elif", "hasattr", "(", "source", ",", "'read'", ")", ":", "f", "=", "source", "source", "=", "xmlreader", ".", "InputSource", "(", ")", "source", ".", "setByteStream", "(", "f", ")", "if", "hasattr", "(", "f", ",", "'name'", ")", ":", "source", ".", "setSystemId", "(", "f", ".", "name", ")", "if", "(", "source", ".", "getByteStream", "(", ")", "is", "None", ")", ":", "sysid", "=", "source", ".", "getSystemId", "(", ")", "basehead", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "normpath", "(", "base", ")", ")", "sysidfilename", "=", "os", ".", "path", ".", "join", "(", "basehead", ",", "sysid", ")", "if", "os", ".", "path", ".", "isfile", "(", "sysidfilename", ")", ":", "source", ".", "setSystemId", "(", "sysidfilename", ")", "f", "=", "open", "(", "sysidfilename", ",", "'rb'", ")", "else", ":", "source", ".", "setSystemId", "(", "urlparse", ".", "urljoin", "(", "base", ",", "sysid", ")", ")", "f", "=", "urllib", ".", "urlopen", "(", "source", ".", "getSystemId", "(", ")", ")", "source", ".", "setByteStream", "(", "f", ")", "return", "source" ]
this function takes an inputsource and an optional base url and returns a fully resolved inputsource object ready for reading .
train
false
7,267
def whitespace_normalize_name(name): return ' '.join(name.split())
[ "def", "whitespace_normalize_name", "(", "name", ")", ":", "return", "' '", ".", "join", "(", "name", ".", "split", "(", ")", ")" ]
return a whitespace-normalized name .
train
false
7,269
@register.assignment_tag def shipping_charge_excl_discount(method, basket): return method.calculate_excl_discount(basket)
[ "@", "register", ".", "assignment_tag", "def", "shipping_charge_excl_discount", "(", "method", ",", "basket", ")", ":", "return", "method", ".", "calculate_excl_discount", "(", "basket", ")" ]
template tag for calculating the shipping charge for a given shipping method and basket .
train
false
7,271
def moveFile(srcFile, destFile): try: ek(shutil.move, srcFile, destFile) fixSetGroupID(destFile) except OSError: copyFile(srcFile, destFile) ek(os.unlink, srcFile)
[ "def", "moveFile", "(", "srcFile", ",", "destFile", ")", ":", "try", ":", "ek", "(", "shutil", ".", "move", ",", "srcFile", ",", "destFile", ")", "fixSetGroupID", "(", "destFile", ")", "except", "OSError", ":", "copyFile", "(", "srcFile", ",", "destFile", ")", "ek", "(", "os", ".", "unlink", ",", "srcFile", ")" ]
move a file from source to destination .
train
false
7,272
def yml_to_json(filename): jsonfilename = '{0}.json'.format(*os.path.splitext(filename)) with open(filename, 'r') as f: contents = yaml.load(f) with open(jsonfilename, 'w') as f: json.dump(contents, f)
[ "def", "yml_to_json", "(", "filename", ")", ":", "jsonfilename", "=", "'{0}.json'", ".", "format", "(", "*", "os", ".", "path", ".", "splitext", "(", "filename", ")", ")", "with", "open", "(", "filename", ",", "'r'", ")", "as", "f", ":", "contents", "=", "yaml", ".", "load", "(", "f", ")", "with", "open", "(", "jsonfilename", ",", "'w'", ")", "as", "f", ":", "json", ".", "dump", "(", "contents", ",", "f", ")" ]
convert a .
train
false
7,274
def seasonal_mean(x, freq): return np.array([pd_nanmean(x[i::freq]) for i in range(freq)])
[ "def", "seasonal_mean", "(", "x", ",", "freq", ")", ":", "return", "np", ".", "array", "(", "[", "pd_nanmean", "(", "x", "[", "i", ":", ":", "freq", "]", ")", "for", "i", "in", "range", "(", "freq", ")", "]", ")" ]
return means for each period in x .
train
false