id_within_dataset
int64
1
55.5k
snippet
stringlengths
19
14.2k
tokens
listlengths
6
1.63k
nl
stringlengths
6
352
split_within_dataset
stringclasses
1 value
is_duplicated
bool
2 classes
46,523
def log(repo='.', paths=None, outstream=sys.stdout, max_entries=None, reverse=False, name_status=False): with open_repo_closing(repo) as r: walker = r.get_walker(max_entries=max_entries, paths=paths, reverse=reverse) for entry in walker: decode = (lambda x: commit_decode(entry.commit, x)) print_commit(entry.commit, decode, outstream) if name_status: outstream.writelines([(l + '\n') for l in print_name_status(entry.changes())])
[ "def", "log", "(", "repo", "=", "'.'", ",", "paths", "=", "None", ",", "outstream", "=", "sys", ".", "stdout", ",", "max_entries", "=", "None", ",", "reverse", "=", "False", ",", "name_status", "=", "False", ")", ":", "with", "open_repo_closing", "(", "repo", ")", "as", "r", ":", "walker", "=", "r", ".", "get_walker", "(", "max_entries", "=", "max_entries", ",", "paths", "=", "paths", ",", "reverse", "=", "reverse", ")", "for", "entry", "in", "walker", ":", "decode", "=", "(", "lambda", "x", ":", "commit_decode", "(", "entry", ".", "commit", ",", "x", ")", ")", "print_commit", "(", "entry", ".", "commit", ",", "decode", ",", "outstream", ")", "if", "name_status", ":", "outstream", ".", "writelines", "(", "[", "(", "l", "+", "'\\n'", ")", "for", "l", "in", "print_name_status", "(", "entry", ".", "changes", "(", ")", ")", "]", ")" ]
dump message into log file .
train
false
46,524
def formatwarning(message, category, filename, lineno, line=None): s = ('%s:%s: %s: %s\n' % (filename, lineno, category.__name__, message)) line = (linecache.getline(filename, lineno) if (line is None) else line) if line: line = line.strip() s += (' %s\n' % line) return s
[ "def", "formatwarning", "(", "message", ",", "category", ",", "filename", ",", "lineno", ",", "line", "=", "None", ")", ":", "s", "=", "(", "'%s:%s: %s: %s\\n'", "%", "(", "filename", ",", "lineno", ",", "category", ".", "__name__", ",", "message", ")", ")", "line", "=", "(", "linecache", ".", "getline", "(", "filename", ",", "lineno", ")", "if", "(", "line", "is", "None", ")", "else", "line", ")", "if", "line", ":", "line", "=", "line", ".", "strip", "(", ")", "s", "+=", "(", "' %s\\n'", "%", "line", ")", "return", "s" ]
function to format a warning the standard way .
train
false
46,525
def test_if_marathon_app_can_be_deployed_with_mesos_containerizer(dcos_api_session): (app, test_uuid) = get_test_app() app['container'] = {'type': 'MESOS', 'docker': {'image': 'debian:jessie'}, 'volumes': [{'containerPath': '/opt/mesosphere', 'hostPath': '/opt/mesosphere', 'mode': 'RO'}]} dcos_api_session.marathon.deploy_test_app_and_check(app, test_uuid)
[ "def", "test_if_marathon_app_can_be_deployed_with_mesos_containerizer", "(", "dcos_api_session", ")", ":", "(", "app", ",", "test_uuid", ")", "=", "get_test_app", "(", ")", "app", "[", "'container'", "]", "=", "{", "'type'", ":", "'MESOS'", ",", "'docker'", ":", "{", "'image'", ":", "'debian:jessie'", "}", ",", "'volumes'", ":", "[", "{", "'containerPath'", ":", "'/opt/mesosphere'", ",", "'hostPath'", ":", "'/opt/mesosphere'", ",", "'mode'", ":", "'RO'", "}", "]", "}", "dcos_api_session", ".", "marathon", ".", "deploy_test_app_and_check", "(", "app", ",", "test_uuid", ")" ]
marathon app deployment integration test using the mesos containerizer this test verifies that a marathon app using the mesos containerizer with a docker image can be deployed .
train
false
46,526
def download_object(bucket, obj, out_file, encryption_key, key_hash): service = create_service() request = service.objects().get_media(bucket=bucket, object=obj) request.headers['x-goog-encryption-algorithm'] = 'AES256' request.headers['x-goog-encryption-key'] = encryption_key request.headers['x-goog-encryption-key-sha256'] = key_hash out_file.write(request.execute())
[ "def", "download_object", "(", "bucket", ",", "obj", ",", "out_file", ",", "encryption_key", ",", "key_hash", ")", ":", "service", "=", "create_service", "(", ")", "request", "=", "service", ".", "objects", "(", ")", ".", "get_media", "(", "bucket", "=", "bucket", ",", "object", "=", "obj", ")", "request", ".", "headers", "[", "'x-goog-encryption-algorithm'", "]", "=", "'AES256'", "request", ".", "headers", "[", "'x-goog-encryption-key'", "]", "=", "encryption_key", "request", ".", "headers", "[", "'x-goog-encryption-key-sha256'", "]", "=", "key_hash", "out_file", ".", "write", "(", "request", ".", "execute", "(", ")", ")" ]
downloads an object protected by a custom encryption key .
train
false
46,527
def make_icalculator_tests(calculator_factory): class ICalculatorTests(TestCase, ): '\n Tests of an ``ICalculator`` implementation.\n ' def test_interface(self): '\n The ``ICalculator`` implemention actually implements the interface.\n ' verifyObject(ICalculator, calculator_factory()) @given(discovered_datasets=builds(dataset_map_from_iterable, lists(DISCOVERED_DATASET_STRATEGY)), desired_datasets=builds(dataset_map_from_iterable, lists(DESIRED_DATASET_STRATEGY))) def test_returns_changes(self, discovered_datasets, desired_datasets): '\n ``ICalculator.calculate_changes_for_datasets`` returns a\n ``IStateChange``.\n ' calculator = calculator_factory() changes = calculator.calculate_changes_for_datasets(discovered_datasets=discovered_datasets, desired_datasets=desired_datasets) self.assertTrue(IStateChange.providedBy(changes)) return ICalculatorTests
[ "def", "make_icalculator_tests", "(", "calculator_factory", ")", ":", "class", "ICalculatorTests", "(", "TestCase", ",", ")", ":", "def", "test_interface", "(", "self", ")", ":", "verifyObject", "(", "ICalculator", ",", "calculator_factory", "(", ")", ")", "@", "given", "(", "discovered_datasets", "=", "builds", "(", "dataset_map_from_iterable", ",", "lists", "(", "DISCOVERED_DATASET_STRATEGY", ")", ")", ",", "desired_datasets", "=", "builds", "(", "dataset_map_from_iterable", ",", "lists", "(", "DESIRED_DATASET_STRATEGY", ")", ")", ")", "def", "test_returns_changes", "(", "self", ",", "discovered_datasets", ",", "desired_datasets", ")", ":", "calculator", "=", "calculator_factory", "(", ")", "changes", "=", "calculator", ".", "calculate_changes_for_datasets", "(", "discovered_datasets", "=", "discovered_datasets", ",", "desired_datasets", "=", "desired_datasets", ")", "self", ".", "assertTrue", "(", "IStateChange", ".", "providedBy", "(", "changes", ")", ")", "return", "ICalculatorTests" ]
make a test case to test an icalculator implementation .
train
false
46,531
def _ttest_finish(df, t): prob = (distributions.t.sf(np.abs(t), df) * 2) if (t.ndim == 0): t = t[()] return (t, prob)
[ "def", "_ttest_finish", "(", "df", ",", "t", ")", ":", "prob", "=", "(", "distributions", ".", "t", ".", "sf", "(", "np", ".", "abs", "(", "t", ")", ",", "df", ")", "*", "2", ")", "if", "(", "t", ".", "ndim", "==", "0", ")", ":", "t", "=", "t", "[", "(", ")", "]", "return", "(", "t", ",", "prob", ")" ]
common code between all 3 t-test functions .
train
false
46,532
def get_network_interface_id(name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) r = {} try: enis = conn.get_all_network_interfaces(filters={'tag:Name': name}) if (not enis): r['error'] = {'message': 'No ENIs found.'} elif (len(enis) > 1): r['error'] = {'message': 'Name specified is tagged on multiple ENIs.'} else: eni = enis[0] r['result'] = eni.id except boto.exception.EC2ResponseError as e: r['error'] = __utils__['boto.get_error'](e) return r
[ "def", "get_network_interface_id", "(", "name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "r", "=", "{", "}", "try", ":", "enis", "=", "conn", ".", "get_all_network_interfaces", "(", "filters", "=", "{", "'tag:Name'", ":", "name", "}", ")", "if", "(", "not", "enis", ")", ":", "r", "[", "'error'", "]", "=", "{", "'message'", ":", "'No ENIs found.'", "}", "elif", "(", "len", "(", "enis", ")", ">", "1", ")", ":", "r", "[", "'error'", "]", "=", "{", "'message'", ":", "'Name specified is tagged on multiple ENIs.'", "}", "else", ":", "eni", "=", "enis", "[", "0", "]", "r", "[", "'result'", "]", "=", "eni", ".", "id", "except", "boto", ".", "exception", ".", "EC2ResponseError", "as", "e", ":", "r", "[", "'error'", "]", "=", "__utils__", "[", "'boto.get_error'", "]", "(", "e", ")", "return", "r" ]
get an elastic network interface id from its name tag .
train
true
46,536
def fix_integer_index(context, builder, idxty, idx, size): if idxty.signed: ind = context.cast(builder, idx, idxty, types.intp) ind = slicing.fix_index(builder, ind, size) else: ind = context.cast(builder, idx, idxty, types.uintp) return ind
[ "def", "fix_integer_index", "(", "context", ",", "builder", ",", "idxty", ",", "idx", ",", "size", ")", ":", "if", "idxty", ".", "signed", ":", "ind", "=", "context", ".", "cast", "(", "builder", ",", "idx", ",", "idxty", ",", "types", ".", "intp", ")", "ind", "=", "slicing", ".", "fix_index", "(", "builder", ",", "ind", ",", "size", ")", "else", ":", "ind", "=", "context", ".", "cast", "(", "builder", ",", "idx", ",", "idxty", ",", "types", ".", "uintp", ")", "return", "ind" ]
fix the integer index type and value for the given dimension size .
train
false
46,538
def validate_bom_no(item, bom_no): bom = frappe.get_doc(u'BOM', bom_no) if (not bom.is_active): frappe.throw(_(u'BOM {0} must be active').format(bom_no)) if (bom.docstatus != 1): if (not getattr(frappe.flags, u'in_test', False)): frappe.throw(_(u'BOM {0} must be submitted').format(bom_no)) if (item and (not ((bom.item.lower() == item.lower()) or (bom.item.lower() == cstr(frappe.db.get_value(u'Item', item, u'variant_of')).lower())))): frappe.throw(_(u'BOM {0} does not belong to Item {1}').format(bom_no, item))
[ "def", "validate_bom_no", "(", "item", ",", "bom_no", ")", ":", "bom", "=", "frappe", ".", "get_doc", "(", "u'BOM'", ",", "bom_no", ")", "if", "(", "not", "bom", ".", "is_active", ")", ":", "frappe", ".", "throw", "(", "_", "(", "u'BOM {0} must be active'", ")", ".", "format", "(", "bom_no", ")", ")", "if", "(", "bom", ".", "docstatus", "!=", "1", ")", ":", "if", "(", "not", "getattr", "(", "frappe", ".", "flags", ",", "u'in_test'", ",", "False", ")", ")", ":", "frappe", ".", "throw", "(", "_", "(", "u'BOM {0} must be submitted'", ")", ".", "format", "(", "bom_no", ")", ")", "if", "(", "item", "and", "(", "not", "(", "(", "bom", ".", "item", ".", "lower", "(", ")", "==", "item", ".", "lower", "(", ")", ")", "or", "(", "bom", ".", "item", ".", "lower", "(", ")", "==", "cstr", "(", "frappe", ".", "db", ".", "get_value", "(", "u'Item'", ",", "item", ",", "u'variant_of'", ")", ")", ".", "lower", "(", ")", ")", ")", ")", ")", ":", "frappe", ".", "throw", "(", "_", "(", "u'BOM {0} does not belong to Item {1}'", ")", ".", "format", "(", "bom_no", ",", "item", ")", ")" ]
validate bom no of sub-contracted items .
train
false
46,540
def setup_menus(): global TopMenu TopMenu.add(MPMenuSubMenu('Display', items=[MPMenuItem('Map', 'Map', '# map'), MPMenuItem('Save Graph', 'Save', '# save'), MPMenuItem('Reload Graphs', 'Reload', '# reload')])) TopMenu.add(graph_menus()) TopMenu.add(MPMenuSubMenu('FlightMode', items=flightmode_menu())) mestate.console.set_menu(TopMenu, menu_callback)
[ "def", "setup_menus", "(", ")", ":", "global", "TopMenu", "TopMenu", ".", "add", "(", "MPMenuSubMenu", "(", "'Display'", ",", "items", "=", "[", "MPMenuItem", "(", "'Map'", ",", "'Map'", ",", "'# map'", ")", ",", "MPMenuItem", "(", "'Save Graph'", ",", "'Save'", ",", "'# save'", ")", ",", "MPMenuItem", "(", "'Reload Graphs'", ",", "'Reload'", ",", "'# reload'", ")", "]", ")", ")", "TopMenu", ".", "add", "(", "graph_menus", "(", ")", ")", "TopMenu", ".", "add", "(", "MPMenuSubMenu", "(", "'FlightMode'", ",", "items", "=", "flightmode_menu", "(", ")", ")", ")", "mestate", ".", "console", ".", "set_menu", "(", "TopMenu", ",", "menu_callback", ")" ]
setup console menus .
train
true
46,541
def write_block_to_file(data, lazy_file): binary = ('b' in str(getattr(lazy_file, 'mode', 'b'))) with lazy_file as f: if isinstance(f, io.TextIOWrapper): binary = False if binary: ensure = ensure_bytes else: ensure = ensure_unicode if isinstance(data, (str, bytes, unicode)): f.write(ensure(data)) elif isinstance(data, io.IOBase): out = True while out: out = data.read((64 * (2 ** 10))) f.write(ensure(out)) else: start = False for d in data: if start: if binary: try: f.write('\n') except TypeError: binary = False f.write('\n') else: f.write(u'\n') else: start = True f.write(ensure(d))
[ "def", "write_block_to_file", "(", "data", ",", "lazy_file", ")", ":", "binary", "=", "(", "'b'", "in", "str", "(", "getattr", "(", "lazy_file", ",", "'mode'", ",", "'b'", ")", ")", ")", "with", "lazy_file", "as", "f", ":", "if", "isinstance", "(", "f", ",", "io", ".", "TextIOWrapper", ")", ":", "binary", "=", "False", "if", "binary", ":", "ensure", "=", "ensure_bytes", "else", ":", "ensure", "=", "ensure_unicode", "if", "isinstance", "(", "data", ",", "(", "str", ",", "bytes", ",", "unicode", ")", ")", ":", "f", ".", "write", "(", "ensure", "(", "data", ")", ")", "elif", "isinstance", "(", "data", ",", "io", ".", "IOBase", ")", ":", "out", "=", "True", "while", "out", ":", "out", "=", "data", ".", "read", "(", "(", "64", "*", "(", "2", "**", "10", ")", ")", ")", "f", ".", "write", "(", "ensure", "(", "out", ")", ")", "else", ":", "start", "=", "False", "for", "d", "in", "data", ":", "if", "start", ":", "if", "binary", ":", "try", ":", "f", ".", "write", "(", "'\\n'", ")", "except", "TypeError", ":", "binary", "=", "False", "f", ".", "write", "(", "'\\n'", ")", "else", ":", "f", ".", "write", "(", "u'\\n'", ")", "else", ":", "start", "=", "True", "f", ".", "write", "(", "ensure", "(", "d", ")", ")" ]
parameters data : data to write either str/bytes .
train
false
46,542
def driversOkay(): return (not ('microsoft' in gl_info.get_vendor().lower()))
[ "def", "driversOkay", "(", ")", ":", "return", "(", "not", "(", "'microsoft'", "in", "gl_info", ".", "get_vendor", "(", ")", ".", "lower", "(", ")", ")", ")" ]
returns true if drivers should be okay for psychopy .
train
false
46,543
@LocalContext def run_assembly(assembly): return ELF.from_assembly(assembly).process()
[ "@", "LocalContext", "def", "run_assembly", "(", "assembly", ")", ":", "return", "ELF", ".", "from_assembly", "(", "assembly", ")", ".", "process", "(", ")" ]
given an assembly listing .
train
false
46,545
def check_labels_sampleids(fasta_labels, sample_ids, total_seq_count): valid_id_count = 0 matches_sampleid_count = 0 for label in fasta_labels: curr_label = label.split('_') if (len(curr_label) != 2): continue valid_id_count += 1 if (curr_label[0] in sample_ids): matches_sampleid_count += 1 total_seq_count = float(total_seq_count) valid_id_count = float(valid_id_count) matches_sampleid_count = float(matches_sampleid_count) perc_not_valid = ('%1.3f' % ((total_seq_count - valid_id_count) / total_seq_count)) perc_nosampleid_match = ('%1.3f' % ((total_seq_count - matches_sampleid_count) / total_seq_count)) return (perc_not_valid, perc_nosampleid_match)
[ "def", "check_labels_sampleids", "(", "fasta_labels", ",", "sample_ids", ",", "total_seq_count", ")", ":", "valid_id_count", "=", "0", "matches_sampleid_count", "=", "0", "for", "label", "in", "fasta_labels", ":", "curr_label", "=", "label", ".", "split", "(", "'_'", ")", "if", "(", "len", "(", "curr_label", ")", "!=", "2", ")", ":", "continue", "valid_id_count", "+=", "1", "if", "(", "curr_label", "[", "0", "]", "in", "sample_ids", ")", ":", "matches_sampleid_count", "+=", "1", "total_seq_count", "=", "float", "(", "total_seq_count", ")", "valid_id_count", "=", "float", "(", "valid_id_count", ")", "matches_sampleid_count", "=", "float", "(", "matches_sampleid_count", ")", "perc_not_valid", "=", "(", "'%1.3f'", "%", "(", "(", "total_seq_count", "-", "valid_id_count", ")", "/", "total_seq_count", ")", ")", "perc_nosampleid_match", "=", "(", "'%1.3f'", "%", "(", "(", "total_seq_count", "-", "matches_sampleid_count", ")", "/", "total_seq_count", ")", ")", "return", "(", "perc_not_valid", ",", "perc_nosampleid_match", ")" ]
returns percent of valid fasta labels and that do not match sampleids fasta_labels: list of fasta labels sample_ids: set of sample ids from mapping file total_seq_count: int of total sequences in fasta file .
train
false
46,546
def list_cidr_ips(cidr): ips = netaddr.IPNetwork(cidr) return [str(ip) for ip in list(ips)]
[ "def", "list_cidr_ips", "(", "cidr", ")", ":", "ips", "=", "netaddr", ".", "IPNetwork", "(", "cidr", ")", "return", "[", "str", "(", "ip", ")", "for", "ip", "in", "list", "(", "ips", ")", "]" ]
get a list of ip addresses from a cidr .
train
false
46,547
@mock_ec2 def test_describe_dhcp_options(): conn = boto.connect_vpc(u'the_key', u'the_secret') dhcp_option = conn.create_dhcp_options() dhcp_options = conn.get_all_dhcp_options([dhcp_option.id]) dhcp_options.should.be.length_of(1) dhcp_options = conn.get_all_dhcp_options() dhcp_options.should.be.length_of(1)
[ "@", "mock_ec2", "def", "test_describe_dhcp_options", "(", ")", ":", "conn", "=", "boto", ".", "connect_vpc", "(", "u'the_key'", ",", "u'the_secret'", ")", "dhcp_option", "=", "conn", ".", "create_dhcp_options", "(", ")", "dhcp_options", "=", "conn", ".", "get_all_dhcp_options", "(", "[", "dhcp_option", ".", "id", "]", ")", "dhcp_options", ".", "should", ".", "be", ".", "length_of", "(", "1", ")", "dhcp_options", "=", "conn", ".", "get_all_dhcp_options", "(", ")", "dhcp_options", ".", "should", ".", "be", ".", "length_of", "(", "1", ")" ]
test dhcp options lookup by id .
train
false
46,549
def _make_3bit_errors(veclen=24): errorvecs = [] errorvecs.append(([0] * veclen)) for i in range(veclen): vec = ([0] * veclen) vec[i] = 1 errorvecs.append(vec) for i in range(veclen): for j in range((i + 1), veclen): vec = ([0] * veclen) vec[i] = 1 vec[j] = 1 errorvecs.append(vec) for i in range(veclen): for j in range((i + 1), veclen): for k in range((j + 1), veclen): vec = ([0] * veclen) vec[i] = 1 vec[j] = 1 vec[k] = 1 errorvecs.append(vec) return errorvecs
[ "def", "_make_3bit_errors", "(", "veclen", "=", "24", ")", ":", "errorvecs", "=", "[", "]", "errorvecs", ".", "append", "(", "(", "[", "0", "]", "*", "veclen", ")", ")", "for", "i", "in", "range", "(", "veclen", ")", ":", "vec", "=", "(", "[", "0", "]", "*", "veclen", ")", "vec", "[", "i", "]", "=", "1", "errorvecs", ".", "append", "(", "vec", ")", "for", "i", "in", "range", "(", "veclen", ")", ":", "for", "j", "in", "range", "(", "(", "i", "+", "1", ")", ",", "veclen", ")", ":", "vec", "=", "(", "[", "0", "]", "*", "veclen", ")", "vec", "[", "i", "]", "=", "1", "vec", "[", "j", "]", "=", "1", "errorvecs", ".", "append", "(", "vec", ")", "for", "i", "in", "range", "(", "veclen", ")", ":", "for", "j", "in", "range", "(", "(", "i", "+", "1", ")", ",", "veclen", ")", ":", "for", "k", "in", "range", "(", "(", "j", "+", "1", ")", ",", "veclen", ")", ":", "vec", "=", "(", "[", "0", "]", "*", "veclen", ")", "vec", "[", "i", "]", "=", "1", "vec", "[", "j", "]", "=", "1", "vec", "[", "k", "]", "=", "1", "errorvecs", ".", "append", "(", "vec", ")", "return", "errorvecs" ]
return list of all bitvectors with <= 3 bits as 1s .
train
false
46,551
def _get_gcp_environment_credentials(service_account_email, credentials_file, project_id): if (not service_account_email): service_account_email = _get_gcp_environ_var('GCE_EMAIL', None) if (not credentials_file): credentials_file = (_get_gcp_environ_var('GCE_CREDENTIALS_FILE_PATH', None) or _get_gcp_environ_var('GOOGLE_APPLICATION_CREDENTIALS', None) or _get_gcp_environ_var('GCE_PEM_FILE_PATH', None)) if (not project_id): project_id = (_get_gcp_environ_var('GCE_PROJECT', None) or _get_gcp_environ_var('GOOGLE_CLOUD_PROJECT', None)) return (service_account_email, credentials_file, project_id)
[ "def", "_get_gcp_environment_credentials", "(", "service_account_email", ",", "credentials_file", ",", "project_id", ")", ":", "if", "(", "not", "service_account_email", ")", ":", "service_account_email", "=", "_get_gcp_environ_var", "(", "'GCE_EMAIL'", ",", "None", ")", "if", "(", "not", "credentials_file", ")", ":", "credentials_file", "=", "(", "_get_gcp_environ_var", "(", "'GCE_CREDENTIALS_FILE_PATH'", ",", "None", ")", "or", "_get_gcp_environ_var", "(", "'GOOGLE_APPLICATION_CREDENTIALS'", ",", "None", ")", "or", "_get_gcp_environ_var", "(", "'GCE_PEM_FILE_PATH'", ",", "None", ")", ")", "if", "(", "not", "project_id", ")", ":", "project_id", "=", "(", "_get_gcp_environ_var", "(", "'GCE_PROJECT'", ",", "None", ")", "or", "_get_gcp_environ_var", "(", "'GOOGLE_CLOUD_PROJECT'", ",", "None", ")", ")", "return", "(", "service_account_email", ",", "credentials_file", ",", "project_id", ")" ]
helper to look in environment variables for credentials .
train
false
46,552
def _rectangle_small_p(a, b, eps): ((u, v), (s, t)) = (a, b) if (eps is not None): return (((s - u) < eps) and ((t - v) < eps)) else: return True
[ "def", "_rectangle_small_p", "(", "a", ",", "b", ",", "eps", ")", ":", "(", "(", "u", ",", "v", ")", ",", "(", "s", ",", "t", ")", ")", "=", "(", "a", ",", "b", ")", "if", "(", "eps", "is", "not", "None", ")", ":", "return", "(", "(", "(", "s", "-", "u", ")", "<", "eps", ")", "and", "(", "(", "t", "-", "v", ")", "<", "eps", ")", ")", "else", ":", "return", "True" ]
return true if the given rectangle is small enough .
train
false
46,553
def print_and_exec(cmd): print '\r \r', stdout.flush() print ((((((O + ' [!] ') + W) + 'executing: ') + O) + ' '.join(cmd)) + W), stdout.flush() call(cmd, stdout=DN, stderr=DN) time.sleep(0.1)
[ "def", "print_and_exec", "(", "cmd", ")", ":", "print", "'\\r \\r'", ",", "stdout", ".", "flush", "(", ")", "print", "(", "(", "(", "(", "(", "(", "O", "+", "' [!] '", ")", "+", "W", ")", "+", "'executing: '", ")", "+", "O", ")", "+", "' '", ".", "join", "(", "cmd", ")", ")", "+", "W", ")", ",", "stdout", ".", "flush", "(", ")", "call", "(", "cmd", ",", "stdout", "=", "DN", ",", "stderr", "=", "DN", ")", "time", ".", "sleep", "(", "0.1", ")" ]
prints and executes command "cmd" .
train
false
46,556
def peek(): app = get_app(request.vars.app) filename = '/'.join(request.args) if request.vars.app: path = abspath(filename) else: path = apath(filename, r=request) try: data = safe_read(path).replace('\r', '') except IOError: session.flash = T('file does not exist') redirect(URL('site')) extension = filename[(filename.rfind('.') + 1):].lower() return dict(app=app, filename=filename, data=data, extension=extension)
[ "def", "peek", "(", ")", ":", "app", "=", "get_app", "(", "request", ".", "vars", ".", "app", ")", "filename", "=", "'/'", ".", "join", "(", "request", ".", "args", ")", "if", "request", ".", "vars", ".", "app", ":", "path", "=", "abspath", "(", "filename", ")", "else", ":", "path", "=", "apath", "(", "filename", ",", "r", "=", "request", ")", "try", ":", "data", "=", "safe_read", "(", "path", ")", ".", "replace", "(", "'\\r'", ",", "''", ")", "except", "IOError", ":", "session", ".", "flash", "=", "T", "(", "'file does not exist'", ")", "redirect", "(", "URL", "(", "'site'", ")", ")", "extension", "=", "filename", "[", "(", "filename", ".", "rfind", "(", "'.'", ")", "+", "1", ")", ":", "]", ".", "lower", "(", ")", "return", "dict", "(", "app", "=", "app", ",", "filename", "=", "filename", ",", "data", "=", "data", ",", "extension", "=", "extension", ")" ]
visualize object code .
train
false
46,558
@register.tag def cachedeterministic(parser, token): nodelist = parser.parse(('endcachedeterministic',)) parser.delete_first_token() tokens = token.contents.split() if (len(tokens) != 3): raise TemplateSyntaxError((u"'%r' tag requires 2 arguments." % tokens[0])) return CacheNode(nodelist, tokens[1], tokens[2])
[ "@", "register", ".", "tag", "def", "cachedeterministic", "(", "parser", ",", "token", ")", ":", "nodelist", "=", "parser", ".", "parse", "(", "(", "'endcachedeterministic'", ",", ")", ")", "parser", ".", "delete_first_token", "(", ")", "tokens", "=", "token", ".", "contents", ".", "split", "(", ")", "if", "(", "len", "(", "tokens", ")", "!=", "3", ")", ":", "raise", "TemplateSyntaxError", "(", "(", "u\"'%r' tag requires 2 arguments.\"", "%", "tokens", "[", "0", "]", ")", ")", "return", "CacheNode", "(", "nodelist", ",", "tokens", "[", "1", "]", ",", "tokens", "[", "2", "]", ")" ]
this will cache the contents of a template fragment for a given amount of time .
train
false
46,561
def getRadioPluginsAddPluginFrame(directoryPath, importantFileNames, names, repository): repository.pluginFrame = PluginFrame() radioPlugins = [] for name in names: radioPlugin = RadioPlugin().getFromRadio((name in importantFileNames), repository.pluginFrame.latentStringVar, name, repository, (name == importantFileNames[0])) radioPlugin.updateFunction = repository.pluginFrame.update radioPlugins.append(radioPlugin) defaultRadioButton = getSelectedRadioPlugin((importantFileNames + [radioPlugins[0].name]), radioPlugins) repository.pluginFrame.getFromPath(defaultRadioButton, directoryPath, repository) return radioPlugins
[ "def", "getRadioPluginsAddPluginFrame", "(", "directoryPath", ",", "importantFileNames", ",", "names", ",", "repository", ")", ":", "repository", ".", "pluginFrame", "=", "PluginFrame", "(", ")", "radioPlugins", "=", "[", "]", "for", "name", "in", "names", ":", "radioPlugin", "=", "RadioPlugin", "(", ")", ".", "getFromRadio", "(", "(", "name", "in", "importantFileNames", ")", ",", "repository", ".", "pluginFrame", ".", "latentStringVar", ",", "name", ",", "repository", ",", "(", "name", "==", "importantFileNames", "[", "0", "]", ")", ")", "radioPlugin", ".", "updateFunction", "=", "repository", ".", "pluginFrame", ".", "update", "radioPlugins", ".", "append", "(", "radioPlugin", ")", "defaultRadioButton", "=", "getSelectedRadioPlugin", "(", "(", "importantFileNames", "+", "[", "radioPlugins", "[", "0", "]", ".", "name", "]", ")", ",", "radioPlugins", ")", "repository", ".", "pluginFrame", ".", "getFromPath", "(", "defaultRadioButton", ",", "directoryPath", ",", "repository", ")", "return", "radioPlugins" ]
get the radio plugins and add the plugin frame .
train
false
46,563
def draw_spring(G, **kwargs): draw(G, spring_layout(G), **kwargs)
[ "def", "draw_spring", "(", "G", ",", "**", "kwargs", ")", ":", "draw", "(", "G", ",", "spring_layout", "(", "G", ")", ",", "**", "kwargs", ")" ]
draw the graph g with a spring layout .
train
false
46,564
def _SkipFixed64(buffer, pos, end): pos += 8 if (pos > end): raise _DecodeError('Truncated message.') return pos
[ "def", "_SkipFixed64", "(", "buffer", ",", "pos", ",", "end", ")", ":", "pos", "+=", "8", "if", "(", "pos", ">", "end", ")", ":", "raise", "_DecodeError", "(", "'Truncated message.'", ")", "return", "pos" ]
skip a fixed64 value .
train
false
46,565
def init(mpstate): return SerialModule(mpstate)
[ "def", "init", "(", "mpstate", ")", ":", "return", "SerialModule", "(", "mpstate", ")" ]
initialise module .
train
false
46,568
def compare_multiset_states(s1, s2): (f1, lpart1, pstack1) = s1 (f2, lpart2, pstack2) = s2 if ((lpart1 == lpart2) and (f1[0:(lpart1 + 1)] == f2[0:(lpart2 + 1)])): if (pstack1[0:f1[(lpart1 + 1)]] == pstack2[0:f2[(lpart2 + 1)]]): return True return False
[ "def", "compare_multiset_states", "(", "s1", ",", "s2", ")", ":", "(", "f1", ",", "lpart1", ",", "pstack1", ")", "=", "s1", "(", "f2", ",", "lpart2", ",", "pstack2", ")", "=", "s2", "if", "(", "(", "lpart1", "==", "lpart2", ")", "and", "(", "f1", "[", "0", ":", "(", "lpart1", "+", "1", ")", "]", "==", "f2", "[", "0", ":", "(", "lpart2", "+", "1", ")", "]", ")", ")", ":", "if", "(", "pstack1", "[", "0", ":", "f1", "[", "(", "lpart1", "+", "1", ")", "]", "]", "==", "pstack2", "[", "0", ":", "f2", "[", "(", "lpart2", "+", "1", ")", "]", "]", ")", ":", "return", "True", "return", "False" ]
compare for equality two instances of multiset partition states this is useful for comparing different versions of the algorithm to verify correctness .
train
false
46,569
def _make_specloader(): def to_unicode(s, encoding=None): '\n Raises a TypeError exception if the given string is already unicode.\n\n ' if (encoding is None): encoding = 'ascii' return unicode(s, encoding, 'strict') loader = Loader(file_encoding='ascii', to_unicode=to_unicode) return SpecLoader(loader=loader)
[ "def", "_make_specloader", "(", ")", ":", "def", "to_unicode", "(", "s", ",", "encoding", "=", "None", ")", ":", "if", "(", "encoding", "is", "None", ")", ":", "encoding", "=", "'ascii'", "return", "unicode", "(", "s", ",", "encoding", ",", "'strict'", ")", "loader", "=", "Loader", "(", "file_encoding", "=", "'ascii'", ",", "to_unicode", "=", "to_unicode", ")", "return", "SpecLoader", "(", "loader", "=", "loader", ")" ]
return a default specloader instance for testing purposes .
train
false
46,571
def MakeSuiteFromDict(d, label=None): suite = Suite(label=label) suite.SetDict(d) suite.Normalize() return suite
[ "def", "MakeSuiteFromDict", "(", "d", ",", "label", "=", "None", ")", ":", "suite", "=", "Suite", "(", "label", "=", "label", ")", "suite", ".", "SetDict", "(", "d", ")", "suite", ".", "Normalize", "(", ")", "return", "suite" ]
makes a suite from a map from values to probabilities .
train
false
46,572
def arccot(val): if (numpy.real(val) < 0): return (((- numpy.pi) / 2) - numpy.arctan(val)) else: return ((numpy.pi / 2) - numpy.arctan(val))
[ "def", "arccot", "(", "val", ")", ":", "if", "(", "numpy", ".", "real", "(", "val", ")", "<", "0", ")", ":", "return", "(", "(", "(", "-", "numpy", ".", "pi", ")", "/", "2", ")", "-", "numpy", ".", "arctan", "(", "val", ")", ")", "else", ":", "return", "(", "(", "numpy", ".", "pi", "/", "2", ")", "-", "numpy", ".", "arctan", "(", "val", ")", ")" ]
inverse cotangent .
train
false
46,574
def side_effect_for_get_value(value, return_value): def side_effect(*args, **kwargs): '\n A side effect for tests which returns a value based\n on a given argument otherwise return actual function.\n ' if (args[0] == value): return return_value else: return DEFAULT return side_effect
[ "def", "side_effect_for_get_value", "(", "value", ",", "return_value", ")", ":", "def", "side_effect", "(", "*", "args", ",", "**", "kwargs", ")", ":", "if", "(", "args", "[", "0", "]", "==", "value", ")", ":", "return", "return_value", "else", ":", "return", "DEFAULT", "return", "side_effect" ]
returns a side_effect with given return value for a given value .
train
false
46,577
def p_shift_expression_1(t): pass
[ "def", "p_shift_expression_1", "(", "t", ")", ":", "pass" ]
shift_expression : additive_expression .
train
false
46,578
def save_pyc(filename): cfile = ('%sc' % filename) py_compile.compile(filename, cfile=cfile)
[ "def", "save_pyc", "(", "filename", ")", ":", "cfile", "=", "(", "'%sc'", "%", "filename", ")", "py_compile", ".", "compile", "(", "filename", ",", "cfile", "=", "cfile", ")" ]
bytecode compiles the file filename .
train
false
46,579
def floordiv(a, b): return (a // b)
[ "def", "floordiv", "(", "a", ",", "b", ")", ":", "return", "(", "a", "//", "b", ")" ]
same as a // b .
train
false
46,580
def test_write_csv(tmpdir): t = Table() t.add_column(Column(name='a', data=[1, 2, 3])) t.add_column(Column(name='b', data=['a', 'b', 'c'])) path = str(tmpdir.join('data.csv')) t.write(path)
[ "def", "test_write_csv", "(", "tmpdir", ")", ":", "t", "=", "Table", "(", ")", "t", ".", "add_column", "(", "Column", "(", "name", "=", "'a'", ",", "data", "=", "[", "1", ",", "2", ",", "3", "]", ")", ")", "t", ".", "add_column", "(", "Column", "(", "name", "=", "'b'", ",", "data", "=", "[", "'a'", ",", "'b'", ",", "'c'", "]", ")", ")", "path", "=", "str", "(", "tmpdir", ".", "join", "(", "'data.csv'", ")", ")", "t", ".", "write", "(", "path", ")" ]
if properly registered .
train
false
46,581
def DEFINE_integer(name, default, help, lower_bound=None, upper_bound=None, flag_values=FLAGS, **args): parser = IntegerParser(lower_bound, upper_bound) serializer = ArgumentSerializer() DEFINE(parser, name, default, help, flag_values, serializer, **args) _RegisterBoundsValidatorIfNeeded(parser, name, flag_values=flag_values)
[ "def", "DEFINE_integer", "(", "name", ",", "default", ",", "help", ",", "lower_bound", "=", "None", ",", "upper_bound", "=", "None", ",", "flag_values", "=", "FLAGS", ",", "**", "args", ")", ":", "parser", "=", "IntegerParser", "(", "lower_bound", ",", "upper_bound", ")", "serializer", "=", "ArgumentSerializer", "(", ")", "DEFINE", "(", "parser", ",", "name", ",", "default", ",", "help", ",", "flag_values", ",", "serializer", ",", "**", "args", ")", "_RegisterBoundsValidatorIfNeeded", "(", "parser", ",", "name", ",", "flag_values", "=", "flag_values", ")" ]
registers a flag whose value must be an integer .
train
false
46,582
@api_versions.wraps('2.26') @utils.arg('server', metavar='<server>', help=_('Name or ID of server.')) def do_server_tag_list(cs, args): server = _find_server(cs, args.server) tags = server.tag_list() formatters = {'Tag': (lambda o: o)} utils.print_list(tags, ['Tag'], formatters=formatters)
[ "@", "api_versions", ".", "wraps", "(", "'2.26'", ")", "@", "utils", ".", "arg", "(", "'server'", ",", "metavar", "=", "'<server>'", ",", "help", "=", "_", "(", "'Name or ID of server.'", ")", ")", "def", "do_server_tag_list", "(", "cs", ",", "args", ")", ":", "server", "=", "_find_server", "(", "cs", ",", "args", ".", "server", ")", "tags", "=", "server", ".", "tag_list", "(", ")", "formatters", "=", "{", "'Tag'", ":", "(", "lambda", "o", ":", "o", ")", "}", "utils", ".", "print_list", "(", "tags", ",", "[", "'Tag'", "]", ",", "formatters", "=", "formatters", ")" ]
get list of tags from a server .
train
false
46,583
def _tar(name, root, path=None, compress='bzip2'): if (path is None): path = os.path.join(salt.syspaths.BASE_FILE_ROOTS_DIR, 'img') if (not __salt__['file.directory_exists'](path)): try: __salt__['file.mkdir'](path) except Exception as exc: return {'Error': pprint.pformat(exc)} (compression, ext) = _compress(compress) tarfile = '{0}/{1}.tar.{2}'.format(path, name, ext) out = __salt__['archive.tar'](options='{0}pcf'.format(compression), tarfile=tarfile, sources='.', dest=root)
[ "def", "_tar", "(", "name", ",", "root", ",", "path", "=", "None", ",", "compress", "=", "'bzip2'", ")", ":", "if", "(", "path", "is", "None", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "salt", ".", "syspaths", ".", "BASE_FILE_ROOTS_DIR", ",", "'img'", ")", "if", "(", "not", "__salt__", "[", "'file.directory_exists'", "]", "(", "path", ")", ")", ":", "try", ":", "__salt__", "[", "'file.mkdir'", "]", "(", "path", ")", "except", "Exception", "as", "exc", ":", "return", "{", "'Error'", ":", "pprint", ".", "pformat", "(", "exc", ")", "}", "(", "compression", ",", "ext", ")", "=", "_compress", "(", "compress", ")", "tarfile", "=", "'{0}/{1}.tar.{2}'", ".", "format", "(", "path", ",", "name", ",", "ext", ")", "out", "=", "__salt__", "[", "'archive.tar'", "]", "(", "options", "=", "'{0}pcf'", ".", "format", "(", "compression", ")", ",", "tarfile", "=", "tarfile", ",", "sources", "=", "'.'", ",", "dest", "=", "root", ")" ]
pack up image in a tar format .
train
true
46,584
def normalize_node_id(nid): return ('"%s"' % nid)
[ "def", "normalize_node_id", "(", "nid", ")", ":", "return", "(", "'\"%s\"'", "%", "nid", ")" ]
returns a suitable dot node id for nid .
train
false
46,585
def _append(filename, regex, use_sudo): with hide('stdout', 'warnings'): return append(filename, regex, use_sudo=use_sudo)
[ "def", "_append", "(", "filename", ",", "regex", ",", "use_sudo", ")", ":", "with", "hide", "(", "'stdout'", ",", "'warnings'", ")", ":", "return", "append", "(", "filename", ",", "regex", ",", "use_sudo", "=", "use_sudo", ")" ]
less verbose append .
train
false
46,586
def _get_applicable_vm_recs(xenapi): for vm_ref in call_xenapi(xenapi, 'VM.get_all'): try: vm_rec = call_xenapi(xenapi, 'VM.get_record', vm_ref) except XenAPI.Failure as e: if (e.details[0] != 'HANDLE_INVALID'): raise continue if (vm_rec['is_a_template'] or vm_rec['is_control_domain']): continue (yield (vm_ref, vm_rec))
[ "def", "_get_applicable_vm_recs", "(", "xenapi", ")", ":", "for", "vm_ref", "in", "call_xenapi", "(", "xenapi", ",", "'VM.get_all'", ")", ":", "try", ":", "vm_rec", "=", "call_xenapi", "(", "xenapi", ",", "'VM.get_record'", ",", "vm_ref", ")", "except", "XenAPI", ".", "Failure", "as", "e", ":", "if", "(", "e", ".", "details", "[", "0", "]", "!=", "'HANDLE_INVALID'", ")", ":", "raise", "continue", "if", "(", "vm_rec", "[", "'is_a_template'", "]", "or", "vm_rec", "[", "'is_control_domain'", "]", ")", ":", "continue", "(", "yield", "(", "vm_ref", ",", "vm_rec", ")", ")" ]
an applicable vm is one that is not a template and not the control domain .
train
false
46,587
def filterResults(allResults, reportKeys, optimizeKey=None): optimizeDict = dict() allReportKeys = set() _appendReportKeys(keys=allReportKeys, prefix='', results=allResults) matchingKeys = _matchReportKeys(reportKeys, allReportKeys) reportDict = dict() for keyName in matchingKeys: value = _getReportItem(keyName, allResults) reportDict[keyName] = value if (optimizeKey is not None): matchingKeys = _matchReportKeys([optimizeKey], allReportKeys) if (len(matchingKeys) == 0): raise _BadKeyError(optimizeKey) elif (len(matchingKeys) > 1): raise _BadOptimizeKeyError(optimizeKey, matchingKeys) optimizeKeyFullName = matchingKeys[0] value = _getReportItem(optimizeKeyFullName, allResults) optimizeDict[optimizeKeyFullName] = value reportDict[optimizeKeyFullName] = value return (reportDict, optimizeDict)
[ "def", "filterResults", "(", "allResults", ",", "reportKeys", ",", "optimizeKey", "=", "None", ")", ":", "optimizeDict", "=", "dict", "(", ")", "allReportKeys", "=", "set", "(", ")", "_appendReportKeys", "(", "keys", "=", "allReportKeys", ",", "prefix", "=", "''", ",", "results", "=", "allResults", ")", "matchingKeys", "=", "_matchReportKeys", "(", "reportKeys", ",", "allReportKeys", ")", "reportDict", "=", "dict", "(", ")", "for", "keyName", "in", "matchingKeys", ":", "value", "=", "_getReportItem", "(", "keyName", ",", "allResults", ")", "reportDict", "[", "keyName", "]", "=", "value", "if", "(", "optimizeKey", "is", "not", "None", ")", ":", "matchingKeys", "=", "_matchReportKeys", "(", "[", "optimizeKey", "]", ",", "allReportKeys", ")", "if", "(", "len", "(", "matchingKeys", ")", "==", "0", ")", ":", "raise", "_BadKeyError", "(", "optimizeKey", ")", "elif", "(", "len", "(", "matchingKeys", ")", ">", "1", ")", ":", "raise", "_BadOptimizeKeyError", "(", "optimizeKey", ",", "matchingKeys", ")", "optimizeKeyFullName", "=", "matchingKeys", "[", "0", "]", "value", "=", "_getReportItem", "(", "optimizeKeyFullName", ",", "allResults", ")", "optimizeDict", "[", "optimizeKeyFullName", "]", "=", "value", "reportDict", "[", "optimizeKeyFullName", "]", "=", "value", "return", "(", "reportDict", ",", "optimizeDict", ")" ]
given the complete set of results generated by an experiment .
train
true
46,588
@handle_response_format @treeio_login_required def index_categories(request, response_format='html'): transactions = Object.filter_by_request(request, Transaction.objects) liabilities = Object.filter_by_request(request, Liability.objects) categories = Object.filter_by_request(request, Category.objects) return render_to_response('finance/index_categories', {'categories': categories, 'transactions': transactions, 'liabilities': liabilities}, context_instance=RequestContext(request), response_format=response_format)
[ "@", "handle_response_format", "@", "treeio_login_required", "def", "index_categories", "(", "request", ",", "response_format", "=", "'html'", ")", ":", "transactions", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Transaction", ".", "objects", ")", "liabilities", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Liability", ".", "objects", ")", "categories", "=", "Object", ".", "filter_by_request", "(", "request", ",", "Category", ".", "objects", ")", "return", "render_to_response", "(", "'finance/index_categories'", ",", "{", "'categories'", ":", "categories", ",", "'transactions'", ":", "transactions", ",", "'liabilities'", ":", "liabilities", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", "response_format", "=", "response_format", ")" ]
knowledge base categories page .
train
false
46,590
def interlink_translated_content(generator): inspector = GeneratorInspector(generator) for content in inspector.all_contents(): interlink_translations(content)
[ "def", "interlink_translated_content", "(", "generator", ")", ":", "inspector", "=", "GeneratorInspector", "(", "generator", ")", "for", "content", "in", "inspector", ".", "all_contents", "(", ")", ":", "interlink_translations", "(", "content", ")" ]
make translations link to the native locations for generators that may contain translated content .
train
true
46,591
def frame_is_relevant(entity, frame): if (frame.get('source_addr_long') != entity.config.address): return False if ('samples' not in frame): return False return True
[ "def", "frame_is_relevant", "(", "entity", ",", "frame", ")", ":", "if", "(", "frame", ".", "get", "(", "'source_addr_long'", ")", "!=", "entity", ".", "config", ".", "address", ")", ":", "return", "False", "if", "(", "'samples'", "not", "in", "frame", ")", ":", "return", "False", "return", "True" ]
test whether the frame is relevant to the entity .
train
false
46,595
def register_create_keys_from_csr_arguments(session, argument_table, **kwargs): argument_table['certificate-pem-outfile'] = QueryOutFileArgument(session=session, name='certificate-pem-outfile', query='certificatePem', after_call_event='after-call.iot.CreateCertificateFromCsr', perm=384)
[ "def", "register_create_keys_from_csr_arguments", "(", "session", ",", "argument_table", ",", "**", "kwargs", ")", ":", "argument_table", "[", "'certificate-pem-outfile'", "]", "=", "QueryOutFileArgument", "(", "session", "=", "session", ",", "name", "=", "'certificate-pem-outfile'", ",", "query", "=", "'certificatePem'", ",", "after_call_event", "=", "'after-call.iot.CreateCertificateFromCsr'", ",", "perm", "=", "384", ")" ]
add certificate-pem-outfile to create-certificate-from-csr .
train
false
46,596
@must_have_addon(SHORT_NAME, 'user') @must_have_addon(SHORT_NAME, 'node') def owncloud_folder_list(node_addon, user_addon, **kwargs): path = request.args.get('path') return node_addon.get_folders(path=path)
[ "@", "must_have_addon", "(", "SHORT_NAME", ",", "'user'", ")", "@", "must_have_addon", "(", "SHORT_NAME", ",", "'node'", ")", "def", "owncloud_folder_list", "(", "node_addon", ",", "user_addon", ",", "**", "kwargs", ")", ":", "path", "=", "request", ".", "args", ".", "get", "(", "'path'", ")", "return", "node_addon", ".", "get_folders", "(", "path", "=", "path", ")" ]
returns all the subsequent folders under the folder id passed .
train
false
46,597
@pytest.mark.parametrize('invalid_value', [None, [], (), 69, 69L]) def test_parse_multistring_invalid(invalid_value): with pytest.raises(ValueError): parse_multistring(invalid_value)
[ "@", "pytest", ".", "mark", ".", "parametrize", "(", "'invalid_value'", ",", "[", "None", ",", "[", "]", ",", "(", ")", ",", "69", ",", "69", "L", "]", ")", "def", "test_parse_multistring_invalid", "(", "invalid_value", ")", ":", "with", "pytest", ".", "raises", "(", "ValueError", ")", ":", "parse_multistring", "(", "invalid_value", ")" ]
tests parsing doesnt support non-string values .
train
false
46,598
def delete_group_policy(group_name, policy_name, region=None, key=None, keyid=None, profile=None): conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if (not conn): return False _policy = get_group_policy(group_name, policy_name, region, key, keyid, profile) if (not _policy): return True try: conn.delete_group_policy(group_name, policy_name) msg = 'Successfully deleted {0} policy for group {1}.' log.info(msg.format(policy_name, group_name)) return True except boto.exception.BotoServerError as e: log.debug(e) msg = 'Failed to delete {0} policy for group {1}.' log.error(msg.format(policy_name, group_name)) return False
[ "def", "delete_group_policy", "(", "group_name", ",", "policy_name", ",", "region", "=", "None", ",", "key", "=", "None", ",", "keyid", "=", "None", ",", "profile", "=", "None", ")", ":", "conn", "=", "_get_conn", "(", "region", "=", "region", ",", "key", "=", "key", ",", "keyid", "=", "keyid", ",", "profile", "=", "profile", ")", "if", "(", "not", "conn", ")", ":", "return", "False", "_policy", "=", "get_group_policy", "(", "group_name", ",", "policy_name", ",", "region", ",", "key", ",", "keyid", ",", "profile", ")", "if", "(", "not", "_policy", ")", ":", "return", "True", "try", ":", "conn", ".", "delete_group_policy", "(", "group_name", ",", "policy_name", ")", "msg", "=", "'Successfully deleted {0} policy for group {1}.'", "log", ".", "info", "(", "msg", ".", "format", "(", "policy_name", ",", "group_name", ")", ")", "return", "True", "except", "boto", ".", "exception", ".", "BotoServerError", "as", "e", ":", "log", ".", "debug", "(", "e", ")", "msg", "=", "'Failed to delete {0} policy for group {1}.'", "log", ".", "error", "(", "msg", ".", "format", "(", "policy_name", ",", "group_name", ")", ")", "return", "False" ]
delete a group policy .
train
true
46,601
@testing.requires_testing_data @requires_nibabel() def test_render_mri_without_bem(): tempdir = _TempDir() os.mkdir(op.join(tempdir, 'sample')) os.mkdir(op.join(tempdir, 'sample', 'mri')) shutil.copyfile(mri_fname, op.join(tempdir, 'sample', 'mri', 'T1.mgz')) report = Report(info_fname=raw_fname, subject='sample', subjects_dir=tempdir) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('always') report.parse_folder(tempdir) assert_true((len(w) >= 1)) report.save(op.join(tempdir, 'report.html'), open_browser=False)
[ "@", "testing", ".", "requires_testing_data", "@", "requires_nibabel", "(", ")", "def", "test_render_mri_without_bem", "(", ")", ":", "tempdir", "=", "_TempDir", "(", ")", "os", ".", "mkdir", "(", "op", ".", "join", "(", "tempdir", ",", "'sample'", ")", ")", "os", ".", "mkdir", "(", "op", ".", "join", "(", "tempdir", ",", "'sample'", ",", "'mri'", ")", ")", "shutil", ".", "copyfile", "(", "mri_fname", ",", "op", ".", "join", "(", "tempdir", ",", "'sample'", ",", "'mri'", ",", "'T1.mgz'", ")", ")", "report", "=", "Report", "(", "info_fname", "=", "raw_fname", ",", "subject", "=", "'sample'", ",", "subjects_dir", "=", "tempdir", ")", "with", "warnings", ".", "catch_warnings", "(", "record", "=", "True", ")", "as", "w", ":", "warnings", ".", "simplefilter", "(", "'always'", ")", "report", ".", "parse_folder", "(", "tempdir", ")", "assert_true", "(", "(", "len", "(", "w", ")", ">=", "1", ")", ")", "report", ".", "save", "(", "op", ".", "join", "(", "tempdir", ",", "'report.html'", ")", ",", "open_browser", "=", "False", ")" ]
test rendering mri without bem for mne report .
train
false
46,603
def gen_calendars(start, stop, critical_dates): all_dates = pd.date_range(start, stop, tz='utc') for to_drop in map(list, powerset(critical_dates)): (yield (all_dates.drop(to_drop),)) trading_days = get_calendar('NYSE').all_days (yield (trading_days[trading_days.slice_indexer(start, stop)],))
[ "def", "gen_calendars", "(", "start", ",", "stop", ",", "critical_dates", ")", ":", "all_dates", "=", "pd", ".", "date_range", "(", "start", ",", "stop", ",", "tz", "=", "'utc'", ")", "for", "to_drop", "in", "map", "(", "list", ",", "powerset", "(", "critical_dates", ")", ")", ":", "(", "yield", "(", "all_dates", ".", "drop", "(", "to_drop", ")", ",", ")", ")", "trading_days", "=", "get_calendar", "(", "'NYSE'", ")", ".", "all_days", "(", "yield", "(", "trading_days", "[", "trading_days", ".", "slice_indexer", "(", "start", ",", "stop", ")", "]", ",", ")", ")" ]
generate calendars to use as inputs .
train
false
46,604
def inst_detail(retry_count=3, pause=0.001): ct._write_head() df = _inst_detail(pageNo=1, retry_count=retry_count, pause=pause) if (len(df) > 0): df['code'] = df['code'].map((lambda x: str(x).zfill(6))) return df
[ "def", "inst_detail", "(", "retry_count", "=", "3", ",", "pause", "=", "0.001", ")", ":", "ct", ".", "_write_head", "(", ")", "df", "=", "_inst_detail", "(", "pageNo", "=", "1", ",", "retry_count", "=", "retry_count", ",", "pause", "=", "pause", ")", "if", "(", "len", "(", "df", ")", ">", "0", ")", ":", "df", "[", "'code'", "]", "=", "df", "[", "'code'", "]", ".", "map", "(", "(", "lambda", "x", ":", "str", "(", "x", ")", ".", "zfill", "(", "6", ")", ")", ")", "return", "df" ]
parameters retry_count : int .
train
false
46,605
def _validate_max_staleness(max_staleness): if (max_staleness == (-1)): return (-1) if (not isinstance(max_staleness, integer_types)): raise TypeError(_invalid_max_staleness_msg(max_staleness)) if (max_staleness <= 0): raise ValueError(_invalid_max_staleness_msg(max_staleness)) return max_staleness
[ "def", "_validate_max_staleness", "(", "max_staleness", ")", ":", "if", "(", "max_staleness", "==", "(", "-", "1", ")", ")", ":", "return", "(", "-", "1", ")", "if", "(", "not", "isinstance", "(", "max_staleness", ",", "integer_types", ")", ")", ":", "raise", "TypeError", "(", "_invalid_max_staleness_msg", "(", "max_staleness", ")", ")", "if", "(", "max_staleness", "<=", "0", ")", ":", "raise", "ValueError", "(", "_invalid_max_staleness_msg", "(", "max_staleness", ")", ")", "return", "max_staleness" ]
validate max_staleness .
train
true
46,607
def ensure_ndarray(ndarray_or_adjusted_array): if isinstance(ndarray_or_adjusted_array, ndarray): return ndarray_or_adjusted_array elif isinstance(ndarray_or_adjusted_array, AdjustedArray): return ndarray_or_adjusted_array.data else: raise TypeError(("Can't convert %s to ndarray" % type(ndarray_or_adjusted_array).__name__))
[ "def", "ensure_ndarray", "(", "ndarray_or_adjusted_array", ")", ":", "if", "isinstance", "(", "ndarray_or_adjusted_array", ",", "ndarray", ")", ":", "return", "ndarray_or_adjusted_array", "elif", "isinstance", "(", "ndarray_or_adjusted_array", ",", "AdjustedArray", ")", ":", "return", "ndarray_or_adjusted_array", ".", "data", "else", ":", "raise", "TypeError", "(", "(", "\"Can't convert %s to ndarray\"", "%", "type", "(", "ndarray_or_adjusted_array", ")", ".", "__name__", ")", ")" ]
return the input as a numpy ndarray .
train
true
46,608
def render_authors(authors): for x in authors: x.setdefault(u'email', u'') entries = [(u'\n <p>\n <strong>%(name)s</strong><br>\n <em>%(title)s</em><br>\n %(email)s\n </p>\n ' % author) for author in authors] return u''.join(entries)
[ "def", "render_authors", "(", "authors", ")", ":", "for", "x", "in", "authors", ":", "x", ".", "setdefault", "(", "u'email'", ",", "u''", ")", "entries", "=", "[", "(", "u'\\n <p>\\n <strong>%(name)s</strong><br>\\n <em>%(title)s</em><br>\\n %(email)s\\n </p>\\n '", "%", "author", ")", "for", "author", "in", "authors", "]", "return", "u''", ".", "join", "(", "entries", ")" ]
render a list of author details into richtext html .
train
false
46,609
def _close_to_dt(d1, d2, epsilon=5): delta = (d2 - d1) mus = abs((((delta.days * MUSECONDS_PER_DAY) + (delta.seconds * 1000000.0)) + delta.microseconds)) assert (mus < epsilon)
[ "def", "_close_to_dt", "(", "d1", ",", "d2", ",", "epsilon", "=", "5", ")", ":", "delta", "=", "(", "d2", "-", "d1", ")", "mus", "=", "abs", "(", "(", "(", "(", "delta", ".", "days", "*", "MUSECONDS_PER_DAY", ")", "+", "(", "delta", ".", "seconds", "*", "1000000.0", ")", ")", "+", "delta", ".", "microseconds", ")", ")", "assert", "(", "mus", "<", "epsilon", ")" ]
assert that datetimes *d1* and *d2* are within *epsilon* microseconds .
train
false
46,611
def _include_profile_image(requested_fields): return (requested_fields and ('profile_image' in requested_fields))
[ "def", "_include_profile_image", "(", "requested_fields", ")", ":", "return", "(", "requested_fields", "and", "(", "'profile_image'", "in", "requested_fields", ")", ")" ]
returns true if requested_fields list has profile_image entity else false .
train
false
46,612
def parse_yahoo_historical_ohlc(fh, adjusted=True, asobject=False): return _parse_yahoo_historical(fh, adjusted=adjusted, asobject=asobject, ochl=False)
[ "def", "parse_yahoo_historical_ohlc", "(", "fh", ",", "adjusted", "=", "True", ",", "asobject", "=", "False", ")", ":", "return", "_parse_yahoo_historical", "(", "fh", ",", "adjusted", "=", "adjusted", ",", "asobject", "=", "asobject", ",", "ochl", "=", "False", ")" ]
parse the historical data in file handle fh from yahoo finance .
train
false
46,614
def freeze(bin_env=None, user=None, cwd=None, use_vt=False): pip_bin = _get_pip_bin(bin_env) cmd = [pip_bin, 'freeze'] min_version = '8.0.3' cur_version = version(bin_env) if (not salt.utils.compare_versions(ver1=cur_version, oper='>=', ver2=min_version)): logger.warning('The version of pip installed is {0}, which is older than {1}. The packages pip, wheel, setuptools, and distribute will not be included in the output of pip.freeze'.format(cur_version, min_version)) else: cmd.append('--all') cmd_kwargs = dict(runas=user, cwd=cwd, use_vt=use_vt, python_shell=False) if (bin_env and os.path.isdir(bin_env)): cmd_kwargs['env'] = {'VIRTUAL_ENV': bin_env} result = __salt__['cmd.run_all'](cmd, **cmd_kwargs) if (result['retcode'] > 0): raise CommandExecutionError(result['stderr']) return result['stdout'].splitlines()
[ "def", "freeze", "(", "bin_env", "=", "None", ",", "user", "=", "None", ",", "cwd", "=", "None", ",", "use_vt", "=", "False", ")", ":", "pip_bin", "=", "_get_pip_bin", "(", "bin_env", ")", "cmd", "=", "[", "pip_bin", ",", "'freeze'", "]", "min_version", "=", "'8.0.3'", "cur_version", "=", "version", "(", "bin_env", ")", "if", "(", "not", "salt", ".", "utils", ".", "compare_versions", "(", "ver1", "=", "cur_version", ",", "oper", "=", "'>='", ",", "ver2", "=", "min_version", ")", ")", ":", "logger", ".", "warning", "(", "'The version of pip installed is {0}, which is older than {1}. The packages pip, wheel, setuptools, and distribute will not be included in the output of pip.freeze'", ".", "format", "(", "cur_version", ",", "min_version", ")", ")", "else", ":", "cmd", ".", "append", "(", "'--all'", ")", "cmd_kwargs", "=", "dict", "(", "runas", "=", "user", ",", "cwd", "=", "cwd", ",", "use_vt", "=", "use_vt", ",", "python_shell", "=", "False", ")", "if", "(", "bin_env", "and", "os", ".", "path", ".", "isdir", "(", "bin_env", ")", ")", ":", "cmd_kwargs", "[", "'env'", "]", "=", "{", "'VIRTUAL_ENV'", ":", "bin_env", "}", "result", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "**", "cmd_kwargs", ")", "if", "(", "result", "[", "'retcode'", "]", ">", "0", ")", ":", "raise", "CommandExecutionError", "(", "result", "[", "'stderr'", "]", ")", "return", "result", "[", "'stdout'", "]", ".", "splitlines", "(", ")" ]
modify graph to prevent further change by adding or removing nodes or edges .
train
false
46,615
def pm(): dbg = Debugger(sys.last_traceback) dbg.start()
[ "def", "pm", "(", ")", ":", "dbg", "=", "Debugger", "(", "sys", ".", "last_traceback", ")", "dbg", ".", "start", "(", ")" ]
use our custom debugger .
train
false
46,616
def posixize_path(rel_path): return rel_path.replace(os.sep, u'/')
[ "def", "posixize_path", "(", "rel_path", ")", ":", "return", "rel_path", ".", "replace", "(", "os", ".", "sep", ",", "u'/'", ")" ]
use / as path separator .
train
false
46,618
def _get_cache_key(shop): return (str(u'%s:%s') % (_SHOP_CONF_NAMESPACE, (shop.pk if shop else 0)))
[ "def", "_get_cache_key", "(", "shop", ")", ":", "return", "(", "str", "(", "u'%s:%s'", ")", "%", "(", "_SHOP_CONF_NAMESPACE", ",", "(", "shop", ".", "pk", "if", "shop", "else", "0", ")", ")", ")" ]
get global or shop specific cache key .
train
false
46,619
def resource_create_object(request, resource_cls, uri): (resource_name, matchdict) = view_lookup(request, uri) fakerequest = build_request(request, {'method': 'PUT', 'path': uri}) fakerequest.matchdict = matchdict fakerequest.bound_data = request.bound_data fakerequest.authn_type = request.authn_type fakerequest.selected_userid = request.selected_userid fakerequest.errors = request.errors fakerequest.current_resource_name = resource_name obj_id = matchdict['id'] context = RouteFactory(fakerequest) context.resource_name = resource_name resource = resource_cls(fakerequest, context) if (not resource.model.id_generator.match(obj_id)): error_details = {'location': 'path', 'description': ('Invalid %s id' % resource_name)} raise_invalid(resource.request, **error_details) data = {'id': obj_id} try: obj = resource.model.create_record(data) resource.postprocess(data, action=ACTIONS.CREATE) except storage_exceptions.UnicityError as e: obj = e.record return obj
[ "def", "resource_create_object", "(", "request", ",", "resource_cls", ",", "uri", ")", ":", "(", "resource_name", ",", "matchdict", ")", "=", "view_lookup", "(", "request", ",", "uri", ")", "fakerequest", "=", "build_request", "(", "request", ",", "{", "'method'", ":", "'PUT'", ",", "'path'", ":", "uri", "}", ")", "fakerequest", ".", "matchdict", "=", "matchdict", "fakerequest", ".", "bound_data", "=", "request", ".", "bound_data", "fakerequest", ".", "authn_type", "=", "request", ".", "authn_type", "fakerequest", ".", "selected_userid", "=", "request", ".", "selected_userid", "fakerequest", ".", "errors", "=", "request", ".", "errors", "fakerequest", ".", "current_resource_name", "=", "resource_name", "obj_id", "=", "matchdict", "[", "'id'", "]", "context", "=", "RouteFactory", "(", "fakerequest", ")", "context", ".", "resource_name", "=", "resource_name", "resource", "=", "resource_cls", "(", "fakerequest", ",", "context", ")", "if", "(", "not", "resource", ".", "model", ".", "id_generator", ".", "match", "(", "obj_id", ")", ")", ":", "error_details", "=", "{", "'location'", ":", "'path'", ",", "'description'", ":", "(", "'Invalid %s id'", "%", "resource_name", ")", "}", "raise_invalid", "(", "resource", ".", "request", ",", "**", "error_details", ")", "data", "=", "{", "'id'", ":", "obj_id", "}", "try", ":", "obj", "=", "resource", ".", "model", ".", "create_record", "(", "data", ")", "resource", ".", "postprocess", "(", "data", ",", "action", "=", "ACTIONS", ".", "CREATE", ")", "except", "storage_exceptions", ".", "UnicityError", "as", "e", ":", "obj", "=", "e", ".", "record", "return", "obj" ]
in the default bucket .
train
false
46,620
def test_example1(): code = Function(vert_template) t1 = Function(transformScale) t2 = Function(transformZOffset) t3 = Function(transformScale) pos = Variable('attribute vec4 a_position') code['position'] = t1(t2(pos)) code['correction'] = t1(pos) code['endtransform'] = t3 code['nlights'] = '4' t1['scale'] = t2 t3['scale'] = (3.0, 4.0, 5.0) t2['offset'] = '1.0' code2 = Function(frag_template) code2['color'] = Varying('v_position') code['gl_PointSize'] = '3.0' code[code2['color']] = pos print code
[ "def", "test_example1", "(", ")", ":", "code", "=", "Function", "(", "vert_template", ")", "t1", "=", "Function", "(", "transformScale", ")", "t2", "=", "Function", "(", "transformZOffset", ")", "t3", "=", "Function", "(", "transformScale", ")", "pos", "=", "Variable", "(", "'attribute vec4 a_position'", ")", "code", "[", "'position'", "]", "=", "t1", "(", "t2", "(", "pos", ")", ")", "code", "[", "'correction'", "]", "=", "t1", "(", "pos", ")", "code", "[", "'endtransform'", "]", "=", "t3", "code", "[", "'nlights'", "]", "=", "'4'", "t1", "[", "'scale'", "]", "=", "t2", "t3", "[", "'scale'", "]", "=", "(", "3.0", ",", "4.0", ",", "5.0", ")", "t2", "[", "'offset'", "]", "=", "'1.0'", "code2", "=", "Function", "(", "frag_template", ")", "code2", "[", "'color'", "]", "=", "Varying", "(", "'v_position'", ")", "code", "[", "'gl_PointSize'", "]", "=", "'3.0'", "code", "[", "code2", "[", "'color'", "]", "]", "=", "pos", "print", "code" ]
just a few simple compositions .
train
false
46,621
def _sentences(text): delimiters = (u'[%s]' % u''.join([(u'\\' + d) for d in _SENTENCE_DELIMITERS])) sentences = re.split(delimiters, text.strip()) return [s.strip() for s in sentences if s.strip()]
[ "def", "_sentences", "(", "text", ")", ":", "delimiters", "=", "(", "u'[%s]'", "%", "u''", ".", "join", "(", "[", "(", "u'\\\\'", "+", "d", ")", "for", "d", "in", "_SENTENCE_DELIMITERS", "]", ")", ")", "sentences", "=", "re", ".", "split", "(", "delimiters", ",", "text", ".", "strip", "(", ")", ")", "return", "[", "s", ".", "strip", "(", ")", "for", "s", "in", "sentences", "if", "s", ".", "strip", "(", ")", "]" ]
splits a piece of text into sentences .
train
false
46,622
def imprint(env, input_arr): env.generate_input_data = (lambda _: input_arr)
[ "def", "imprint", "(", "env", ",", "input_arr", ")", ":", "env", ".", "generate_input_data", "=", "(", "lambda", "_", ":", "input_arr", ")" ]
monkey-patch the given environment so that when reset() is called .
train
false
46,623
def genome_generator(): return MutableSeq('1234', TestAlphabet())
[ "def", "genome_generator", "(", ")", ":", "return", "MutableSeq", "(", "'1234'", ",", "TestAlphabet", "(", ")", ")" ]
generate a genome for testing purposes .
train
false
46,625
def is_ST3(): return (sys.version_info >= (3, 0))
[ "def", "is_ST3", "(", ")", ":", "return", "(", "sys", ".", "version_info", ">=", "(", "3", ",", "0", ")", ")" ]
check if st3 based on python version .
train
false
46,626
def register_specify_shape_c_code(typ, code, version=(), c_support_code_apply=None): SpecifyShape.c_code_and_version[typ] = (code, version, c_support_code_apply)
[ "def", "register_specify_shape_c_code", "(", "typ", ",", "code", ",", "version", "=", "(", ")", ",", "c_support_code_apply", "=", "None", ")", ":", "SpecifyShape", ".", "c_code_and_version", "[", "typ", "]", "=", "(", "code", ",", "version", ",", "c_support_code_apply", ")" ]
tell specifyshape how to generate c code for a theano type .
train
false
46,628
def get_cache(): return requests.Session().cache
[ "def", "get_cache", "(", ")", ":", "return", "requests", ".", "Session", "(", ")", ".", "cache" ]
function to load a cache backend dynamically .
train
false
46,629
def validate_cum_func_with_skipna(skipna, args, kwargs, name): if (not is_bool(skipna)): args = ((skipna,) + args) skipna = True validate_cum_func(args, kwargs, fname=name) return skipna
[ "def", "validate_cum_func_with_skipna", "(", "skipna", ",", "args", ",", "kwargs", ",", "name", ")", ":", "if", "(", "not", "is_bool", "(", "skipna", ")", ")", ":", "args", "=", "(", "(", "skipna", ",", ")", "+", "args", ")", "skipna", "=", "True", "validate_cum_func", "(", "args", ",", "kwargs", ",", "fname", "=", "name", ")", "return", "skipna" ]
if this function is called via the numpy library .
train
true
46,630
def show_quota(tenant_id, profile=None): conn = _auth(profile) return conn.show_quota(tenant_id)
[ "def", "show_quota", "(", "tenant_id", ",", "profile", "=", "None", ")", ":", "conn", "=", "_auth", "(", "profile", ")", "return", "conn", ".", "show_quota", "(", "tenant_id", ")" ]
fetches information of a certain tenants quotas cli example: .
train
false
46,631
def _WX28_clipped_agg_as_bitmap(agg, bbox): (l, b, width, height) = bbox.get_bounds() r = (l + width) t = (b + height) srcBmp = wx.BitmapFromBufferRGBA(int(agg.width), int(agg.height), agg.buffer_rgba(0, 0)) srcDC = wx.MemoryDC() srcDC.SelectObject(srcBmp) destBmp = wx.EmptyBitmap(int(width), int(height)) destDC = wx.MemoryDC() destDC.SelectObject(destBmp) destDC.BeginDrawing() x = int(l) y = int((int(agg.height) - t)) destDC.Blit(0, 0, int(width), int(height), srcDC, x, y) destDC.EndDrawing() srcDC.SelectObject(wx.NullBitmap) destDC.SelectObject(wx.NullBitmap) return destBmp
[ "def", "_WX28_clipped_agg_as_bitmap", "(", "agg", ",", "bbox", ")", ":", "(", "l", ",", "b", ",", "width", ",", "height", ")", "=", "bbox", ".", "get_bounds", "(", ")", "r", "=", "(", "l", "+", "width", ")", "t", "=", "(", "b", "+", "height", ")", "srcBmp", "=", "wx", ".", "BitmapFromBufferRGBA", "(", "int", "(", "agg", ".", "width", ")", ",", "int", "(", "agg", ".", "height", ")", ",", "agg", ".", "buffer_rgba", "(", "0", ",", "0", ")", ")", "srcDC", "=", "wx", ".", "MemoryDC", "(", ")", "srcDC", ".", "SelectObject", "(", "srcBmp", ")", "destBmp", "=", "wx", ".", "EmptyBitmap", "(", "int", "(", "width", ")", ",", "int", "(", "height", ")", ")", "destDC", "=", "wx", ".", "MemoryDC", "(", ")", "destDC", ".", "SelectObject", "(", "destBmp", ")", "destDC", ".", "BeginDrawing", "(", ")", "x", "=", "int", "(", "l", ")", "y", "=", "int", "(", "(", "int", "(", "agg", ".", "height", ")", "-", "t", ")", ")", "destDC", ".", "Blit", "(", "0", ",", "0", ",", "int", "(", "width", ")", ",", "int", "(", "height", ")", ",", "srcDC", ",", "x", ",", "y", ")", "destDC", ".", "EndDrawing", "(", ")", "srcDC", ".", "SelectObject", "(", "wx", ".", "NullBitmap", ")", "destDC", ".", "SelectObject", "(", "wx", ".", "NullBitmap", ")", "return", "destBmp" ]
convert the region of a the agg buffer bounded by bbox to a wx .
train
true
46,632
def refactor(source, fixer_names, ignore=None, filename=u''): from lib2to3 import pgen2 try: new_text = refactor_with_2to3(source, fixer_names=fixer_names, filename=filename) except (pgen2.parse.ParseError, SyntaxError, UnicodeDecodeError, UnicodeEncodeError): return source if ignore: if ((ignore in new_text) and (ignore not in source)): return source return new_text
[ "def", "refactor", "(", "source", ",", "fixer_names", ",", "ignore", "=", "None", ",", "filename", "=", "u''", ")", ":", "from", "lib2to3", "import", "pgen2", "try", ":", "new_text", "=", "refactor_with_2to3", "(", "source", ",", "fixer_names", "=", "fixer_names", ",", "filename", "=", "filename", ")", "except", "(", "pgen2", ".", "parse", ".", "ParseError", ",", "SyntaxError", ",", "UnicodeDecodeError", ",", "UnicodeEncodeError", ")", ":", "return", "source", "if", "ignore", ":", "if", "(", "(", "ignore", "in", "new_text", ")", "and", "(", "ignore", "not", "in", "source", ")", ")", ":", "return", "source", "return", "new_text" ]
return refactored code using lib2to3 .
train
true
46,633
def acl_checks(user): return Check.objects.filter(project_id__in=Project.objects.get_acl_ids(user))
[ "def", "acl_checks", "(", "user", ")", ":", "return", "Check", ".", "objects", ".", "filter", "(", "project_id__in", "=", "Project", ".", "objects", ".", "get_acl_ids", "(", "user", ")", ")" ]
filter checks by acl .
train
false
46,635
def xrdf(request, template_name='authopenid/yadis.xrdf'): url_host = get_url_host(request) return_to = [('%s%s' % (url_host, reverse('user_complete_signin')))] response = render(template_name, {'return_to': return_to}, context_instance=RequestContext(request)) response['Content-Type'] = 'application/xrds+xml' response['X-XRDS-Location'] = request.build_absolute_uri(reverse('oid_xrdf')) return response
[ "def", "xrdf", "(", "request", ",", "template_name", "=", "'authopenid/yadis.xrdf'", ")", ":", "url_host", "=", "get_url_host", "(", "request", ")", "return_to", "=", "[", "(", "'%s%s'", "%", "(", "url_host", ",", "reverse", "(", "'user_complete_signin'", ")", ")", ")", "]", "response", "=", "render", "(", "template_name", ",", "{", "'return_to'", ":", "return_to", "}", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ")", "response", "[", "'Content-Type'", "]", "=", "'application/xrds+xml'", "response", "[", "'X-XRDS-Location'", "]", "=", "request", ".", "build_absolute_uri", "(", "reverse", "(", "'oid_xrdf'", ")", ")", "return", "response" ]
view used to process the xrdf file .
train
false
46,638
def OpenKey(key, sub_key): regopenkeyex = advapi32['RegOpenKeyExW'] regopenkeyex.restype = ctypes.c_long regopenkeyex.argtypes = [ctypes.c_void_p, ctypes.c_wchar_p, ctypes.c_ulong, ctypes.c_ulong, ctypes.POINTER(ctypes.c_void_p)] new_key = KeyHandle() rc = regopenkeyex(key.handle, sub_key, 0, KEY_READ, ctypes.cast(ctypes.byref(new_key.handle), ctypes.POINTER(ctypes.c_void_p))) if (rc != ERROR_SUCCESS): raise ctypes.WinError(2) return new_key
[ "def", "OpenKey", "(", "key", ",", "sub_key", ")", ":", "regopenkeyex", "=", "advapi32", "[", "'RegOpenKeyExW'", "]", "regopenkeyex", ".", "restype", "=", "ctypes", ".", "c_long", "regopenkeyex", ".", "argtypes", "=", "[", "ctypes", ".", "c_void_p", ",", "ctypes", ".", "c_wchar_p", ",", "ctypes", ".", "c_ulong", ",", "ctypes", ".", "c_ulong", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_void_p", ")", "]", "new_key", "=", "KeyHandle", "(", ")", "rc", "=", "regopenkeyex", "(", "key", ".", "handle", ",", "sub_key", ",", "0", ",", "KEY_READ", ",", "ctypes", ".", "cast", "(", "ctypes", ".", "byref", "(", "new_key", ".", "handle", ")", ",", "ctypes", ".", "POINTER", "(", "ctypes", ".", "c_void_p", ")", ")", ")", "if", "(", "rc", "!=", "ERROR_SUCCESS", ")", ":", "raise", "ctypes", ".", "WinError", "(", "2", ")", "return", "new_key" ]
this calls the windows openkeyex function in a unicode safe way .
train
true
46,644
def get_colors(palette, funcs): palettes = import_required('bokeh.palettes', _BOKEH_MISSING_MSG) tz = import_required('toolz', _TOOLZ_MISSING_MSG) unique_funcs = list(sorted(tz.unique(funcs))) n_funcs = len(unique_funcs) palette_lookup = palettes.all_palettes[palette] keys = list(sorted(palette_lookup.keys())) index = keys[min(bisect_left(keys, n_funcs), (len(keys) - 1))] palette = palette_lookup[index] palette = list(tz.unique(palette)) if (len(palette) > n_funcs): random.Random(42).shuffle(palette) color_lookup = dict(zip(unique_funcs, cycle(palette))) return [color_lookup[n] for n in funcs]
[ "def", "get_colors", "(", "palette", ",", "funcs", ")", ":", "palettes", "=", "import_required", "(", "'bokeh.palettes'", ",", "_BOKEH_MISSING_MSG", ")", "tz", "=", "import_required", "(", "'toolz'", ",", "_TOOLZ_MISSING_MSG", ")", "unique_funcs", "=", "list", "(", "sorted", "(", "tz", ".", "unique", "(", "funcs", ")", ")", ")", "n_funcs", "=", "len", "(", "unique_funcs", ")", "palette_lookup", "=", "palettes", ".", "all_palettes", "[", "palette", "]", "keys", "=", "list", "(", "sorted", "(", "palette_lookup", ".", "keys", "(", ")", ")", ")", "index", "=", "keys", "[", "min", "(", "bisect_left", "(", "keys", ",", "n_funcs", ")", ",", "(", "len", "(", "keys", ")", "-", "1", ")", ")", "]", "palette", "=", "palette_lookup", "[", "index", "]", "palette", "=", "list", "(", "tz", ".", "unique", "(", "palette", ")", ")", "if", "(", "len", "(", "palette", ")", ">", "n_funcs", ")", ":", "random", ".", "Random", "(", "42", ")", ".", "shuffle", "(", "palette", ")", "color_lookup", "=", "dict", "(", "zip", "(", "unique_funcs", ",", "cycle", "(", "palette", ")", ")", ")", "return", "[", "color_lookup", "[", "n", "]", "for", "n", "in", "funcs", "]" ]
returns k colours selected by the colorspiral object .
train
false
46,645
def django_find_root_dir(): from ...config import CONF manage_py = CONF['main_script'] manage_dir = os.path.dirname(os.path.abspath(manage_py)) settings_dir = None files = set(os.listdir(manage_dir)) if ((('settings.py' in files) or ('settings' in files)) and ('urls.py' in files)): settings_dir = manage_dir else: for f in files: if os.path.isdir(os.path.join(manage_dir, f)): subfiles = os.listdir(os.path.join(manage_dir, f)) if ((('settings.py' in subfiles) or ('settings' in subfiles)) and ('urls.py' in subfiles)): settings_dir = os.path.join(manage_dir, f) break return settings_dir
[ "def", "django_find_root_dir", "(", ")", ":", "from", "...", "config", "import", "CONF", "manage_py", "=", "CONF", "[", "'main_script'", "]", "manage_dir", "=", "os", ".", "path", ".", "dirname", "(", "os", ".", "path", ".", "abspath", "(", "manage_py", ")", ")", "settings_dir", "=", "None", "files", "=", "set", "(", "os", ".", "listdir", "(", "manage_dir", ")", ")", "if", "(", "(", "(", "'settings.py'", "in", "files", ")", "or", "(", "'settings'", "in", "files", ")", ")", "and", "(", "'urls.py'", "in", "files", ")", ")", ":", "settings_dir", "=", "manage_dir", "else", ":", "for", "f", "in", "files", ":", "if", "os", ".", "path", ".", "isdir", "(", "os", ".", "path", ".", "join", "(", "manage_dir", ",", "f", ")", ")", ":", "subfiles", "=", "os", ".", "listdir", "(", "os", ".", "path", ".", "join", "(", "manage_dir", ",", "f", ")", ")", "if", "(", "(", "(", "'settings.py'", "in", "subfiles", ")", "or", "(", "'settings'", "in", "subfiles", ")", ")", "and", "(", "'urls.py'", "in", "subfiles", ")", ")", ":", "settings_dir", "=", "os", ".", "path", ".", "join", "(", "manage_dir", ",", "f", ")", "break", "return", "settings_dir" ]
return path to directory that contains main django files .
train
false
46,646
def not_system_specific_scripts(system=None): all_scripts = [fil for syst in ALL_SYSTEMS for fil in system_specific_scripts(syst)] return list((set(all_scripts) - set(system_specific_scripts(system))))
[ "def", "not_system_specific_scripts", "(", "system", "=", "None", ")", ":", "all_scripts", "=", "[", "fil", "for", "syst", "in", "ALL_SYSTEMS", "for", "fil", "in", "system_specific_scripts", "(", "syst", ")", "]", "return", "list", "(", "(", "set", "(", "all_scripts", ")", "-", "set", "(", "system_specific_scripts", "(", "system", ")", ")", ")", ")" ]
returns a list of all platform-specific scripts that are on other systems and not on this one .
train
false
46,647
def create_datacenter(kwargs=None, call=None): if (call != 'function'): raise SaltCloudSystemExit('The create_datacenter function must be called with -f or --function.') datacenter_name = (kwargs.get('name') if (kwargs and ('name' in kwargs)) else None) if (not datacenter_name): raise SaltCloudSystemExit('You must specify name of the new datacenter to be created.') if ((len(datacenter_name) >= 80) or (len(datacenter_name) <= 0)): raise SaltCloudSystemExit('The datacenter name must be a non empty string of less than 80 characters.') si = _get_si() datacenter_ref = salt.utils.vmware.get_mor_by_property(si, vim.Datacenter, datacenter_name) if datacenter_ref: return {datacenter_name: 'datacenter already exists'} folder = si.content.rootFolder if isinstance(folder, vim.Folder): try: folder.CreateDatacenter(name=datacenter_name) except Exception as exc: log.error('Error creating datacenter {0}: {1}'.format(datacenter_name, exc), exc_info_on_loglevel=logging.DEBUG) return False log.debug('Created datacenter {0}'.format(datacenter_name)) return {datacenter_name: 'created'} return False
[ "def", "create_datacenter", "(", "kwargs", "=", "None", ",", "call", "=", "None", ")", ":", "if", "(", "call", "!=", "'function'", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The create_datacenter function must be called with -f or --function.'", ")", "datacenter_name", "=", "(", "kwargs", ".", "get", "(", "'name'", ")", "if", "(", "kwargs", "and", "(", "'name'", "in", "kwargs", ")", ")", "else", "None", ")", "if", "(", "not", "datacenter_name", ")", ":", "raise", "SaltCloudSystemExit", "(", "'You must specify name of the new datacenter to be created.'", ")", "if", "(", "(", "len", "(", "datacenter_name", ")", ">=", "80", ")", "or", "(", "len", "(", "datacenter_name", ")", "<=", "0", ")", ")", ":", "raise", "SaltCloudSystemExit", "(", "'The datacenter name must be a non empty string of less than 80 characters.'", ")", "si", "=", "_get_si", "(", ")", "datacenter_ref", "=", "salt", ".", "utils", ".", "vmware", ".", "get_mor_by_property", "(", "si", ",", "vim", ".", "Datacenter", ",", "datacenter_name", ")", "if", "datacenter_ref", ":", "return", "{", "datacenter_name", ":", "'datacenter already exists'", "}", "folder", "=", "si", ".", "content", ".", "rootFolder", "if", "isinstance", "(", "folder", ",", "vim", ".", "Folder", ")", ":", "try", ":", "folder", ".", "CreateDatacenter", "(", "name", "=", "datacenter_name", ")", "except", "Exception", "as", "exc", ":", "log", ".", "error", "(", "'Error creating datacenter {0}: {1}'", ".", "format", "(", "datacenter_name", ",", "exc", ")", ",", "exc_info_on_loglevel", "=", "logging", ".", "DEBUG", ")", "return", "False", "log", ".", "debug", "(", "'Created datacenter {0}'", ".", "format", "(", "datacenter_name", ")", ")", "return", "{", "datacenter_name", ":", "'created'", "}", "return", "False" ]
create a new data center in this vmware environment cli example: .
train
true
46,648
def test_merge_opt_runtime(): x = T.dvector() for i in xrange(50): if i: r = (r + (r / 10)) else: r = x t = time.time() f = theano.function([x], r, mode='FAST_COMPILE') dt = (time.time() - t) assert (dt < 5.0), dt
[ "def", "test_merge_opt_runtime", "(", ")", ":", "x", "=", "T", ".", "dvector", "(", ")", "for", "i", "in", "xrange", "(", "50", ")", ":", "if", "i", ":", "r", "=", "(", "r", "+", "(", "r", "/", "10", ")", ")", "else", ":", "r", "=", "x", "t", "=", "time", ".", "time", "(", ")", "f", "=", "theano", ".", "function", "(", "[", "x", "]", ",", "r", ",", "mode", "=", "'FAST_COMPILE'", ")", "dt", "=", "(", "time", ".", "time", "(", ")", "-", "t", ")", "assert", "(", "dt", "<", "5.0", ")", ",", "dt" ]
in the original merge optimization .
train
false
46,649
def status_webapp(app, url='http://localhost:8080/manager', timeout=180): webapps = ls(url, timeout=timeout) for i in webapps: if (i == app): return webapps[i]['mode'] return 'missing'
[ "def", "status_webapp", "(", "app", ",", "url", "=", "'http://localhost:8080/manager'", ",", "timeout", "=", "180", ")", ":", "webapps", "=", "ls", "(", "url", ",", "timeout", "=", "timeout", ")", "for", "i", "in", "webapps", ":", "if", "(", "i", "==", "app", ")", ":", "return", "webapps", "[", "i", "]", "[", "'mode'", "]", "return", "'missing'" ]
return the status of the webapp app the webapp context path url : URL the url of the server manager webapp timeout : 180 timeout for http request cli examples: .
train
true
46,651
def load_single_xblock(request, user_id, course_id, usage_key_string, course=None): usage_key = UsageKey.from_string(usage_key_string) course_key = CourseKey.from_string(course_id) usage_key = usage_key.map_into_course(course_key) user = User.objects.get(id=user_id) field_data_cache = FieldDataCache.cache_for_descriptor_descendents(course_key, user, modulestore().get_item(usage_key), depth=0) instance = get_module(user, request, usage_key, field_data_cache, grade_bucket_type='xqueue', course=course) if (instance is None): msg = 'No module {0} for user {1}--access denied?'.format(usage_key_string, user) log.debug(msg) raise Http404 return instance
[ "def", "load_single_xblock", "(", "request", ",", "user_id", ",", "course_id", ",", "usage_key_string", ",", "course", "=", "None", ")", ":", "usage_key", "=", "UsageKey", ".", "from_string", "(", "usage_key_string", ")", "course_key", "=", "CourseKey", ".", "from_string", "(", "course_id", ")", "usage_key", "=", "usage_key", ".", "map_into_course", "(", "course_key", ")", "user", "=", "User", ".", "objects", ".", "get", "(", "id", "=", "user_id", ")", "field_data_cache", "=", "FieldDataCache", ".", "cache_for_descriptor_descendents", "(", "course_key", ",", "user", ",", "modulestore", "(", ")", ".", "get_item", "(", "usage_key", ")", ",", "depth", "=", "0", ")", "instance", "=", "get_module", "(", "user", ",", "request", ",", "usage_key", ",", "field_data_cache", ",", "grade_bucket_type", "=", "'xqueue'", ",", "course", "=", "course", ")", "if", "(", "instance", "is", "None", ")", ":", "msg", "=", "'No module {0} for user {1}--access denied?'", ".", "format", "(", "usage_key_string", ",", "user", ")", "log", ".", "debug", "(", "msg", ")", "raise", "Http404", "return", "instance" ]
load a single xblock identified by usage_key_string .
train
false
46,652
@check_feature_enabled(feature_name='ENTRANCE_EXAMS') def update_entrance_exam(request, course_key, exam_data): course = modulestore().get_course(course_key) if course: metadata = exam_data CourseMetadata.update_from_dict(metadata, course, request.user)
[ "@", "check_feature_enabled", "(", "feature_name", "=", "'ENTRANCE_EXAMS'", ")", "def", "update_entrance_exam", "(", "request", ",", "course_key", ",", "exam_data", ")", ":", "course", "=", "modulestore", "(", ")", ".", "get_course", "(", "course_key", ")", "if", "course", ":", "metadata", "=", "exam_data", "CourseMetadata", ".", "update_from_dict", "(", "metadata", ",", "course", ",", "request", ".", "user", ")" ]
operation to update course fields pertaining to entrance exams the update operation is not currently exposed directly via the api because the operation is not exposed directly .
train
false
46,653
def get_emerge_default_opts(): return get_var('EMERGE_DEFAULT_OPTS')
[ "def", "get_emerge_default_opts", "(", ")", ":", "return", "get_var", "(", "'EMERGE_DEFAULT_OPTS'", ")" ]
get the value of emerge_default_opts variable in the make .
train
false
46,654
def plugin(): return SwapQuotes
[ "def", "plugin", "(", ")", ":", "return", "SwapQuotes" ]
make plugin available .
train
false
46,655
@image_comparison(baseline_images=[u'EventCollection_plot__set_linelength']) def test__EventCollection__set_linelength(): (splt, coll, props) = generate_EventCollection_plot() new_linelength = 15 coll.set_linelength(new_linelength) assert_equal(new_linelength, coll.get_linelength()) check_segments(coll, props[u'positions'], new_linelength, props[u'lineoffset'], props[u'orientation']) splt.set_title(u'EventCollection: set_linelength') splt.set_ylim((-20), 20)
[ "@", "image_comparison", "(", "baseline_images", "=", "[", "u'EventCollection_plot__set_linelength'", "]", ")", "def", "test__EventCollection__set_linelength", "(", ")", ":", "(", "splt", ",", "coll", ",", "props", ")", "=", "generate_EventCollection_plot", "(", ")", "new_linelength", "=", "15", "coll", ".", "set_linelength", "(", "new_linelength", ")", "assert_equal", "(", "new_linelength", ",", "coll", ".", "get_linelength", "(", ")", ")", "check_segments", "(", "coll", ",", "props", "[", "u'positions'", "]", ",", "new_linelength", ",", "props", "[", "u'lineoffset'", "]", ",", "props", "[", "u'orientation'", "]", ")", "splt", ".", "set_title", "(", "u'EventCollection: set_linelength'", ")", "splt", ".", "set_ylim", "(", "(", "-", "20", ")", ",", "20", ")" ]
check to make sure set_linelength works properly .
train
false
46,656
def pportD1(state): global dataReg if (state == 0): dataReg = (dataReg & (~ 2)) else: dataReg = (dataReg | 2) port.DlPortWritePortUchar(baseAddress, dataReg)
[ "def", "pportD1", "(", "state", ")", ":", "global", "dataReg", "if", "(", "state", "==", "0", ")", ":", "dataReg", "=", "(", "dataReg", "&", "(", "~", "2", ")", ")", "else", ":", "dataReg", "=", "(", "dataReg", "|", "2", ")", "port", ".", "DlPortWritePortUchar", "(", "baseAddress", ",", "dataReg", ")" ]
toggle data register d1 bit .
train
false
46,660
def _check_avail(cmd): if isinstance(cmd, list): cmd = ' '.join([(str(x) if (not isinstance(x, six.string_types)) else x) for x in cmd]) bret = True wret = False if __salt__['config.get']('cmd_blacklist_glob'): blist = __salt__['config.get']('cmd_blacklist_glob', []) for comp in blist: if fnmatch.fnmatch(cmd, comp): bret = False if __salt__['config.get']('cmd_whitelist_glob', []): blist = __salt__['config.get']('cmd_whitelist_glob', []) for comp in blist: if fnmatch.fnmatch(cmd, comp): wret = True break else: wret = True return (bret and wret)
[ "def", "_check_avail", "(", "cmd", ")", ":", "if", "isinstance", "(", "cmd", ",", "list", ")", ":", "cmd", "=", "' '", ".", "join", "(", "[", "(", "str", "(", "x", ")", "if", "(", "not", "isinstance", "(", "x", ",", "six", ".", "string_types", ")", ")", "else", "x", ")", "for", "x", "in", "cmd", "]", ")", "bret", "=", "True", "wret", "=", "False", "if", "__salt__", "[", "'config.get'", "]", "(", "'cmd_blacklist_glob'", ")", ":", "blist", "=", "__salt__", "[", "'config.get'", "]", "(", "'cmd_blacklist_glob'", ",", "[", "]", ")", "for", "comp", "in", "blist", ":", "if", "fnmatch", ".", "fnmatch", "(", "cmd", ",", "comp", ")", ":", "bret", "=", "False", "if", "__salt__", "[", "'config.get'", "]", "(", "'cmd_whitelist_glob'", ",", "[", "]", ")", ":", "blist", "=", "__salt__", "[", "'config.get'", "]", "(", "'cmd_whitelist_glob'", ",", "[", "]", ")", "for", "comp", "in", "blist", ":", "if", "fnmatch", ".", "fnmatch", "(", "cmd", ",", "comp", ")", ":", "wret", "=", "True", "break", "else", ":", "wret", "=", "True", "return", "(", "bret", "and", "wret", ")" ]
check to see if the given command can be run .
train
true
46,661
def process_otu_table_sample_ids(sample_id_fields): if (len(sample_id_fields) == 0): raise ValueError(('Error parsing sample ID line in OTU table. Fields are %s' % ' '.join(sample_id_fields))) last_column_header = sample_id_fields[(-1)].strip().replace(' ', '').lower() if (last_column_header in ['consensuslineage', 'otumetadata', 'taxonomy']): has_metadata = True sample_ids = sample_id_fields[:(-1)] else: has_metadata = False sample_ids = sample_id_fields return (sample_ids, has_metadata)
[ "def", "process_otu_table_sample_ids", "(", "sample_id_fields", ")", ":", "if", "(", "len", "(", "sample_id_fields", ")", "==", "0", ")", ":", "raise", "ValueError", "(", "(", "'Error parsing sample ID line in OTU table. Fields are %s'", "%", "' '", ".", "join", "(", "sample_id_fields", ")", ")", ")", "last_column_header", "=", "sample_id_fields", "[", "(", "-", "1", ")", "]", ".", "strip", "(", ")", ".", "replace", "(", "' '", ",", "''", ")", ".", "lower", "(", ")", "if", "(", "last_column_header", "in", "[", "'consensuslineage'", ",", "'otumetadata'", ",", "'taxonomy'", "]", ")", ":", "has_metadata", "=", "True", "sample_ids", "=", "sample_id_fields", "[", ":", "(", "-", "1", ")", "]", "else", ":", "has_metadata", "=", "False", "sample_ids", "=", "sample_id_fields", "return", "(", "sample_ids", ",", "has_metadata", ")" ]
process the sample ids line of an otu table .
train
false
46,662
@register.inclusion_tag('customer/history/recently_viewed_products.html', takes_context=True) def recently_viewed_products(context, current_product=None): request = context['request'] products = history.get(request) if current_product: products = [p for p in products if (p != current_product)] return {'products': products, 'request': request}
[ "@", "register", ".", "inclusion_tag", "(", "'customer/history/recently_viewed_products.html'", ",", "takes_context", "=", "True", ")", "def", "recently_viewed_products", "(", "context", ",", "current_product", "=", "None", ")", ":", "request", "=", "context", "[", "'request'", "]", "products", "=", "history", ".", "get", "(", "request", ")", "if", "current_product", ":", "products", "=", "[", "p", "for", "p", "in", "products", "if", "(", "p", "!=", "current_product", ")", "]", "return", "{", "'products'", ":", "products", ",", "'request'", ":", "request", "}" ]
inclusion tag listing the most recently viewed products .
train
false
46,663
def _strip_object(key): if (hasattr(key, 'version_agnostic') and hasattr(key, 'for_branch')): return key.for_branch(None).version_agnostic() else: return key
[ "def", "_strip_object", "(", "key", ")", ":", "if", "(", "hasattr", "(", "key", ",", "'version_agnostic'", ")", "and", "hasattr", "(", "key", ",", "'for_branch'", ")", ")", ":", "return", "key", ".", "for_branch", "(", "None", ")", ".", "version_agnostic", "(", ")", "else", ":", "return", "key" ]
strips branch and version info if the given key supports those attributes .
train
true
46,664
def get_stack_dir(stack, env=None): _init_rosstack(env=env) try: return _rosstack.get_path(stack) except rospkg.ResourceNotFound: raise InvalidROSStackException(stack)
[ "def", "get_stack_dir", "(", "stack", ",", "env", "=", "None", ")", ":", "_init_rosstack", "(", "env", "=", "env", ")", "try", ":", "return", "_rosstack", ".", "get_path", "(", "stack", ")", "except", "rospkg", ".", "ResourceNotFound", ":", "raise", "InvalidROSStackException", "(", "stack", ")" ]
get the directory of a ros stack .
train
false
46,665
def _taradd(func, tar_file, name): with tempfile.NamedTemporaryFile('wb', delete=False) as temp_file: func(temp_file) temp_file.close() tar_file.add(temp_file.name, arcname=name) if os.path.isfile(temp_file.name): os.remove(temp_file.name)
[ "def", "_taradd", "(", "func", ",", "tar_file", ",", "name", ")", ":", "with", "tempfile", ".", "NamedTemporaryFile", "(", "'wb'", ",", "delete", "=", "False", ")", "as", "temp_file", ":", "func", "(", "temp_file", ")", "temp_file", ".", "close", "(", ")", "tar_file", ".", "add", "(", "temp_file", ".", "name", ",", "arcname", "=", "name", ")", "if", "os", ".", "path", ".", "isfile", "(", "temp_file", ".", "name", ")", ":", "os", ".", "remove", "(", "temp_file", ".", "name", ")" ]
adds elements dumped by the function func to a tar_file .
train
false
46,666
def _fix_results(results): results = [obj._results_dict for obj in results] for obj in results: for key in ('indexed_on', 'created', 'updated'): if ((key in obj) and (not isinstance(obj[key], datetime))): obj[key] = datetime.fromtimestamp(int(obj[key])) return results
[ "def", "_fix_results", "(", "results", ")", ":", "results", "=", "[", "obj", ".", "_results_dict", "for", "obj", "in", "results", "]", "for", "obj", "in", "results", ":", "for", "key", "in", "(", "'indexed_on'", ",", "'created'", ",", "'updated'", ")", ":", "if", "(", "(", "key", "in", "obj", ")", "and", "(", "not", "isinstance", "(", "obj", "[", "key", "]", ",", "datetime", ")", ")", ")", ":", "obj", "[", "key", "]", "=", "datetime", ".", "fromtimestamp", "(", "int", "(", "obj", "[", "key", "]", ")", ")", "return", "results" ]
fixes up the s results for better templating 1 .
train
false
46,667
@nodes_or_number(0) def random_geometric_graph(n, radius, dim=2, pos=None, p=2): (n_name, nodes) = n G = nx.Graph() G.name = 'random_geometric_graph({}, {}, {})'.format(n, radius, dim) G.add_nodes_from(nodes) if (pos is None): pos = {v: [random.random() for i in range(dim)] for v in nodes} nx.set_node_attributes(G, 'pos', pos) if _is_scipy_available: _fast_construct_edges(G, radius, p) else: _slow_construct_edges(G, radius, p) return G
[ "@", "nodes_or_number", "(", "0", ")", "def", "random_geometric_graph", "(", "n", ",", "radius", ",", "dim", "=", "2", ",", "pos", "=", "None", ",", "p", "=", "2", ")", ":", "(", "n_name", ",", "nodes", ")", "=", "n", "G", "=", "nx", ".", "Graph", "(", ")", "G", ".", "name", "=", "'random_geometric_graph({}, {}, {})'", ".", "format", "(", "n", ",", "radius", ",", "dim", ")", "G", ".", "add_nodes_from", "(", "nodes", ")", "if", "(", "pos", "is", "None", ")", ":", "pos", "=", "{", "v", ":", "[", "random", ".", "random", "(", ")", "for", "i", "in", "range", "(", "dim", ")", "]", "for", "v", "in", "nodes", "}", "nx", ".", "set_node_attributes", "(", "G", ",", "'pos'", ",", "pos", ")", "if", "_is_scipy_available", ":", "_fast_construct_edges", "(", "G", ",", "radius", ",", "p", ")", "else", ":", "_slow_construct_edges", "(", "G", ",", "radius", ",", "p", ")", "return", "G" ]
returns a random geometric graph in the unit cube .
train
false
46,668
def href(*args, **kw): result = [(((request and request.script_root) or '') + '/')] for (idx, arg) in enumerate(args): result.append((((idx and '/') or '') + url_quote(arg))) if kw: result.append(('?' + url_encode(kw))) return ''.join(result)
[ "def", "href", "(", "*", "args", ",", "**", "kw", ")", ":", "result", "=", "[", "(", "(", "(", "request", "and", "request", ".", "script_root", ")", "or", "''", ")", "+", "'/'", ")", "]", "for", "(", "idx", ",", "arg", ")", "in", "enumerate", "(", "args", ")", ":", "result", ".", "append", "(", "(", "(", "(", "idx", "and", "'/'", ")", "or", "''", ")", "+", "url_quote", "(", "arg", ")", ")", ")", "if", "kw", ":", "result", ".", "append", "(", "(", "'?'", "+", "url_encode", "(", "kw", ")", ")", ")", "return", "''", ".", "join", "(", "result", ")" ]
simple function for url generation .
train
true
46,669
def init_rooter(): cuckoo = Config() if ((not Config('vpn').vpn.enabled) and (cuckoo.routing.route == 'none')): return cuckoo = Config() s = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) try: s.connect(cuckoo.cuckoo.rooter) except socket.error as e: if (e.strerror == 'No such file or directory'): raise CuckooStartupError('The rooter is required but it is either not running or it has been configured to a different Unix socket path. (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).') if (e.strerror == 'Connection refused'): raise CuckooStartupError("The rooter is required but we can't connect to it as the rooter is not actually running. (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).") if (e.strerror == 'Permission denied'): raise CuckooStartupError("The rooter is required but we can't connect to it due to incorrect permissions. Did you assign it the correct group? (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).") raise CuckooStartupError(('Unknown rooter error: %s' % e)) rooter('forward_drop')
[ "def", "init_rooter", "(", ")", ":", "cuckoo", "=", "Config", "(", ")", "if", "(", "(", "not", "Config", "(", "'vpn'", ")", ".", "vpn", ".", "enabled", ")", "and", "(", "cuckoo", ".", "routing", ".", "route", "==", "'none'", ")", ")", ":", "return", "cuckoo", "=", "Config", "(", ")", "s", "=", "socket", ".", "socket", "(", "socket", ".", "AF_UNIX", ",", "socket", ".", "SOCK_DGRAM", ")", "try", ":", "s", ".", "connect", "(", "cuckoo", ".", "cuckoo", ".", "rooter", ")", "except", "socket", ".", "error", "as", "e", ":", "if", "(", "e", ".", "strerror", "==", "'No such file or directory'", ")", ":", "raise", "CuckooStartupError", "(", "'The rooter is required but it is either not running or it has been configured to a different Unix socket path. (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).'", ")", "if", "(", "e", ".", "strerror", "==", "'Connection refused'", ")", ":", "raise", "CuckooStartupError", "(", "\"The rooter is required but we can't connect to it as the rooter is not actually running. (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).\"", ")", "if", "(", "e", ".", "strerror", "==", "'Permission denied'", ")", ":", "raise", "CuckooStartupError", "(", "\"The rooter is required but we can't connect to it due to incorrect permissions. Did you assign it the correct group? (In order to disable the use of rooter, please set route and internet to none in cuckoo.conf and enabled to no in vpn.conf).\"", ")", "raise", "CuckooStartupError", "(", "(", "'Unknown rooter error: %s'", "%", "e", ")", ")", "rooter", "(", "'forward_drop'", ")" ]
if required .
train
false
46,670
def DNSServiceRegisterRecord(sdRef, flags,
                             interfaceIndex=kDNSServiceInterfaceIndexAny,
                             fullname=_NO_DEFAULT, rrtype=_NO_DEFAULT,
                             rrclass=kDNSServiceClass_IN, rdata=_NO_DEFAULT,
                             ttl=0, callBack=None):
    """Register an individual resource record on a connected DNSServiceRef.

    The required arguments (fullname, rrtype, rdata) are validated first,
    then the rdata payload is converted to a (length, pointer) pair for the
    C call.  The user-supplied callBack is wrapped in a ctypes-compatible
    trampoline; both the trampoline and the returned record reference are
    retained on sdRef so they stay alive as long as the service reference.
    """
    for required_arg in (fullname, rrtype, rdata):
        _NO_DEFAULT.check(required_arg)

    rdlen, rdata = _string_to_length_and_void_p(rdata)

    @_DNSServiceRegisterRecordReply
    def _trampoline(sdRef, RecordRef, flags, errorCode, context):
        # Forward to the Python-level callback when one was supplied.
        if callBack is not None:
            callBack(sdRef, RecordRef, flags, errorCode)

    _global_lock.acquire()
    try:
        RecordRef = _DNSServiceRegisterRecord(sdRef, flags, interfaceIndex,
                                              fullname, rrtype, rrclass,
                                              rdlen, rdata, ttl,
                                              _trampoline, None)
    finally:
        _global_lock.release()

    # Prevent the ctypes callback and record ref from being collected.
    sdRef._add_callback(_trampoline)
    sdRef._add_record_ref(RecordRef)
    return RecordRef
[ "def", "DNSServiceRegisterRecord", "(", "sdRef", ",", "flags", ",", "interfaceIndex", "=", "kDNSServiceInterfaceIndexAny", ",", "fullname", "=", "_NO_DEFAULT", ",", "rrtype", "=", "_NO_DEFAULT", ",", "rrclass", "=", "kDNSServiceClass_IN", ",", "rdata", "=", "_NO_DEFAULT", ",", "ttl", "=", "0", ",", "callBack", "=", "None", ")", ":", "_NO_DEFAULT", ".", "check", "(", "fullname", ")", "_NO_DEFAULT", ".", "check", "(", "rrtype", ")", "_NO_DEFAULT", ".", "check", "(", "rdata", ")", "(", "rdlen", ",", "rdata", ")", "=", "_string_to_length_and_void_p", "(", "rdata", ")", "@", "_DNSServiceRegisterRecordReply", "def", "_callback", "(", "sdRef", ",", "RecordRef", ",", "flags", ",", "errorCode", ",", "context", ")", ":", "if", "(", "callBack", "is", "not", "None", ")", ":", "callBack", "(", "sdRef", ",", "RecordRef", ",", "flags", ",", "errorCode", ")", "_global_lock", ".", "acquire", "(", ")", "try", ":", "RecordRef", "=", "_DNSServiceRegisterRecord", "(", "sdRef", ",", "flags", ",", "interfaceIndex", ",", "fullname", ",", "rrtype", ",", "rrclass", ",", "rdlen", ",", "rdata", ",", "ttl", ",", "_callback", ",", "None", ")", "finally", ":", "_global_lock", ".", "release", "(", ")", "sdRef", ".", "_add_callback", "(", "_callback", ")", "sdRef", ".", "_add_record_ref", "(", "RecordRef", ")", "return", "RecordRef" ]
register an individual resource record on a connected dnsserviceref .
train
false