Dataset schema (column name, dtype, observed value range):

| column | dtype | values |
|---|---|---|
| id | int32 | 0 to 252k |
| repo | string | lengths 7 to 55 |
| path | string | lengths 4 to 127 |
| func_name | string | lengths 1 to 88 |
| original_string | string | lengths 75 to 19.8k |
| language | string | 1 class |
| code | string | lengths 75 to 19.8k |
| code_tokens | list | |
| docstring | string | lengths 3 to 17.3k |
| docstring_tokens | list | |
| sha | string | length 40 |
| url | string | lengths 87 to 242 |
18,800
|
vals/umis
|
umis/umis.py
|
_extract_readnum
|
def _extract_readnum(read_dict):
"""Extract read numbers from old-style fastqs.
Handles read 1 and 2 specifications where naming is
readname/1 readname/2
"""
pat = re.compile(r"(?P<readnum>/\d+)$")
parts = pat.split(read_dict["name"])
if len(parts) == 3:
name, readnum, endofline = parts
read_dict["name"] = name
read_dict["readnum"] = readnum
else:
read_dict["readnum"] = ""
return read_dict
|
python
|
Extract read numbers from old-style fastqs.
Handles read 1 and 2 specifications where naming is
readname/1 readname/2
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L334-L348
|
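A note on the trick `_extract_readnum` relies on: when a `re.split` pattern contains a capturing group, the captured separator is kept in the result, so a trailing /1 or /2 splits into exactly three parts. A minimal standalone sketch (the read name is made up for illustration):

import re

pat = re.compile(r"(?P<readnum>/\d+)$")

# A match at the end of the string splits into [prefix, captured suffix, ''].
assert pat.split("HWI-ST1133:217/1") == ["HWI-ST1133:217", "/1", ""]
# No old-style suffix: re.split returns the whole string as a 1-element list,
# which is why the function checks len(parts) == 3.
assert pat.split("HWI-ST1133:217") == ["HWI-ST1133:217"]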
18,801
|
vals/umis
|
umis/umis.py
|
sparse
|
def sparse(csv, sparse):
''' Convert a CSV file to a sparse matrix with rows and column names
saved as companion files.
'''
import pandas as pd
df = pd.read_csv(csv, index_col=0, header=0)
pd.Series(df.index).to_csv(sparse + ".rownames", index=False)
pd.Series(df.columns.values).to_csv(sparse + ".colnames", index=False)
with open(sparse, "w+b") as out_handle:
scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(df))
|
python
|
Convert a CSV file to a sparse matrix with rows and column names
saved as companion files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L943-L952
|
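A rough illustration of why `sparse` writes companion files: MatrixMarket output stores only the numeric matrix, so row and column labels must be saved separately. A small self-contained sketch with a toy count table (pandas and scipy assumed installed):

import io

import pandas as pd
import scipy.io
import scipy.sparse

df = pd.DataFrame([[0, 2], [1, 0]],
                  index=["gene1", "gene2"], columns=["cellA", "cellB"])
buf = io.BytesIO()
# The .mtx payload carries coordinates and values only; index/columns are
# lost, hence the .rownames and .colnames sidecar files the command writes.
scipy.io.mmwrite(buf, scipy.sparse.csr_matrix(df.values))
print(buf.getvalue().decode("latin1"))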
18,802
|
vals/umis
|
umis/umis.py
|
cb_histogram
|
def cb_histogram(fastq, umi_histogram):
''' Counts the number of reads for each cellular barcode
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
cb_counter = collections.Counter()
umi_counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
cb = match['CB']
cb_counter[cb] += 1
if umi_histogram:
umi = match['MB']
umi_counter[(cb, umi)] += 1
for bc, count in cb_counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count))
if umi_histogram:
with open(umi_histogram, "w") as umi_handle:
for cbumi, count in umi_counter.most_common():
umi_handle.write('{}\t{}\t{}\n'.format(cbumi[0], cbumi[1], count))
|
python
|
Counts the number of reads for each cellular barcode
Expects formatted fastq files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L959-L985
|
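The counting core of `cb_histogram` is just `collections.Counter` plus `most_common`; a stripped-down sketch with hard-coded barcodes standing in for the parsed FASTQ:

import collections

cb_counter = collections.Counter(["AAAC", "TTTG", "AAAC", "AAAC", "TTTG"])
for bc, count in cb_counter.most_common():
    print("{}\t{}".format(bc, count))
# prints "AAAC\t3" then "TTTG\t2", most common first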
18,803
|
vals/umis
|
umis/umis.py
|
umi_histogram
|
def umi_histogram(fastq):
''' Counts the number of reads for each UMI
Expects formatted fastq files.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
counter = collections.Counter()
for read in read_fastq(fastq):
match = parser_re.search(read).groupdict()
counter[match['MB']] += 1
for bc, count in counter.most_common():
sys.stdout.write('{}\t{}\n'.format(bc, count))
|
python
|
Counts the number of reads for each UMI
Expects formatted fastq files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L989-L1004
|
18,804
|
vals/umis
|
umis/umis.py
|
get_cb_depth_set
|
def get_cb_depth_set(cb_histogram, cb_cutoff):
''' Returns a set of barcodes with a minimum number of reads
'''
cb_keep_set = set()
if not cb_histogram:
return cb_keep_set
with read_cbhistogram(cb_histogram) as fh:
cb_map = dict(p.strip().split() for p in fh)
cb_keep_set = set([k for k, v in cb_map.items() if int(v) > cb_cutoff])
logger.info('Keeping %d out of %d cellular barcodes.'
% (len(cb_keep_set), len(cb_map)))
return cb_keep_set
|
python
|
Returns a set of barcodes with a minimum number of reads
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1006-L1018
|
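The cutoff test in `get_cb_depth_set` is strictly greater-than, so barcodes seen exactly `cb_cutoff` times are dropped; a tiny in-memory sketch of the same filtering step:

cb_map = {"AAAC": "120", "TTTG": "45", "GGGA": "3"}  # values are strings, as parsed from the histogram file
cb_cutoff = 45
cb_keep_set = set(k for k, v in cb_map.items() if int(v) > cb_cutoff)
assert cb_keep_set == {"AAAC"}  # "TTTG" (exactly 45) is excluded by the strict >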
18,805
|
vals/umis
|
umis/umis.py
|
guess_depth_cutoff
|
def guess_depth_cutoff(cb_histogram):
''' Guesses at an appropriate barcode cutoff
'''
with read_cbhistogram(cb_histogram) as fh:
cb_vals = [int(p.strip().split()[1]) for p in fh]
histo = np.histogram(np.log10(cb_vals), bins=50)
vals = histo[0]
edges = histo[1]
mids = np.array([(edges[i] + edges[i+1])/2 for i in range(edges.size - 1)])
wdensity = vals * (10**mids) / sum(vals * (10**mids))
baseline = np.median(wdensity)
wdensity = list(wdensity)
# find highest density in upper half of barcode distribution
    peak = wdensity.index(max(wdensity[len(wdensity) // 2:]))  # floor division: the slice index must be an int under Python 3
cutoff = None
for index, dens in reversed(list(enumerate(wdensity[1:peak]))):
if dens < 2 * baseline:
cutoff = index
break
if not cutoff:
return None
else:
cutoff = 10**mids[cutoff]
logger.info('Setting barcode cutoff to %d' % cutoff)
return cutoff
|
python
|
Guesses at an appropriate barcode cutoff
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1020-L1044
|
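The heuristic in `guess_depth_cutoff` is easier to follow on toy numbers: histogram the log10 read counts, then weight each bin by the reads it represents so the high-count "cell" peak dominates. A sketch of just the weighting step (counts invented for illustration):

import numpy as np

cb_vals = [1, 2, 3, 5, 8, 1000, 1200, 1500, 2000]  # mostly noise, a few real cells
vals, edges = np.histogram(np.log10(cb_vals), bins=10)
mids = (edges[:-1] + edges[1:]) / 2  # vectorized form of the midpoint loop above
wdensity = vals * 10 ** mids / np.sum(vals * 10 ** mids)
# nearly all of the weighted density sits in the high-count bins
print(np.round(wdensity, 3))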
18,806
|
vals/umis
|
umis/umis.py
|
cb_filter
|
def cb_filter(fastq, bc1, bc2, bc3, cores, nedit):
''' Filters reads with non-matching barcodes
Expects formatted fastq files.
'''
with open_gzipsafe(bc1) as bc1_fh:
bc1 = set(cb.strip() for cb in bc1_fh)
if bc2:
with open_gzipsafe(bc2) as bc2_fh:
bc2 = set(cb.strip() for cb in bc2_fh)
if bc3:
with open_gzipsafe(bc3) as bc3_fh:
bc3 = set(cb.strip() for cb in bc3_fh)
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
if nedit == 0:
filter_cb = partial(exact_barcode_filter, bc1=bc1, bc2=bc2, bc3=bc3,
re_string=re_string)
else:
bc1hash = MutationHash(bc1, nedit)
bc2hash = None
bc3hash = None
if bc2:
bc2hash = MutationHash(bc2, nedit)
if bc3:
bc3hash = MutationHash(bc3, nedit)
filter_cb = partial(correcting_barcode_filter, bc1hash=bc1hash,
bc2hash=bc2hash, bc3hash=bc3hash, re_string=re_string)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_cb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read)
|
python
|
Filters reads with non-matching barcodes
Expects formatted fastq files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1053-L1090
|
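`cb_filter` builds its filter with `functools.partial` so that, by the time it reaches `Pool.map`, it takes a single argument (a chunk of reads). A minimal sketch of that pattern, with a hypothetical whitelist filter standing in for `exact_barcode_filter`:

from functools import partial

def whitelist_filter(chunk, bc1):  # hypothetical stand-in for exact_barcode_filter
    return [read for read in chunk if read.split(":")[0] in bc1]

filter_cb = partial(whitelist_filter, bc1={"AAAC", "TTTG"})
# Pool.map can now call filter_cb(chunk) with no extra arguments:
print(filter_cb(["AAAC:read1", "CCCC:read2", "TTTG:read3"]))
# ['AAAC:read1', 'TTTG:read3']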
18,807
|
vals/umis
|
umis/umis.py
|
sb_filter
|
def sb_filter(fastq, bc, cores, nedit):
''' Filters reads with non-matching sample barcodes
Expects formatted fastq files.
'''
barcodes = set(sb.strip() for sb in bc)
if nedit == 0:
filter_sb = partial(exact_sample_filter2, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_sb = partial(correcting_sample_filter2, barcodehash=barcodehash)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_sb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read)
|
python
|
Filters reads with non-matching sample barcodes
Expects formatted fastq files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1097-L1114
|
18,808
|
vals/umis
|
umis/umis.py
|
mb_filter
|
def mb_filter(fastq, cores):
''' Filters umis with non-ACGT bases
Expects formatted fastq files.
'''
filter_mb = partial(umi_filter)
p = multiprocessing.Pool(cores)
chunks = tz.partition_all(10000, read_fastq(fastq))
bigchunks = tz.partition_all(cores, chunks)
for bigchunk in bigchunks:
for chunk in p.map(filter_mb, list(bigchunk)):
for read in chunk:
sys.stdout.write(read)
|
python
|
Filters umis with non-ACGT bases
Expects formatted fastq files.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1119-L1131
|
18,809
|
vals/umis
|
umis/umis.py
|
kallisto
|
def kallisto(fastq, out_dir, cb_histogram, cb_cutoff):
''' Convert fastqtransformed file to output format compatible with
kallisto.
'''
    parser_re = re.compile('(.*):CELL_(?P<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n')  # named groups need (?P<...>); (?<CB>...) raises re.error
if fastq.endswith('gz'):
fastq_fh = gzip.GzipFile(fileobj=open(fastq))
elif fastq == "-":
fastq_fh = sys.stdin
else:
fastq_fh = open(fastq)
cb_depth_set = get_cb_depth_set(cb_histogram, cb_cutoff)
cb_set = set()
cb_batch = collections.defaultdict(list)
parsed = 0
for read in stream_fastq(fastq_fh):
match = parser_re.search(read).groupdict()
umi = match['UMI']
cb = match['CB']
if cb_depth_set and cb not in cb_depth_set:
continue
parsed += 1
cb_set.add(cb)
cb_batch[cb].append((read, umi))
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
cb_batch = collections.defaultdict(list)
for cb, chunk in cb_batch.items():
write_kallisto_chunk(out_dir, cb, chunk)
with open(os.path.join(out_dir, "barcodes.batch"), "w") as out_handle:
out_handle.write("#id umi-file file-1\n")
batchformat = "{cb} {cb}.umi {cb}.fq\n"
for cb in cb_set:
out_handle.write(batchformat.format(**locals()))
|
python
|
Convert fastqtransformed file to output format compatible with
kallisto.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1164-L1203
|
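With the named-group syntax in `kallisto` corrected to `(?P<...>)`, the parsing step looks like this on a single transformed record (read contents invented):

import re

parser_re = re.compile('(.*):CELL_(?P<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n')
read = "@r1:CELL_AAAC:UMI_TTGG\nACGTACGT\n+\nFFFFFFFF\n"
# groupdict() returns only the named groups
assert parser_re.search(read).groupdict() == {"CB": "AAAC", "UMI": "TTGG"}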
18,810
|
vals/umis
|
umis/umis.py
|
demultiplex_samples
|
def demultiplex_samples(fastq, out_dir, nedit, barcodes):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each sample.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
if barcodes:
barcodes = set(barcode.strip() for barcode in barcodes)
else:
barcodes = set()
if nedit == 0:
filter_bc = partial(exact_sample_filter, barcodes=barcodes)
else:
barcodehash = MutationHash(barcodes, nedit)
filter_bc = partial(correcting_sample_filter, barcodehash=barcodehash)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
read = filter_bc(read)
if not read:
continue
match = parser_re.search(read).groupdict()
sample = match['SB']
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
out_handle.write(fixed)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, sample + ".fq")
with open(out_file, "a") as out_handle:
for read in reads:
fixed = filter_bc(read)
if fixed:
                    out_handle.write(fixed)  # write the corrected read, matching the batch flush above
|
python
|
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each sample.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1256-L1305
|
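The flush-every-N idiom used in `demultiplex_samples`, reduced to a toy: accumulate per-sample lists in a defaultdict and reset it after each flush, so output files are opened once per batch rather than once per read (flush threshold shrunk to 2 for the demo):

import collections

batch = collections.defaultdict(list)
for parsed, (sample, read) in enumerate(
        [("s1", "r1"), ("s2", "r2"), ("s1", "r3")], start=1):
    batch[sample].append(read)
    if not parsed % 2:  # 10000000 in the real code
        print("flush:", dict(batch))
        batch = collections.defaultdict(list)
print("final flush:", dict(batch))  # whatever accumulated since the last flush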
18,811
|
vals/umis
|
umis/umis.py
|
demultiplex_cells
|
def demultiplex_cells(fastq, out_dir, readnumber, prefix, cb_histogram,
cb_cutoff):
''' Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each cell.
'''
annotations = detect_fastq_annotations(fastq)
re_string = construct_transformed_regex(annotations)
parser_re = re.compile(re_string)
readstring = "" if not readnumber else "_R{}".format(readnumber)
filestring = "{prefix}{sample}{readstring}.fq"
cb_set = set()
if cb_histogram:
cb_set = get_cb_depth_set(cb_histogram, cb_cutoff)
sample_set = set()
batch = collections.defaultdict(list)
parsed = 0
safe_makedir(out_dir)
for read in read_fastq(fastq):
parsed += 1
match = parser_re.search(read).groupdict()
sample = match['CB']
if cb_set and sample not in cb_set:
continue
sample_set.add(sample)
batch[sample].append(read)
# write in batches to avoid opening up file handles repeatedly
if not parsed % 10000000:
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read)
batch = collections.defaultdict(list)
for sample, reads in batch.items():
out_file = os.path.join(out_dir, filestring.format(**locals()))
with open(out_file, "a") as out_handle:
for read in reads:
out_handle.write(read)
|
python
|
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for
each cell.
|
e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c
|
https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1317-L1355
|
18,812
|
Deepwalker/aldjemy
|
aldjemy/postgres.py
|
array_type
|
def array_type(data_types, field):
"""
Allows conversion of Django ArrayField to SQLAlchemy Array.
Takes care of mapping the type of the array element.
"""
from sqlalchemy.dialects import postgresql
internal_type = field.base_field.get_internal_type()
# currently no support for multi-dimensional arrays
if internal_type in data_types and internal_type != 'ArrayField':
sub_type = data_types[internal_type](field)
if not isinstance(sub_type, (list, tuple)):
sub_type = [sub_type]
else:
raise RuntimeError('Unsupported array element type')
return postgresql.ARRAY(sub_type)
|
python
|
Allows conversion of Django ArrayField to SQLAlchemy Array.
Takes care of mapping the type of the array element.
|
d58359a3710e7f21e47a70765b9d75c61143ceb1
|
https://github.com/Deepwalker/aldjemy/blob/d58359a3710e7f21e47a70765b9d75c61143ceb1/aldjemy/postgres.py#L4-L21
|
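For a Django ArrayField wrapping an IntegerField, `array_type`'s mapping resolves the element type and wraps it, roughly equivalent to the following sketch (assuming an Integer element mapping; SQLAlchemy assumed installed):

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

arr = postgresql.ARRAY(sa.Integer())
print(arr.item_type)  # the element type carried inside the ARRAY, e.g. INTEGER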
18,813
|
Azure/blobxfer
|
blobxfer/util.py
|
set_verbose_logger_handlers
|
def set_verbose_logger_handlers(): # noqa
# type: (None) -> None
"""Set logger handler formatters to more detail"""
global _REGISTERED_LOGGER_HANDLERS
formatter = logging.Formatter(
'%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '
'%(message)s')
formatter.default_msec_format = '%s.%03d'
for handler in _REGISTERED_LOGGER_HANDLERS:
handler.setFormatter(formatter)
|
python
|
Set logger handler formatters to more detail
|
3eccbe7530cc6a20ab2d30f9e034b6f021817f34
|
https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/blobxfer/util.py#L114-L123
|
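The same verbose format applied to a fresh handler, for use outside blobxfer's registered handlers; `default_msec_format` switches asctime's millisecond separator from ',' to '.':

import logging
import sys

formatter = logging.Formatter(
    '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d '
    '%(message)s')
formatter.default_msec_format = '%s.%03d'
handler = logging.StreamHandler(sys.stderr)
handler.setFormatter(formatter)
log = logging.getLogger("demo")
log.addHandler(handler)
log.warning("verbose formatting active")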
18,814
|
Azure/blobxfer
|
cli/cli.py
|
download
|
def download(ctx):
"""Download blobs or files from Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Download)
ctx.initialize(settings.TransferAction.Download)
specs = settings.create_download_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Downloader(
ctx.general_options, ctx.credentials, spec
).start()
|
python
|
Download blobs or files from Azure Storage
|
3eccbe7530cc6a20ab2d30f9e034b6f021817f34
|
https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1071-L1081
|
18,815
|
Azure/blobxfer
|
cli/cli.py
|
synccopy
|
def synccopy(ctx):
"""Synchronously copy blobs or files between Azure Storage accounts"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Synccopy)
ctx.initialize(settings.TransferAction.Synccopy)
specs = settings.create_synccopy_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.SyncCopy(
ctx.general_options, ctx.credentials, spec
).start()
|
python
|
Synchronously copy blobs or files between Azure Storage accounts
|
3eccbe7530cc6a20ab2d30f9e034b6f021817f34
|
https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1088-L1098
|
18,816
|
Azure/blobxfer
|
cli/cli.py
|
upload
|
def upload(ctx):
"""Upload files to Azure Storage"""
settings.add_cli_options(ctx.cli_options, settings.TransferAction.Upload)
ctx.initialize(settings.TransferAction.Upload)
specs = settings.create_upload_specifications(
ctx.cli_options, ctx.config)
del ctx.cli_options
for spec in specs:
blobxfer.api.Uploader(
ctx.general_options, ctx.credentials, spec
).start()
|
python
|
Upload files to Azure Storage
|
3eccbe7530cc6a20ab2d30f9e034b6f021817f34
|
https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1106-L1116
|
18,817
|
knaperek/djangosaml2
|
djangosaml2/utils.py
|
get_idp_sso_supported_bindings
|
def get_idp_sso_supported_bindings(idp_entity_id=None, config=None):
"""Returns the list of bindings supported by an IDP
This is not clear in the pysaml2 code, so wrapping it in a util"""
if config is None:
# avoid circular import
from djangosaml2.conf import get_config
config = get_config()
# load metadata store from config
meta = getattr(config, 'metadata', {})
# if idp is None, assume only one exists so just use that
if idp_entity_id is None:
# .keys() returns dict_keys in python3.5+
try:
idp_entity_id = list(available_idps(config).keys())[0]
except IndexError:
raise ImproperlyConfigured("No IdP configured!")
try:
return meta.service(idp_entity_id, 'idpsso_descriptor', 'single_sign_on_service').keys()
except UnknownSystemEntity:
return []
|
python
|
Returns the list of bindings supported by an IDP
This is not clear in the pysaml2 code, so wrapping it in a util
|
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L41-L60
|
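The single-IdP fallback in `get_idp_sso_supported_bindings`, shown in isolation: take the first key of the available-IdPs mapping and convert the bare IndexError into a descriptive configuration error (plain RuntimeError here in place of Django's ImproperlyConfigured; the entity id is illustrative):

idps = {"https://idp.example.com/metadata": "IdP metadata"}
try:
    idp_entity_id = list(idps.keys())[0]
except IndexError:
    raise RuntimeError("No IdP configured!")
print(idp_entity_id)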
18,818
|
knaperek/djangosaml2
|
djangosaml2/utils.py
|
fail_acs_response
|
def fail_acs_response(request, *args, **kwargs):
""" Serves as a common mechanism for ending ACS in case of any SAML related failure.
Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as
suitable for the project.
The default behavior uses SAML specific template that is rendered on any ACS error,
but this can be simply changed so that PermissionDenied exception is raised instead.
"""
failure_function = import_string(get_custom_setting('SAML_ACS_FAILURE_RESPONSE_FUNCTION',
'djangosaml2.acs_failures.template_failure'))
return failure_function(request, *args, **kwargs)
|
python
|
Serves as a common mechanism for ending ACS in case of any SAML related failure.
Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as
suitable for the project.
The default behavior uses SAML specific template that is rendered on any ACS error,
but this can be simply changed so that PermissionDenied exception is raised instead.
|
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L72-L82
|
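`import_string` in `fail_acs_response` resolves a dotted path from settings into a callable, which is what makes the failure handler pluggable. A stdlib sketch of the same mechanism (django.utils.module_loading.import_string does this, with more validation):

import importlib

def import_string(dotted_path):
    module_path, _, attr = dotted_path.rpartition(".")
    return getattr(importlib.import_module(module_path), attr)

json_dumps = import_string("json.dumps")
print(json_dumps({"resolved": True}))  # {"resolved": true}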
18,819
|
knaperek/djangosaml2
|
djangosaml2/views.py
|
echo_attributes
|
def echo_attributes(request,
config_loader_path=None,
template='djangosaml2/echo_attributes.html'):
"""Example view that echo the SAML attributes of an user"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
try:
identity = client.users.get_identity(subject_id,
check_not_on_or_after=False)
except AttributeError:
return HttpResponse("No active SAML identity found. Are you sure you have logged in via SAML?")
return render(request, template, {'attributes': identity[0]})
|
python
|
Example view that echo the SAML attributes of an user
|
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L342-L358
|
18,820
|
knaperek/djangosaml2
|
djangosaml2/views.py
|
logout
|
def logout(request, config_loader_path=None):
"""SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest.
"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s',
request.user)
result = client.global_logout(subject_id)
state.sync()
if not result:
logger.error("Looks like the user %s is not logged in any IdP/AA", subject_id)
return HttpResponseBadRequest("You are not logged in any IdP/AA")
if len(result) > 1:
logger.error('Sorry, I do not know how to logout from several sources. I will logout just from the first one')
for entityid, logout_info in result.items():
if isinstance(logout_info, tuple):
binding, http_info = logout_info
if binding == BINDING_HTTP_POST:
logger.debug('Returning form to the IdP to continue the logout process')
body = ''.join(http_info['data'])
return HttpResponse(body)
elif binding == BINDING_HTTP_REDIRECT:
logger.debug('Redirecting to the IdP to continue the logout process')
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('Unknown binding: %s', binding)
return HttpResponseServerError('Failed to log out')
else:
# We must have had a soap logout
return finish_logout(request, logout_info)
    logger.error('Could not logout because only the HTTP_REDIRECT binding is supported')
return HttpResponseServerError('Logout Binding not supported')
|
python
|
def logout(request, config_loader_path=None):
"""SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest.
"""
state = StateCache(request.session)
conf = get_config(config_loader_path, request)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s',
request.user)
result = client.global_logout(subject_id)
state.sync()
if not result:
logger.error("Looks like the user %s is not logged in any IdP/AA", subject_id)
return HttpResponseBadRequest("You are not logged in any IdP/AA")
if len(result) > 1:
logger.error('Sorry, I do not know how to logout from several sources. I will logout just from the first one')
for entityid, logout_info in result.items():
if isinstance(logout_info, tuple):
binding, http_info = logout_info
if binding == BINDING_HTTP_POST:
logger.debug('Returning form to the IdP to continue the logout process')
body = ''.join(http_info['data'])
return HttpResponse(body)
elif binding == BINDING_HTTP_REDIRECT:
logger.debug('Redirecting to the IdP to continue the logout process')
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('Unknown binding: %s', binding)
return HttpResponseServerError('Failed to log out')
else:
# We must have had a soap logout
return finish_logout(request, logout_info)
    logger.error('Could not logout because only the HTTP_REDIRECT binding is supported')
return HttpResponseServerError('Logout Binding not supported')
|
[
"def",
"logout",
"(",
"request",
",",
"config_loader_path",
"=",
"None",
")",
":",
"state",
"=",
"StateCache",
"(",
"request",
".",
"session",
")",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"client",
"=",
"Saml2Client",
"(",
"conf",
",",
"state_cache",
"=",
"state",
",",
"identity_cache",
"=",
"IdentityCache",
"(",
"request",
".",
"session",
")",
")",
"subject_id",
"=",
"_get_subject_id",
"(",
"request",
".",
"session",
")",
"if",
"subject_id",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"'The session does not contain the subject id for user %s'",
",",
"request",
".",
"user",
")",
"result",
"=",
"client",
".",
"global_logout",
"(",
"subject_id",
")",
"state",
".",
"sync",
"(",
")",
"if",
"not",
"result",
":",
"logger",
".",
"error",
"(",
"\"Looks like the user %s is not logged in any IdP/AA\"",
",",
"subject_id",
")",
"return",
"HttpResponseBadRequest",
"(",
"\"You are not logged in any IdP/AA\"",
")",
"if",
"len",
"(",
"result",
")",
">",
"1",
":",
"logger",
".",
"error",
"(",
"'Sorry, I do not know how to logout from several sources. I will logout just from the first one'",
")",
"for",
"entityid",
",",
"logout_info",
"in",
"result",
".",
"items",
"(",
")",
":",
"if",
"isinstance",
"(",
"logout_info",
",",
"tuple",
")",
":",
"binding",
",",
"http_info",
"=",
"logout_info",
"if",
"binding",
"==",
"BINDING_HTTP_POST",
":",
"logger",
".",
"debug",
"(",
"'Returning form to the IdP to continue the logout process'",
")",
"body",
"=",
"''",
".",
"join",
"(",
"http_info",
"[",
"'data'",
"]",
")",
"return",
"HttpResponse",
"(",
"body",
")",
"elif",
"binding",
"==",
"BINDING_HTTP_REDIRECT",
":",
"logger",
".",
"debug",
"(",
"'Redirecting to the IdP to continue the logout process'",
")",
"return",
"HttpResponseRedirect",
"(",
"get_location",
"(",
"http_info",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'Unknown binding: %s'",
",",
"binding",
")",
"return",
"HttpResponseServerError",
"(",
"'Failed to log out'",
")",
"else",
":",
"# We must have had a soap logout",
"return",
"finish_logout",
"(",
"request",
",",
"logout_info",
")",
"logger",
".",
"error",
"(",
"'Could not logout because there only the HTTP_REDIRECT is supported'",
")",
"return",
"HttpResponseServerError",
"(",
"'Logout Binding not supported'",
")"
] |
SAML Logout Request initiator
This view initiates the SAML2 Logout request
using the pysaml2 library to create the LogoutRequest.
|
[
"SAML",
"Logout",
"Request",
"initiator"
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L362-L408
|
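The dispatch over client.global_logout in the view above is easier to follow on synthetic data; the sketch below replays the same branching on a made-up result dict (the entity id and redirect URL are assumptions).

from saml2 import BINDING_HTTP_POST, BINDING_HTTP_REDIRECT

# Stand-in for client.global_logout(subject_id): a dict keyed by IdP entity
# id, whose values are (binding, http_info) tuples for the HTTP bindings.
result = {
    'https://idp.example.com/metadata': (  # entity id is an assumption
        BINDING_HTTP_REDIRECT,
        {'headers': [('Location', 'https://idp.example.com/slo')]},
    ),
}

for entityid, logout_info in result.items():
    if isinstance(logout_info, tuple):
        binding, http_info = logout_info
        if binding == BINDING_HTTP_POST:
            print('would return the self-posting form in http_info["data"]')
        elif binding == BINDING_HTTP_REDIRECT:
            # get_location(http_info) reads this Location header
            print('would redirect to', dict(http_info['headers'])['Location'])
    else:
        print('SOAP logout already finished; finish_logout would run')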
18,821
|
knaperek/djangosaml2
|
djangosaml2/views.py
|
do_logout_service
|
def do_logout_service(request, data, binding, config_loader_path=None, next_page=None,
logout_error_template='djangosaml2/logout_error.html'):
"""SAML Logout Response endpoint
The IdP will send the logout response to this view,
    which will process it with the help of pysaml2 and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP.
"""
logger.debug('Logout service started')
conf = get_config(config_loader_path, request)
state = StateCache(request.session)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
if 'SAMLResponse' in data: # we started the logout
logger.debug('Receiving a logout response from the IdP')
response = client.parse_logout_request_response(data['SAMLResponse'], binding)
state.sync()
return finish_logout(request, response, next_page=next_page)
elif 'SAMLRequest' in data: # logout started by the IdP
logger.debug('Receiving a logout request from the IdP')
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s. Performing local logout',
request.user)
auth.logout(request)
return render(request, logout_error_template, status=403)
else:
http_info = client.handle_logout_request(
data['SAMLRequest'],
subject_id,
binding,
relay_state=data.get('RelayState', ''))
state.sync()
auth.logout(request)
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('No SAMLResponse or SAMLRequest parameter found')
raise Http404('No SAMLResponse or SAMLRequest parameter found')
|
python
|
def do_logout_service(request, data, binding, config_loader_path=None, next_page=None,
logout_error_template='djangosaml2/logout_error.html'):
"""SAML Logout Response endpoint
The IdP will send the logout response to this view,
    which will process it with the help of pysaml2 and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP.
"""
logger.debug('Logout service started')
conf = get_config(config_loader_path, request)
state = StateCache(request.session)
client = Saml2Client(conf, state_cache=state,
identity_cache=IdentityCache(request.session))
if 'SAMLResponse' in data: # we started the logout
logger.debug('Receiving a logout response from the IdP')
response = client.parse_logout_request_response(data['SAMLResponse'], binding)
state.sync()
return finish_logout(request, response, next_page=next_page)
elif 'SAMLRequest' in data: # logout started by the IdP
logger.debug('Receiving a logout request from the IdP')
subject_id = _get_subject_id(request.session)
if subject_id is None:
logger.warning(
'The session does not contain the subject id for user %s. Performing local logout',
request.user)
auth.logout(request)
return render(request, logout_error_template, status=403)
else:
http_info = client.handle_logout_request(
data['SAMLRequest'],
subject_id,
binding,
relay_state=data.get('RelayState', ''))
state.sync()
auth.logout(request)
return HttpResponseRedirect(get_location(http_info))
else:
logger.error('No SAMLResponse or SAMLRequest parameter found')
raise Http404('No SAMLResponse or SAMLRequest parameter found')
|
[
"def",
"do_logout_service",
"(",
"request",
",",
"data",
",",
"binding",
",",
"config_loader_path",
"=",
"None",
",",
"next_page",
"=",
"None",
",",
"logout_error_template",
"=",
"'djangosaml2/logout_error.html'",
")",
":",
"logger",
".",
"debug",
"(",
"'Logout service started'",
")",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"state",
"=",
"StateCache",
"(",
"request",
".",
"session",
")",
"client",
"=",
"Saml2Client",
"(",
"conf",
",",
"state_cache",
"=",
"state",
",",
"identity_cache",
"=",
"IdentityCache",
"(",
"request",
".",
"session",
")",
")",
"if",
"'SAMLResponse'",
"in",
"data",
":",
"# we started the logout",
"logger",
".",
"debug",
"(",
"'Receiving a logout response from the IdP'",
")",
"response",
"=",
"client",
".",
"parse_logout_request_response",
"(",
"data",
"[",
"'SAMLResponse'",
"]",
",",
"binding",
")",
"state",
".",
"sync",
"(",
")",
"return",
"finish_logout",
"(",
"request",
",",
"response",
",",
"next_page",
"=",
"next_page",
")",
"elif",
"'SAMLRequest'",
"in",
"data",
":",
"# logout started by the IdP",
"logger",
".",
"debug",
"(",
"'Receiving a logout request from the IdP'",
")",
"subject_id",
"=",
"_get_subject_id",
"(",
"request",
".",
"session",
")",
"if",
"subject_id",
"is",
"None",
":",
"logger",
".",
"warning",
"(",
"'The session does not contain the subject id for user %s. Performing local logout'",
",",
"request",
".",
"user",
")",
"auth",
".",
"logout",
"(",
"request",
")",
"return",
"render",
"(",
"request",
",",
"logout_error_template",
",",
"status",
"=",
"403",
")",
"else",
":",
"http_info",
"=",
"client",
".",
"handle_logout_request",
"(",
"data",
"[",
"'SAMLRequest'",
"]",
",",
"subject_id",
",",
"binding",
",",
"relay_state",
"=",
"data",
".",
"get",
"(",
"'RelayState'",
",",
"''",
")",
")",
"state",
".",
"sync",
"(",
")",
"auth",
".",
"logout",
"(",
"request",
")",
"return",
"HttpResponseRedirect",
"(",
"get_location",
"(",
"http_info",
")",
")",
"else",
":",
"logger",
".",
"error",
"(",
"'No SAMLResponse or SAMLRequest parameter found'",
")",
"raise",
"Http404",
"(",
"'No SAMLResponse or SAMLRequest parameter found'",
")"
] |
SAML Logout Response endpoint
The IdP will send the logout response to this view,
which will process it with the help of pysaml2 and log the user
out.
Note that the IdP can request a logout even when
we didn't initiate the process as a single logout
request started by another SP.
|
[
"SAML",
"Logout",
"Response",
"endpoint"
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L420-L464
|
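do_logout_service expects the caller to pass the request data and the SAML binding explicitly; a sketch of thin per-method wrappers (djangosaml2 ships similar views, but these exact names are written out here as assumptions):

from saml2 import BINDING_HTTP_POST, BINDING_HTTP_REDIRECT

from djangosaml2.views import do_logout_service

# Hypothetical wrappers routing each HTTP method to its SAML binding.
def logout_service(request, *args, **kwargs):
    # IdP -> SP over a redirect: parameters arrive in the query string.
    return do_logout_service(request, request.GET,
                             BINDING_HTTP_REDIRECT, *args, **kwargs)

def logout_service_post(request, *args, **kwargs):
    # IdP -> SP over POST: parameters arrive in the form body.
    return do_logout_service(request, request.POST,
                             BINDING_HTTP_POST, *args, **kwargs)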
18,822
|
knaperek/djangosaml2
|
djangosaml2/views.py
|
metadata
|
def metadata(request, config_loader_path=None, valid_for=None):
"""Returns an XML with the SAML 2.0 metadata for this
SP as configured in the settings.py file.
"""
conf = get_config(config_loader_path, request)
metadata = entity_descriptor(conf)
return HttpResponse(content=text_type(metadata).encode('utf-8'),
content_type="text/xml; charset=utf8")
|
python
|
def metadata(request, config_loader_path=None, valid_for=None):
"""Returns an XML with the SAML 2.0 metadata for this
SP as configured in the settings.py file.
"""
conf = get_config(config_loader_path, request)
metadata = entity_descriptor(conf)
return HttpResponse(content=text_type(metadata).encode('utf-8'),
content_type="text/xml; charset=utf8")
|
[
"def",
"metadata",
"(",
"request",
",",
"config_loader_path",
"=",
"None",
",",
"valid_for",
"=",
"None",
")",
":",
"conf",
"=",
"get_config",
"(",
"config_loader_path",
",",
"request",
")",
"metadata",
"=",
"entity_descriptor",
"(",
"conf",
")",
"return",
"HttpResponse",
"(",
"content",
"=",
"text_type",
"(",
"metadata",
")",
".",
"encode",
"(",
"'utf-8'",
")",
",",
"content_type",
"=",
"\"text/xml; charset=utf8\"",
")"
] |
Returns an XML document with the SAML 2.0 metadata for this
SP as configured in the settings.py file.
|
[
"Returns",
"an",
"XML",
"with",
"the",
"SAML",
"2",
".",
"0",
"metadata",
"for",
"this",
"SP",
"as",
"configured",
"in",
"the",
"settings",
".",
"py",
"file",
"."
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L479-L486
|
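Since the view only serialises the SP configuration, the same entity descriptor can be produced offline, e.g. to hand the XML to an IdP administrator. A sketch assuming Django settings (including settings.SAML_CONFIG) are already configured:

# Illustrative: dump the SP metadata without a request/response cycle.
from djangosaml2.conf import get_config
from saml2.metadata import entity_descriptor

conf = get_config()  # default loader reads settings.SAML_CONFIG
with open('sp_metadata.xml', 'wb') as fh:
    fh.write(str(entity_descriptor(conf)).encode('utf-8'))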
18,823
|
knaperek/djangosaml2
|
djangosaml2/backends.py
|
Saml2Backend.configure_user
|
def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
return self.update_user(user, attributes, attribute_mapping,
force_save=True)
|
python
|
def configure_user(self, user, attributes, attribute_mapping):
"""Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
"""
user.set_unusable_password()
return self.update_user(user, attributes, attribute_mapping,
force_save=True)
|
[
"def",
"configure_user",
"(",
"self",
",",
"user",
",",
"attributes",
",",
"attribute_mapping",
")",
":",
"user",
".",
"set_unusable_password",
"(",
")",
"return",
"self",
".",
"update_user",
"(",
"user",
",",
"attributes",
",",
"attribute_mapping",
",",
"force_save",
"=",
"True",
")"
] |
Configures a user after creation and returns the updated user.
By default, returns the user with his attributes updated.
|
[
"Configures",
"a",
"user",
"after",
"creation",
"and",
"returns",
"the",
"updated",
"user",
"."
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L198-L205
|
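Because configure_user runs only when a user is first created, it is a natural hook for one-time setup; a sketch of a subclass (the is_staff default is an illustrative example, not djangosaml2 behaviour):

from djangosaml2.backends import Saml2Backend

class MySaml2Backend(Saml2Backend):
    def configure_user(self, user, attributes, attribute_mapping):
        # Runs once, right after creation; one-time defaults belong here.
        user.is_staff = False  # illustrative default
        return super(MySaml2Backend, self).configure_user(
            user, attributes, attribute_mapping)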
18,824
|
knaperek/djangosaml2
|
djangosaml2/backends.py
|
Saml2Backend.update_user
|
def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set.
"""
if not attribute_mapping:
return user
user_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
attr_value_list = attributes.get(saml_attr)
if not attr_value_list:
logger.debug(
'Could not find value for "%s", not updating fields "%s"',
saml_attr, django_attrs)
continue
for attr in django_attrs:
if hasattr(user, attr):
user_attr = getattr(user, attr)
if callable(user_attr):
modified = user_attr(attr_value_list)
else:
modified = self._set_attribute(user, attr, attr_value_list[0])
user_modified = user_modified or modified
else:
logger.debug(
'Could not find attribute "%s" on user "%s"', attr, user)
logger.debug('Sending the pre_save signal')
signal_modified = any(
[response for receiver, response
in pre_user_save.send_robust(sender=user.__class__,
instance=user,
attributes=attributes,
user_modified=user_modified)]
)
if user_modified or signal_modified or force_save:
user.save()
return user
|
python
|
def update_user(self, user, attributes, attribute_mapping,
force_save=False):
"""Update a user with a set of attributes and returns the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set.
"""
if not attribute_mapping:
return user
user_modified = False
for saml_attr, django_attrs in attribute_mapping.items():
attr_value_list = attributes.get(saml_attr)
if not attr_value_list:
logger.debug(
'Could not find value for "%s", not updating fields "%s"',
saml_attr, django_attrs)
continue
for attr in django_attrs:
if hasattr(user, attr):
user_attr = getattr(user, attr)
if callable(user_attr):
modified = user_attr(attr_value_list)
else:
modified = self._set_attribute(user, attr, attr_value_list[0])
user_modified = user_modified or modified
else:
logger.debug(
'Could not find attribute "%s" on user "%s"', attr, user)
logger.debug('Sending the pre_save signal')
signal_modified = any(
[response for receiver, response
in pre_user_save.send_robust(sender=user.__class__,
instance=user,
attributes=attributes,
user_modified=user_modified)]
)
if user_modified or signal_modified or force_save:
user.save()
return user
|
[
"def",
"update_user",
"(",
"self",
",",
"user",
",",
"attributes",
",",
"attribute_mapping",
",",
"force_save",
"=",
"False",
")",
":",
"if",
"not",
"attribute_mapping",
":",
"return",
"user",
"user_modified",
"=",
"False",
"for",
"saml_attr",
",",
"django_attrs",
"in",
"attribute_mapping",
".",
"items",
"(",
")",
":",
"attr_value_list",
"=",
"attributes",
".",
"get",
"(",
"saml_attr",
")",
"if",
"not",
"attr_value_list",
":",
"logger",
".",
"debug",
"(",
"'Could not find value for \"%s\", not updating fields \"%s\"'",
",",
"saml_attr",
",",
"django_attrs",
")",
"continue",
"for",
"attr",
"in",
"django_attrs",
":",
"if",
"hasattr",
"(",
"user",
",",
"attr",
")",
":",
"user_attr",
"=",
"getattr",
"(",
"user",
",",
"attr",
")",
"if",
"callable",
"(",
"user_attr",
")",
":",
"modified",
"=",
"user_attr",
"(",
"attr_value_list",
")",
"else",
":",
"modified",
"=",
"self",
".",
"_set_attribute",
"(",
"user",
",",
"attr",
",",
"attr_value_list",
"[",
"0",
"]",
")",
"user_modified",
"=",
"user_modified",
"or",
"modified",
"else",
":",
"logger",
".",
"debug",
"(",
"'Could not find attribute \"%s\" on user \"%s\"'",
",",
"attr",
",",
"user",
")",
"logger",
".",
"debug",
"(",
"'Sending the pre_save signal'",
")",
"signal_modified",
"=",
"any",
"(",
"[",
"response",
"for",
"receiver",
",",
"response",
"in",
"pre_user_save",
".",
"send_robust",
"(",
"sender",
"=",
"user",
".",
"__class__",
",",
"instance",
"=",
"user",
",",
"attributes",
"=",
"attributes",
",",
"user_modified",
"=",
"user_modified",
")",
"]",
")",
"if",
"user_modified",
"or",
"signal_modified",
"or",
"force_save",
":",
"user",
".",
"save",
"(",
")",
"return",
"user"
] |
Update a user with a set of attributes and return the updated user.
By default it uses a mapping defined in the settings constant
SAML_ATTRIBUTE_MAPPING. For each attribute, if the user object has
that field defined it will be set.
|
[
"Update",
"a",
"user",
"with",
"a",
"set",
"of",
"attributes",
"and",
"returns",
"the",
"updated",
"user",
"."
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L207-L252
|
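The attribute_mapping walked above normally comes from the SAML_ATTRIBUTE_MAPPING setting; a sketch of such a mapping (the SAML attribute names are typical eduPerson examples, chosen for illustration) plus a callable target, which update_user invokes with the full value list:

# settings.py -- illustrative mapping; attribute names vary per IdP
SAML_ATTRIBUTE_MAPPING = {
    'uid': ('username', ),
    'mail': ('email', ),
    'cn': ('first_name', ),
    'sn': ('last_name', ),
    'eduPersonAffiliation': ('process_affiliations', ),  # callable target
}

# models.py -- a callable target on a custom user model receives the whole
# value list and should return True when it changed the user (this feeds
# the user_modified flag above).
from django.contrib.auth.models import AbstractUser

class MyUser(AbstractUser):
    def process_affiliations(self, affiliation_list):
        changed = 'staff' in affiliation_list and not self.is_staff
        if changed:
            self.is_staff = True
        return changed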
18,825
|
knaperek/djangosaml2
|
djangosaml2/backends.py
|
Saml2Backend._set_attribute
|
def _set_attribute(self, obj, attr, value):
"""Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise.
"""
field = obj._meta.get_field(attr)
if field.max_length is not None and len(value) > field.max_length:
cleaned_value = value[:field.max_length]
logger.warn('The attribute "%s" was trimmed from "%s" to "%s"',
attr, value, cleaned_value)
else:
cleaned_value = value
old_value = getattr(obj, attr)
if cleaned_value != old_value:
setattr(obj, attr, cleaned_value)
return True
return False
|
python
|
def _set_attribute(self, obj, attr, value):
"""Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise.
"""
field = obj._meta.get_field(attr)
if field.max_length is not None and len(value) > field.max_length:
cleaned_value = value[:field.max_length]
logger.warn('The attribute "%s" was trimmed from "%s" to "%s"',
attr, value, cleaned_value)
else:
cleaned_value = value
old_value = getattr(obj, attr)
if cleaned_value != old_value:
setattr(obj, attr, cleaned_value)
return True
return False
|
[
"def",
"_set_attribute",
"(",
"self",
",",
"obj",
",",
"attr",
",",
"value",
")",
":",
"field",
"=",
"obj",
".",
"_meta",
".",
"get_field",
"(",
"attr",
")",
"if",
"field",
".",
"max_length",
"is",
"not",
"None",
"and",
"len",
"(",
"value",
")",
">",
"field",
".",
"max_length",
":",
"cleaned_value",
"=",
"value",
"[",
":",
"field",
".",
"max_length",
"]",
"logger",
".",
"warn",
"(",
"'The attribute \"%s\" was trimmed from \"%s\" to \"%s\"'",
",",
"attr",
",",
"value",
",",
"cleaned_value",
")",
"else",
":",
"cleaned_value",
"=",
"value",
"old_value",
"=",
"getattr",
"(",
"obj",
",",
"attr",
")",
"if",
"cleaned_value",
"!=",
"old_value",
":",
"setattr",
"(",
"obj",
",",
"attr",
",",
"cleaned_value",
")",
"return",
"True",
"return",
"False"
] |
Set an attribute of an object to a specific value.
Return True if the attribute was changed and False otherwise.
|
[
"Set",
"an",
"attribute",
"of",
"an",
"object",
"to",
"a",
"specific",
"value",
"."
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L254-L272
|
18,826
|
knaperek/djangosaml2
|
djangosaml2/conf.py
|
config_settings_loader
|
def config_settings_loader(request=None):
"""Utility function to load the pysaml2 configuration.
This is also the default config loader.
"""
conf = SPConfig()
conf.load(copy.deepcopy(settings.SAML_CONFIG))
return conf
|
python
|
def config_settings_loader(request=None):
"""Utility function to load the pysaml2 configuration.
This is also the default config loader.
"""
conf = SPConfig()
conf.load(copy.deepcopy(settings.SAML_CONFIG))
return conf
|
[
"def",
"config_settings_loader",
"(",
"request",
"=",
"None",
")",
":",
"conf",
"=",
"SPConfig",
"(",
")",
"conf",
".",
"load",
"(",
"copy",
".",
"deepcopy",
"(",
"settings",
".",
"SAML_CONFIG",
")",
")",
"return",
"conf"
] |
Utility function to load the pysaml2 configuration.
This is also the default config loader.
|
[
"Utility",
"function",
"to",
"load",
"the",
"pysaml2",
"configuration",
"."
] |
643969701d3b4257a8d64c5c577602ebaa61de70
|
https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/conf.py#L55-L62
|
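The deepcopy guards against SPConfig.load mutating the dict it receives; SAML_CONFIG itself is a standard pysaml2 configuration dict. A trimmed, illustrative shape (entity id, URLs, and paths are assumptions; real deployments also need key material and more):

# settings.py -- minimal illustrative SAML_CONFIG sketch
import saml2

SAML_CONFIG = {
    'entityid': 'https://sp.example.com/saml2/metadata/',  # assumption
    'service': {
        'sp': {
            'endpoints': {
                'assertion_consumer_service': [
                    ('https://sp.example.com/saml2/acs/',
                     saml2.BINDING_HTTP_POST),
                ],
            },
        },
    },
    'metadata': {'local': ['/etc/saml/idp_metadata.xml']},  # assumption
}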
18,827
|
basho/riak-python-client
|
riak/transports/http/resources.py
|
mkpath
|
def mkpath(*segments, **query):
"""
Constructs the path & query portion of a URI from path segments
and a dict.
"""
# Remove empty segments (e.g. no key specified)
segments = [bytes_to_str(s) for s in segments if s is not None]
# Join the segments into a path
pathstring = '/'.join(segments)
# Remove extra slashes
pathstring = re.sub('/+', '/', pathstring)
# Add the query string if it exists
_query = {}
for key in query:
if query[key] in [False, True]:
_query[key] = str(query[key]).lower()
elif query[key] is not None:
if PY2 and isinstance(query[key], unicode): # noqa
_query[key] = query[key].encode('utf-8')
else:
_query[key] = query[key]
if len(_query) > 0:
pathstring += "?" + urlencode(_query)
if not pathstring.startswith('/'):
pathstring = '/' + pathstring
return pathstring
|
python
|
def mkpath(*segments, **query):
"""
Constructs the path & query portion of a URI from path segments
and a dict.
"""
# Remove empty segments (e.g. no key specified)
segments = [bytes_to_str(s) for s in segments if s is not None]
# Join the segments into a path
pathstring = '/'.join(segments)
# Remove extra slashes
pathstring = re.sub('/+', '/', pathstring)
# Add the query string if it exists
_query = {}
for key in query:
if query[key] in [False, True]:
_query[key] = str(query[key]).lower()
elif query[key] is not None:
if PY2 and isinstance(query[key], unicode): # noqa
_query[key] = query[key].encode('utf-8')
else:
_query[key] = query[key]
if len(_query) > 0:
pathstring += "?" + urlencode(_query)
if not pathstring.startswith('/'):
pathstring = '/' + pathstring
return pathstring
|
[
"def",
"mkpath",
"(",
"*",
"segments",
",",
"*",
"*",
"query",
")",
":",
"# Remove empty segments (e.g. no key specified)",
"segments",
"=",
"[",
"bytes_to_str",
"(",
"s",
")",
"for",
"s",
"in",
"segments",
"if",
"s",
"is",
"not",
"None",
"]",
"# Join the segments into a path",
"pathstring",
"=",
"'/'",
".",
"join",
"(",
"segments",
")",
"# Remove extra slashes",
"pathstring",
"=",
"re",
".",
"sub",
"(",
"'/+'",
",",
"'/'",
",",
"pathstring",
")",
"# Add the query string if it exists",
"_query",
"=",
"{",
"}",
"for",
"key",
"in",
"query",
":",
"if",
"query",
"[",
"key",
"]",
"in",
"[",
"False",
",",
"True",
"]",
":",
"_query",
"[",
"key",
"]",
"=",
"str",
"(",
"query",
"[",
"key",
"]",
")",
".",
"lower",
"(",
")",
"elif",
"query",
"[",
"key",
"]",
"is",
"not",
"None",
":",
"if",
"PY2",
"and",
"isinstance",
"(",
"query",
"[",
"key",
"]",
",",
"unicode",
")",
":",
"# noqa",
"_query",
"[",
"key",
"]",
"=",
"query",
"[",
"key",
"]",
".",
"encode",
"(",
"'utf-8'",
")",
"else",
":",
"_query",
"[",
"key",
"]",
"=",
"query",
"[",
"key",
"]",
"if",
"len",
"(",
"_query",
")",
">",
"0",
":",
"pathstring",
"+=",
"\"?\"",
"+",
"urlencode",
"(",
"_query",
")",
"if",
"not",
"pathstring",
".",
"startswith",
"(",
"'/'",
")",
":",
"pathstring",
"=",
"'/'",
"+",
"pathstring",
"return",
"pathstring"
] |
Constructs the path & query portion of a URI from path segments
and a dict.
|
[
"Constructs",
"the",
"path",
"&",
"query",
"portion",
"of",
"a",
"URI",
"from",
"path",
"segments",
"and",
"a",
"dict",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L275-L304
|
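A few worked calls, assuming mkpath is imported from riak.transports.http.resources; note that None segments and values are dropped, booleans are lowercased, and query-string ordering follows dict iteration order:

from riak.transports.http.resources import mkpath

mkpath('buckets', 'mybucket', 'keys')  # -> '/buckets/mybucket/keys'
mkpath('buckets', 'mybucket', None)    # None dropped -> '/buckets/mybucket'
mkpath('riak', 'b', 'k', r=2, returnbody=True)
# -> '/riak/b/k?r=2&returnbody=true' (parameter order may vary)
mkpath('a//b')                         # extra slashes collapsed -> '/a/b'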
18,828
|
basho/riak-python-client
|
riak/transports/http/resources.py
|
HttpResources.search_index_path
|
def search_index_path(self, index=None, **options):
"""
Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
    :type options: dict
:rtype URL string
"""
if not self.yz_wm_index:
raise RiakError("Yokozuna search is unsupported by this Riak node")
if index:
        index = quote_plus(index)
return mkpath(self.yz_wm_index, "index", index, **options)
|
python
|
def search_index_path(self, index=None, **options):
"""
Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
    :type options: dict
:rtype URL string
"""
if not self.yz_wm_index:
raise RiakError("Yokozuna search is unsupported by this Riak node")
if index:
        index = quote_plus(index)
return mkpath(self.yz_wm_index, "index", index, **options)
|
[
"def",
"search_index_path",
"(",
"self",
",",
"index",
"=",
"None",
",",
"*",
"*",
"options",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_index",
":",
"raise",
"RiakError",
"(",
"\"Yokozuna search is unsupported by this Riak node\"",
")",
"if",
"index",
":",
"quote_plus",
"(",
"index",
")",
"return",
"mkpath",
"(",
"self",
".",
"yz_wm_index",
",",
"\"index\"",
",",
"index",
",",
"*",
"*",
"options",
")"
] |
Builds a Yokozuna search index URL.
:param index: optional name of a yz index
:type index: string
:param options: optional list of additional arguments
:type options: dict
:rtype URL string
|
[
"Builds",
"a",
"Yokozuna",
"search",
"index",
"URL",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L110-L124
|
18,829
|
basho/riak-python-client
|
riak/transports/http/resources.py
|
HttpResources.search_schema_path
|
def search_schema_path(self, index, **options):
"""
Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
    :type options: dict
:rtype URL string
"""
if not self.yz_wm_schema:
raise RiakError("Yokozuna search is unsupported by this Riak node")
return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
**options)
|
python
|
def search_schema_path(self, index, **options):
"""
Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
    :type options: dict
:rtype URL string
"""
if not self.yz_wm_schema:
raise RiakError("Yokozuna search is unsupported by this Riak node")
return mkpath(self.yz_wm_schema, "schema", quote_plus(index),
**options)
|
[
"def",
"search_schema_path",
"(",
"self",
",",
"index",
",",
"*",
"*",
"options",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_schema",
":",
"raise",
"RiakError",
"(",
"\"Yokozuna search is unsupported by this Riak node\"",
")",
"return",
"mkpath",
"(",
"self",
".",
"yz_wm_schema",
",",
"\"schema\"",
",",
"quote_plus",
"(",
"index",
")",
",",
"*",
"*",
"options",
")"
] |
Builds a Yokozuna search Solr schema URL.
:param index: a name of a yz solr schema
:type index: string
:param options: optional list of additional arguments
:type options: dict
:rtype URL string
|
[
"Builds",
"a",
"Yokozuna",
"search",
"Solr",
"schema",
"URL",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L126-L139
|
18,830
|
basho/riak-python-client
|
riak/datatypes/hll.py
|
Hll.to_op
|
def to_op(self):
"""
Extracts the modification operation from the Hll.
:rtype: dict, None
"""
if not self._adds:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
return changes
|
python
|
def to_op(self):
"""
Extracts the modification operation from the Hll.
:rtype: dict, None
"""
if not self._adds:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
return changes
|
[
"def",
"to_op",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_adds",
":",
"return",
"None",
"changes",
"=",
"{",
"}",
"if",
"self",
".",
"_adds",
":",
"changes",
"[",
"'adds'",
"]",
"=",
"list",
"(",
"self",
".",
"_adds",
")",
"return",
"changes"
] |
Extracts the modification operation from the Hll.
:rtype: dict, None
|
[
"Extracts",
"the",
"modification",
"operation",
"from",
"the",
"Hll",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L49-L60
|
18,831
|
basho/riak-python-client
|
riak/datatypes/hll.py
|
Hll.add
|
def add(self, element):
"""
Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str
"""
if not isinstance(element, six.string_types):
raise TypeError("Hll elements can only be strings")
self._adds.add(element)
|
python
|
def add(self, element):
"""
Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str
"""
if not isinstance(element, six.string_types):
raise TypeError("Hll elements can only be strings")
self._adds.add(element)
|
[
"def",
"add",
"(",
"self",
",",
"element",
")",
":",
"if",
"not",
"isinstance",
"(",
"element",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"TypeError",
"(",
"\"Hll elements can only be strings\"",
")",
"self",
".",
"_adds",
".",
"add",
"(",
"element",
")"
] |
Adds an element to the HyperLogLog. Datatype cardinality will
be updated when the object is saved.
:param element: the element to add
:type element: str
|
[
"Adds",
"an",
"element",
"to",
"the",
"HyperLogLog",
".",
"Datatype",
"cardinality",
"will",
"be",
"updated",
"when",
"the",
"object",
"is",
"saved",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L62-L72
|
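A typical client-side flow for the Hll datatype, assuming a bucket type named 'hlls' whose datatype property is hll (the type, bucket, and key names are assumptions):

import riak

client = riak.RiakClient(protocol='pbc', host='127.0.0.1', pb_port=8087)
bucket = client.bucket_type('hlls').bucket('visitors')
hll = bucket.new('page-1')
hll.add('user-a')
hll.add('user-b')
hll.add('user-a')  # duplicate adds do not grow the estimate
hll.store()        # ships to_op()'s {'adds': [...]} to Riak
print(bucket.get('page-1').value)  # approximate cardinality, here ~2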
18,832
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.ping
|
def ping(self):
"""
Check server is alive over HTTP
"""
status, _, body = self._request('GET', self.ping_path())
    return (status is not None) and (bytes_to_str(body) == 'OK')
|
python
|
def ping(self):
"""
Check server is alive over HTTP
"""
status, _, body = self._request('GET', self.ping_path())
    return (status is not None) and (bytes_to_str(body) == 'OK')
|
[
"def",
"ping",
"(",
"self",
")",
":",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"self",
".",
"ping_path",
"(",
")",
")",
"return",
"(",
"status",
"is",
"not",
"None",
")",
"and",
"(",
"bytes_to_str",
"(",
"body",
")",
"==",
"'OK'",
")"
] |
Check server is alive over HTTP
|
[
"Check",
"server",
"is",
"alive",
"over",
"HTTP"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L68-L73
|
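Transport methods are normally reached through RiakClient rather than called directly; a quick liveness check over HTTP might look like this (host and port are assumptions):

import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
if client.ping():  # delegates to the transport's GET on the ping path
    print('Riak node is up')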
18,833
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.stats
|
def stats(self):
"""
Gets performance statistics and server information
"""
status, _, body = self._request('GET', self.stats_path(),
{'Accept': 'application/json'})
if status == 200:
return json.loads(bytes_to_str(body))
else:
return None
|
python
|
def stats(self):
"""
Gets performance statistics and server information
"""
status, _, body = self._request('GET', self.stats_path(),
{'Accept': 'application/json'})
if status == 200:
return json.loads(bytes_to_str(body))
else:
return None
|
[
"def",
"stats",
"(",
"self",
")",
":",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"self",
".",
"stats_path",
"(",
")",
",",
"{",
"'Accept'",
":",
"'application/json'",
"}",
")",
"if",
"status",
"==",
"200",
":",
"return",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"else",
":",
"return",
"None"
] |
Gets performance statistics and server information
|
[
"Gets",
"performance",
"statistics",
"and",
"server",
"information"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L75-L84
|
18,834
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.get_keys
|
def get_keys(self, bucket, timeout=None):
"""
Fetch a list of keys for the bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.key_list_path(bucket.name, bucket_type=bucket_type,
timeout=timeout)
status, _, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['keys']
else:
raise RiakError('Error listing keys.')
|
python
|
def get_keys(self, bucket, timeout=None):
"""
Fetch a list of keys for the bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.key_list_path(bucket.name, bucket_type=bucket_type,
timeout=timeout)
status, _, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['keys']
else:
raise RiakError('Error listing keys.')
|
[
"def",
"get_keys",
"(",
"self",
",",
"bucket",
",",
"timeout",
"=",
"None",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"key_list_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
",",
"timeout",
"=",
"timeout",
")",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'keys'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error listing keys.'",
")"
] |
Fetch a list of keys for the bucket
|
[
"Fetch",
"a",
"list",
"of",
"keys",
"for",
"the",
"bucket"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L199-L212
|
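Key listing walks the entire bucket, so it is best reserved for development and admin tooling; a guarded usage sketch (connection details and bucket name are assumptions):

import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
bucket = client.bucket('logs')
for key in bucket.get_keys():  # O(all keys); avoid on busy production clusters
    print(key)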
18,835
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.get_buckets
|
def get_buckets(self, bucket_type=None, timeout=None):
"""
Fetch a list of all buckets
"""
bucket_type = self._get_bucket_type(bucket_type)
url = self.bucket_list_path(bucket_type=bucket_type,
timeout=timeout)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['buckets']
else:
raise RiakError('Error getting buckets.')
|
python
|
def get_buckets(self, bucket_type=None, timeout=None):
"""
Fetch a list of all buckets
"""
bucket_type = self._get_bucket_type(bucket_type)
url = self.bucket_list_path(bucket_type=bucket_type,
timeout=timeout)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['buckets']
else:
raise RiakError('Error getting buckets.')
|
[
"def",
"get_buckets",
"(",
"self",
",",
"bucket_type",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_list_path",
"(",
"bucket_type",
"=",
"bucket_type",
",",
"timeout",
"=",
"timeout",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'buckets'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting buckets.'",
")"
] |
Fetch a list of all buckets
|
[
"Fetch",
"a",
"list",
"of",
"all",
"buckets"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L225-L238
|
18,836
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.get_bucket_props
|
def get_bucket_props(self, bucket):
"""
Get properties for a bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket properties.')
|
python
|
def get_bucket_props(self, bucket):
"""
Get properties for a bucket
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket properties.')
|
[
"def",
"get_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'props'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting bucket properties.'",
")"
] |
Get properties for a bucket
|
[
"Get",
"properties",
"for",
"a",
"bucket"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L258-L271
|
18,837
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.set_bucket_props
|
def set_bucket_props(self, bucket, props):
"""
Set the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, body = self._request('PUT', url, headers, content)
if status == 401:
raise SecurityError('Not authorized to set bucket properties.')
elif status != 204:
raise RiakError('Error setting bucket properties.')
return True
|
python
|
def set_bucket_props(self, bucket, props):
"""
Set the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, body = self._request('PUT', url, headers, content)
if status == 401:
raise SecurityError('Not authorized to set bucket properties.')
elif status != 204:
raise RiakError('Error setting bucket properties.')
return True
|
[
"def",
"set_bucket_props",
"(",
"self",
",",
"bucket",
",",
"props",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"content",
"=",
"json",
".",
"dumps",
"(",
"{",
"'props'",
":",
"props",
"}",
")",
"# Run the request...",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"==",
"401",
":",
"raise",
"SecurityError",
"(",
"'Not authorized to set bucket properties.'",
")",
"elif",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error setting bucket properties.'",
")",
"return",
"True"
] |
Set the properties on the bucket object given
|
[
"Set",
"the",
"properties",
"on",
"the",
"bucket",
"object",
"given"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L273-L290
|
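From the client API this surfaces as RiakBucket.set_properties; a sketch (bucket name and property values are illustrative):

import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
bucket = client.bucket('photos')
bucket.set_properties({'n_val': 3, 'allow_mult': True})  # PUT {'props': ...}
print(bucket.get_properties()['n_val'])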
18,838
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.clear_bucket_props
|
def clear_bucket_props(self, bucket):
"""
    Reset the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
# Run the request...
status, _, _ = self._request('DELETE', url, headers, None)
if status == 204:
return True
elif status == 405:
return False
else:
raise RiakError('Error %s clearing bucket properties.'
% status)
|
python
|
def clear_bucket_props(self, bucket):
"""
    Reset the properties on the bucket object given
"""
bucket_type = self._get_bucket_type(bucket.bucket_type)
url = self.bucket_properties_path(bucket.name,
bucket_type=bucket_type)
headers = {'Content-Type': 'application/json'}
# Run the request...
status, _, _ = self._request('DELETE', url, headers, None)
if status == 204:
return True
elif status == 405:
return False
else:
raise RiakError('Error %s clearing bucket properties.'
% status)
|
[
"def",
"clear_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"bucket_type",
"=",
"self",
".",
"_get_bucket_type",
"(",
"bucket",
".",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
",",
"bucket_type",
"=",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_properties_path",
"(",
"bucket",
".",
"name",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"# Run the request...",
"status",
",",
"_",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"'DELETE'",
",",
"url",
",",
"headers",
",",
"None",
")",
"if",
"status",
"==",
"204",
":",
"return",
"True",
"elif",
"status",
"==",
"405",
":",
"return",
"False",
"else",
":",
"raise",
"RiakError",
"(",
"'Error %s clearing bucket properties.'",
"%",
"status",
")"
] |
Reset the properties on the bucket object given
|
[
"reset",
"the",
"properties",
"on",
"the",
"bucket",
"object",
"given"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L292-L311
|
18,839
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.get_bucket_type_props
|
def get_bucket_type_props(self, bucket_type):
"""
Get properties for a bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket-type properties.')
|
python
|
def get_bucket_type_props(self, bucket_type):
"""
Get properties for a bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
status, headers, body = self._request('GET', url)
if status == 200:
props = json.loads(bytes_to_str(body))
return props['props']
else:
raise RiakError('Error getting bucket-type properties.')
|
[
"def",
"get_bucket_type_props",
"(",
"self",
",",
"bucket_type",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_type_properties_path",
"(",
"bucket_type",
".",
"name",
")",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"props",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"props",
"[",
"'props'",
"]",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting bucket-type properties.'",
")"
] |
Get properties for a bucket-type
|
[
"Get",
"properties",
"for",
"a",
"bucket",
"-",
"type"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L313-L325
|
18,840
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.set_bucket_type_props
|
def set_bucket_type_props(self, bucket_type, props):
"""
Set the properties on the bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting bucket-type properties.')
return True
|
python
|
def set_bucket_type_props(self, bucket_type, props):
"""
Set the properties on the bucket-type
"""
self._check_bucket_types(bucket_type)
url = self.bucket_type_properties_path(bucket_type.name)
headers = {'Content-Type': 'application/json'}
content = json.dumps({'props': props})
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting bucket-type properties.')
return True
|
[
"def",
"set_bucket_type_props",
"(",
"self",
",",
"bucket_type",
",",
"props",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"url",
"=",
"self",
".",
"bucket_type_properties_path",
"(",
"bucket_type",
".",
"name",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"content",
"=",
"json",
".",
"dumps",
"(",
"{",
"'props'",
":",
"props",
"}",
")",
"# Run the request...",
"status",
",",
"_",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error setting bucket-type properties.'",
")",
"return",
"True"
] |
Set the properties on the bucket-type
|
[
"Set",
"the",
"properties",
"on",
"the",
"bucket",
"-",
"type"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L327-L341
|
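Bucket types are created and activated server-side with riak-admin; the client can then read or tune their properties. A sketch assuming a bucket type named 'maps' already exists on the cluster:

import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
btype = client.bucket_type('maps')  # must already exist on the cluster
props = btype.get_properties()      # GET on the bucket-type properties path
btype.set_properties({'n_val': 5})  # PUT {'props': {...}}; expects a 204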
18,841
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.mapred
|
def mapred(self, inputs, query, timeout=None):
"""
Run a MapReduce query.
"""
# Construct the job, optionally set the timeout...
content = self._construct_mapred_json(inputs, query, timeout)
# Do the request...
url = self.mapred_path()
headers = {'Content-Type': 'application/json'}
status, headers, body = self._request('POST', url, headers, content)
# Make sure the expected status code came back...
if status != 200:
raise RiakError(
'Error running MapReduce operation. Headers: %s Body: %s' %
(repr(headers), repr(body)))
result = json.loads(bytes_to_str(body))
return result
|
python
|
def mapred(self, inputs, query, timeout=None):
"""
Run a MapReduce query.
"""
# Construct the job, optionally set the timeout...
content = self._construct_mapred_json(inputs, query, timeout)
# Do the request...
url = self.mapred_path()
headers = {'Content-Type': 'application/json'}
status, headers, body = self._request('POST', url, headers, content)
# Make sure the expected status code came back...
if status != 200:
raise RiakError(
'Error running MapReduce operation. Headers: %s Body: %s' %
(repr(headers), repr(body)))
result = json.loads(bytes_to_str(body))
return result
|
[
"def",
"mapred",
"(",
"self",
",",
"inputs",
",",
"query",
",",
"timeout",
"=",
"None",
")",
":",
"# Construct the job, optionally set the timeout...",
"content",
"=",
"self",
".",
"_construct_mapred_json",
"(",
"inputs",
",",
"query",
",",
"timeout",
")",
"# Do the request...",
"url",
"=",
"self",
".",
"mapred_path",
"(",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'POST'",
",",
"url",
",",
"headers",
",",
"content",
")",
"# Make sure the expected status code came back...",
"if",
"status",
"!=",
"200",
":",
"raise",
"RiakError",
"(",
"'Error running MapReduce operation. Headers: %s Body: %s'",
"%",
"(",
"repr",
"(",
"headers",
")",
",",
"repr",
"(",
"body",
")",
")",
")",
"result",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"return",
"result"
] |
Run a MapReduce query.
|
[
"Run",
"a",
"MapReduce",
"query",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L343-L362
|
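The JSON job assembled by _construct_mapred_json follows Riak's /mapred contract; the shape, written out by hand for illustration (the bucket name is an assumption; the two phase functions are Riak's built-in JavaScript helpers):

# Illustrative MapReduce job document, as POSTed to the mapred path.
job = {
    'inputs': 'mybucket',  # or explicit [[bucket, key], ...] pairs
    'query': [
        {'map': {'language': 'javascript',
                 'name': 'Riak.mapValuesJson'}},
        {'reduce': {'language': 'javascript',
                    'name': 'Riak.reduceSum'}},
    ],
    'timeout': 60000,  # optional, in milliseconds
}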
18,842
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.create_search_index
|
def create_search_index(self, index, schema=None, n_val=None,
timeout=None):
"""
Create a Solr search index for Yokozuna.
:param index: a name of a yz index
:type index: string
:param schema: XML of Solr schema
:type schema: string
:param n_val: N value of the write
:type n_val: int
:param timeout: optional timeout (in ms)
:type timeout: integer, None
:rtype boolean
"""
if not self.yz_wm_index:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_index_path(index)
headers = {'Content-Type': 'application/json'}
content_dict = dict()
if schema:
content_dict['schema'] = schema
if n_val:
content_dict['n_val'] = n_val
if timeout:
content_dict['timeout'] = timeout
content = json.dumps(content_dict)
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting Search 2.0 index.')
return True
|
python
|
def create_search_index(self, index, schema=None, n_val=None,
timeout=None):
"""
Create a Solr search index for Yokozuna.
:param index: a name of a yz index
:type index: string
:param schema: XML of Solr schema
:type schema: string
:param n_val: N value of the write
:type n_val: int
:param timeout: optional timeout (in ms)
:type timeout: integer, None
:rtype boolean
"""
if not self.yz_wm_index:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_index_path(index)
headers = {'Content-Type': 'application/json'}
content_dict = dict()
if schema:
content_dict['schema'] = schema
if n_val:
content_dict['n_val'] = n_val
if timeout:
content_dict['timeout'] = timeout
content = json.dumps(content_dict)
# Run the request...
status, _, _ = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error setting Search 2.0 index.')
return True
|
[
"def",
"create_search_index",
"(",
"self",
",",
"index",
",",
"schema",
"=",
"None",
",",
"n_val",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_index",
":",
"raise",
"NotImplementedError",
"(",
"\"Search 2.0 administration is not \"",
"\"supported for this version\"",
")",
"url",
"=",
"self",
".",
"search_index_path",
"(",
"index",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/json'",
"}",
"content_dict",
"=",
"dict",
"(",
")",
"if",
"schema",
":",
"content_dict",
"[",
"'schema'",
"]",
"=",
"schema",
"if",
"n_val",
":",
"content_dict",
"[",
"'n_val'",
"]",
"=",
"n_val",
"if",
"timeout",
":",
"content_dict",
"[",
"'timeout'",
"]",
"=",
"timeout",
"content",
"=",
"json",
".",
"dumps",
"(",
"content_dict",
")",
"# Run the request...",
"status",
",",
"_",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error setting Search 2.0 index.'",
")",
"return",
"True"
] |
Create a Solr search index for Yokozuna.
:param index: a name of a yz index
:type index: string
:param schema: XML of Solr schema
:type schema: string
:param n_val: N value of the write
:type n_val: int
:param timeout: optional timeout (in ms)
:type timeout: integer, None
:rtype boolean
|
[
"Create",
"a",
"Solr",
"search",
"index",
"for",
"Yokozuna",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L447-L483
|
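Through the client API this is RiakClient.create_search_index; a sketch using Yokozuna's default schema (index and bucket names are assumptions):

import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
client.create_search_index('famous', schema='_yz_default', n_val=3)
# Associate the index with a bucket so its objects get indexed:
client.bucket('cats').set_property('search_index', 'famous')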
18,843
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.list_search_indexes
|
def list_search_indexes(self):
"""
Return a list of Solr search indexes from Yokozuna.
:rtype list of dicts
"""
if not self.yz_wm_index:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_index_path()
# Run the request...
status, headers, body = self._request('GET', url)
if status == 200:
json_data = json.loads(bytes_to_str(body))
# Return a list of dictionaries
return json_data
else:
raise RiakError('Error getting Search 2.0 index.')
|
python
|
def list_search_indexes(self):
"""
Return a list of Solr search indexes from Yokozuna.
:rtype list of dicts
"""
if not self.yz_wm_index:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_index_path()
# Run the request...
status, headers, body = self._request('GET', url)
if status == 200:
json_data = json.loads(bytes_to_str(body))
# Return a list of dictionaries
return json_data
else:
raise RiakError('Error getting Search 2.0 index.')
|
[
"def",
"list_search_indexes",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_index",
":",
"raise",
"NotImplementedError",
"(",
"\"Search 2.0 administration is not \"",
"\"supported for this version\"",
")",
"url",
"=",
"self",
".",
"search_index_path",
"(",
")",
"# Run the request...",
"status",
",",
"headers",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"json_data",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"body",
")",
")",
"# Return a list of dictionaries",
"return",
"json_data",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting Search 2.0 index.'",
")"
] |
Return a list of Solr search indexes from Yokozuna.
:rtype list of dicts
|
[
"Return",
"a",
"list",
"of",
"Solr",
"search",
"indexes",
"from",
"Yokozuna",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L508-L528
|
18,844
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.create_search_schema
|
def create_search_schema(self, schema, content):
"""
Create a new Solr schema for Yokozuna.
:param schema: name of Solr schema
:type schema: string
    :param content: actual definition of the schema (XML)
:type content: string
:rtype boolean
"""
if not self.yz_wm_schema:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_schema_path(schema)
headers = {'Content-Type': 'application/xml'}
# Run the request...
status, header, body = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error creating Search 2.0 schema.')
return True
|
python
|
def create_search_schema(self, schema, content):
"""
Create a new Solr schema for Yokozuna.
:param schema: name of Solr schema
:type schema: string
:param content: actual definition of schema (XML)
:type content: string
:rtype boolean
"""
if not self.yz_wm_schema:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_schema_path(schema)
headers = {'Content-Type': 'application/xml'}
# Run the request...
status, header, body = self._request('PUT', url, headers, content)
if status != 204:
raise RiakError('Error creating Search 2.0 schema.')
return True
|
[
"def",
"create_search_schema",
"(",
"self",
",",
"schema",
",",
"content",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_schema",
":",
"raise",
"NotImplementedError",
"(",
"\"Search 2.0 administration is not \"",
"\"supported for this version\"",
")",
"url",
"=",
"self",
".",
"search_schema_path",
"(",
"schema",
")",
"headers",
"=",
"{",
"'Content-Type'",
":",
"'application/xml'",
"}",
"# Run the request...",
"status",
",",
"header",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'PUT'",
",",
"url",
",",
"headers",
",",
"content",
")",
"if",
"status",
"!=",
"204",
":",
"raise",
"RiakError",
"(",
"'Error creating Search 2.0 schema.'",
")",
"return",
"True"
] |
Create a new Solr schema for Yokozuna.
:param schema: name of Solr schema
:type schema: string
:param content: actual definition of schema (XML)
:type content: string
:rtype boolean
|
[
"Create",
"a",
"new",
"Solr",
"schema",
"for",
"Yokozuna",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L552-L575
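A hedged companion sketch for create_search_schema: upload a placeholder Solr schema, then read it back; the high-level client methods are assumed to mirror the transport names:

import riak

client = riak.RiakClient(protocol='http')
schema_xml = ('<?xml version="1.0" encoding="UTF-8"?>'
              '<schema name="my_schema" version="1.5"></schema>')
client.create_search_schema('my_schema', schema_xml)  # PUT; 204 expected on success
print(client.get_search_schema('my_schema')['content'])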
|
18,845
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.get_search_schema
|
def get_search_schema(self, schema):
"""
Fetch a Solr schema from Yokozuna.
:param schema: name of Solr schema
:type schema: string
:rtype dict
"""
if not self.yz_wm_schema:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_schema_path(schema)
# Run the request...
status, _, body = self._request('GET', url)
if status == 200:
result = {}
result['name'] = schema
result['content'] = bytes_to_str(body)
return result
else:
raise RiakError('Error getting Search 2.0 schema.')
|
python
|
def get_search_schema(self, schema):
"""
Fetch a Solr schema from Yokozuna.
:param schema: name of Solr schema
:type schema: string
:rtype dict
"""
if not self.yz_wm_schema:
raise NotImplementedError("Search 2.0 administration is not "
"supported for this version")
url = self.search_schema_path(schema)
# Run the request...
status, _, body = self._request('GET', url)
if status == 200:
result = {}
result['name'] = schema
result['content'] = bytes_to_str(body)
return result
else:
raise RiakError('Error getting Search 2.0 schema.')
|
[
"def",
"get_search_schema",
"(",
"self",
",",
"schema",
")",
":",
"if",
"not",
"self",
".",
"yz_wm_schema",
":",
"raise",
"NotImplementedError",
"(",
"\"Search 2.0 administration is not \"",
"\"supported for this version\"",
")",
"url",
"=",
"self",
".",
"search_schema_path",
"(",
"schema",
")",
"# Run the request...",
"status",
",",
"_",
",",
"body",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"if",
"status",
"==",
"200",
":",
"result",
"=",
"{",
"}",
"result",
"[",
"'name'",
"]",
"=",
"schema",
"result",
"[",
"'content'",
"]",
"=",
"bytes_to_str",
"(",
"body",
")",
"return",
"result",
"else",
":",
"raise",
"RiakError",
"(",
"'Error getting Search 2.0 schema.'",
")"
] |
Fetch a Solr schema from Yokozuna.
:param schema: name of Solr schema
:type schema: string
:rtype dict
|
[
"Fetch",
"a",
"Solr",
"schema",
"from",
"Yokozuna",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L577-L600
|
18,846
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.search
|
def search(self, index, query, **params):
"""
Performs a search query.
"""
if index is None:
index = 'search'
options = {}
if 'op' in params:
op = params.pop('op')
options['q.op'] = op
options.update(params)
url = self.solr_select_path(index, query, **options)
status, headers, data = self._request('GET', url)
self.check_http_code(status, [200])
if 'json' in headers['content-type']:
results = json.loads(bytes_to_str(data))
return self._normalize_json_search_response(results)
elif 'xml' in headers['content-type']:
return self._normalize_xml_search_response(data)
else:
raise ValueError("Could not decode search response")
|
python
|
def search(self, index, query, **params):
"""
Performs a search query.
"""
if index is None:
index = 'search'
options = {}
if 'op' in params:
op = params.pop('op')
options['q.op'] = op
options.update(params)
url = self.solr_select_path(index, query, **options)
status, headers, data = self._request('GET', url)
self.check_http_code(status, [200])
if 'json' in headers['content-type']:
results = json.loads(bytes_to_str(data))
return self._normalize_json_search_response(results)
elif 'xml' in headers['content-type']:
return self._normalize_xml_search_response(data)
else:
raise ValueError("Could not decode search response")
|
[
"def",
"search",
"(",
"self",
",",
"index",
",",
"query",
",",
"*",
"*",
"params",
")",
":",
"if",
"index",
"is",
"None",
":",
"index",
"=",
"'search'",
"options",
"=",
"{",
"}",
"if",
"'op'",
"in",
"params",
":",
"op",
"=",
"params",
".",
"pop",
"(",
"'op'",
")",
"options",
"[",
"'q.op'",
"]",
"=",
"op",
"options",
".",
"update",
"(",
"params",
")",
"url",
"=",
"self",
".",
"solr_select_path",
"(",
"index",
",",
"query",
",",
"*",
"*",
"options",
")",
"status",
",",
"headers",
",",
"data",
"=",
"self",
".",
"_request",
"(",
"'GET'",
",",
"url",
")",
"self",
".",
"check_http_code",
"(",
"status",
",",
"[",
"200",
"]",
")",
"if",
"'json'",
"in",
"headers",
"[",
"'content-type'",
"]",
":",
"results",
"=",
"json",
".",
"loads",
"(",
"bytes_to_str",
"(",
"data",
")",
")",
"return",
"self",
".",
"_normalize_json_search_response",
"(",
"results",
")",
"elif",
"'xml'",
"in",
"headers",
"[",
"'content-type'",
"]",
":",
"return",
"self",
".",
"_normalize_xml_search_response",
"(",
"data",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Could not decode search response\"",
")"
] |
Performs a search query.
|
[
"Performs",
"a",
"search",
"query",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L602-L624
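A hedged usage sketch for the search entry point; the client-level method is assumed to be fulltext_search, with extra keyword params flowing into the Solr query string:

results = client.fulltext_search('famous', 'name_s:Lion*', rows=10, op='and')
print(results['num_found'])          # normalized response: num_found/max_score/docs
for doc in results['docs']:
    print(doc['_yz_rk'])             # Yokozuna's riak-key field (field name assumed)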
|
18,847
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.fulltext_add
|
def fulltext_add(self, index, docs):
"""
Adds documents to the search index.
"""
xml = Document()
root = xml.createElement('add')
for doc in docs:
doc_element = xml.createElement('doc')
for key in doc:
value = doc[key]
field = xml.createElement('field')
field.setAttribute("name", key)
text = xml.createTextNode(value)
field.appendChild(text)
doc_element.appendChild(field)
root.appendChild(doc_element)
xml.appendChild(root)
self._request('POST', self.solr_update_path(index),
{'Content-Type': 'text/xml'},
xml.toxml().encode('utf-8'))
|
python
|
def fulltext_add(self, index, docs):
"""
Adds documents to the search index.
"""
xml = Document()
root = xml.createElement('add')
for doc in docs:
doc_element = xml.createElement('doc')
for key in doc:
value = doc[key]
field = xml.createElement('field')
field.setAttribute("name", key)
text = xml.createTextNode(value)
field.appendChild(text)
doc_element.appendChild(field)
root.appendChild(doc_element)
xml.appendChild(root)
self._request('POST', self.solr_update_path(index),
{'Content-Type': 'text/xml'},
xml.toxml().encode('utf-8'))
|
[
"def",
"fulltext_add",
"(",
"self",
",",
"index",
",",
"docs",
")",
":",
"xml",
"=",
"Document",
"(",
")",
"root",
"=",
"xml",
".",
"createElement",
"(",
"'add'",
")",
"for",
"doc",
"in",
"docs",
":",
"doc_element",
"=",
"xml",
".",
"createElement",
"(",
"'doc'",
")",
"for",
"key",
"in",
"doc",
":",
"value",
"=",
"doc",
"[",
"key",
"]",
"field",
"=",
"xml",
".",
"createElement",
"(",
"'field'",
")",
"field",
".",
"setAttribute",
"(",
"\"name\"",
",",
"key",
")",
"text",
"=",
"xml",
".",
"createTextNode",
"(",
"value",
")",
"field",
".",
"appendChild",
"(",
"text",
")",
"doc_element",
".",
"appendChild",
"(",
"field",
")",
"root",
".",
"appendChild",
"(",
"doc_element",
")",
"xml",
".",
"appendChild",
"(",
"root",
")",
"self",
".",
"_request",
"(",
"'POST'",
",",
"self",
".",
"solr_update_path",
"(",
"index",
")",
",",
"{",
"'Content-Type'",
":",
"'text/xml'",
"}",
",",
"xml",
".",
"toxml",
"(",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")"
] |
Adds documents to the search index.
|
[
"Adds",
"documents",
"to",
"the",
"search",
"index",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L626-L646
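For reference, the XML that fulltext_add assembles for two documents can be reproduced standalone with xml.dom.minidom; this sketch mirrors the loop above outside the transport:

from xml.dom.minidom import Document

xml = Document()
root = xml.createElement('add')
for doc in [{'id': '1', 'title': 'Lion King'}, {'id': '2', 'title': 'Hamlet'}]:
    doc_element = xml.createElement('doc')
    for key, value in doc.items():
        field = xml.createElement('field')
        field.setAttribute('name', key)
        field.appendChild(xml.createTextNode(value))
        doc_element.appendChild(field)
    root.appendChild(doc_element)
xml.appendChild(root)
print(xml.toxml())  # <add><doc><field name="id">1</field>...</doc>...</add>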
|
18,848
|
basho/riak-python-client
|
riak/transports/http/transport.py
|
HttpTransport.fulltext_delete
|
def fulltext_delete(self, index, docs=None, queries=None):
"""
Removes documents from the full-text index.
"""
xml = Document()
root = xml.createElement('delete')
if docs:
for doc in docs:
doc_element = xml.createElement('id')
text = xml.createTextNode(doc)
doc_element.appendChild(text)
root.appendChild(doc_element)
if queries:
for query in queries:
query_element = xml.createElement('query')
text = xml.createTextNode(query)
query_element.appendChild(text)
root.appendChild(query_element)
xml.appendChild(root)
self._request('POST', self.solr_update_path(index),
{'Content-Type': 'text/xml'},
xml.toxml().encode('utf-8'))
|
python
|
def fulltext_delete(self, index, docs=None, queries=None):
"""
Removes documents from the full-text index.
"""
xml = Document()
root = xml.createElement('delete')
if docs:
for doc in docs:
doc_element = xml.createElement('id')
text = xml.createTextNode(doc)
doc_element.appendChild(text)
root.appendChild(doc_element)
if queries:
for query in queries:
query_element = xml.createElement('query')
text = xml.createTextNode(query)
query_element.appendChild(text)
root.appendChild(query_element)
xml.appendChild(root)
self._request('POST', self.solr_update_path(index),
{'Content-Type': 'text/xml'},
xml.toxml().encode('utf-8'))
|
[
"def",
"fulltext_delete",
"(",
"self",
",",
"index",
",",
"docs",
"=",
"None",
",",
"queries",
"=",
"None",
")",
":",
"xml",
"=",
"Document",
"(",
")",
"root",
"=",
"xml",
".",
"createElement",
"(",
"'delete'",
")",
"if",
"docs",
":",
"for",
"doc",
"in",
"docs",
":",
"doc_element",
"=",
"xml",
".",
"createElement",
"(",
"'id'",
")",
"text",
"=",
"xml",
".",
"createTextNode",
"(",
"doc",
")",
"doc_element",
".",
"appendChild",
"(",
"text",
")",
"root",
".",
"appendChild",
"(",
"doc_element",
")",
"if",
"queries",
":",
"for",
"query",
"in",
"queries",
":",
"query_element",
"=",
"xml",
".",
"createElement",
"(",
"'query'",
")",
"text",
"=",
"xml",
".",
"createTextNode",
"(",
"query",
")",
"query_element",
".",
"appendChild",
"(",
"text",
")",
"root",
".",
"appendChild",
"(",
"query_element",
")",
"xml",
".",
"appendChild",
"(",
"root",
")",
"self",
".",
"_request",
"(",
"'POST'",
",",
"self",
".",
"solr_update_path",
"(",
"index",
")",
",",
"{",
"'Content-Type'",
":",
"'text/xml'",
"}",
",",
"xml",
".",
"toxml",
"(",
")",
".",
"encode",
"(",
"'utf-8'",
")",
")"
] |
Removes documents from the full-text index.
|
[
"Removes",
"documents",
"from",
"the",
"full",
"-",
"text",
"index",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L648-L671
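The delete payload comes in two shapes, by id and by query, and the two can be mixed in one call; a hedged sketch (the transport instance is assumed to exist):

#   by id:    <delete><id>doc-key-1</id></delete>
#   by query: <delete><query>title:Hamlet</query></delete>
transport.fulltext_delete('famous', docs=['doc-key-1'],
                          queries=['title:Hamlet'])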
|
18,849
|
basho/riak-python-client
|
riak/transports/pool.py
|
Resource.release
|
def release(self):
"""
Releases this resource back to the pool it came from.
"""
if self.errored:
self.pool.delete_resource(self)
else:
self.pool.release(self)
|
python
|
def release(self):
"""
Releases this resource back to the pool it came from.
"""
if self.errored:
self.pool.delete_resource(self)
else:
self.pool.release(self)
|
[
"def",
"release",
"(",
"self",
")",
":",
"if",
"self",
".",
"errored",
":",
"self",
".",
"pool",
".",
"delete_resource",
"(",
"self",
")",
"else",
":",
"self",
".",
"pool",
".",
"release",
"(",
"self",
")"
] |
Releases this resource back to the pool it came from.
|
[
"Releases",
"this",
"resource",
"back",
"to",
"the",
"pool",
"it",
"came",
"from",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L76-L83
|
18,850
|
basho/riak-python-client
|
riak/transports/pool.py
|
Pool.delete_resource
|
def delete_resource(self, resource):
"""
Deletes the resource from the pool and destroys the associated
resource. Not usually needed by users of the pool, but called
internally when BadResource is raised.
:param resource: the resource to remove
:type resource: Resource
"""
with self.lock:
self.resources.remove(resource)
self.destroy_resource(resource.object)
del resource
|
python
|
def delete_resource(self, resource):
"""
Deletes the resource from the pool and destroys the associated
resource. Not usually needed by users of the pool, but called
internally when BadResource is raised.
:param resource: the resource to remove
:type resource: Resource
"""
with self.lock:
self.resources.remove(resource)
self.destroy_resource(resource.object)
del resource
|
[
"def",
"delete_resource",
"(",
"self",
",",
"resource",
")",
":",
"with",
"self",
".",
"lock",
":",
"self",
".",
"resources",
".",
"remove",
"(",
"resource",
")",
"self",
".",
"destroy_resource",
"(",
"resource",
".",
"object",
")",
"del",
"resource"
] |
Deletes the resource from the pool and destroys the associated
resource. Not usually needed by users of the pool, but called
internally when BadResource is raised.
:param resource: the resource to remove
:type resource: Resource
|
[
"Deletes",
"the",
"resource",
"from",
"the",
"pool",
"and",
"destroys",
"the",
"associated",
"resource",
".",
"Not",
"usually",
"needed",
"by",
"users",
"of",
"the",
"pool",
"but",
"called",
"internally",
"when",
"BadResource",
"is",
"raised",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/pool.py#L209-L221
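delete_resource is the destructive path of the pool: a subclass supplies create_resource/destroy_resource, and a BadResource raised inside a transaction routes the resource here instead of back into rotation. A minimal sketch, assuming a transaction() context manager on the pool and hypothetical connection helpers:

class ConnectionPool(Pool):
    def create_resource(self):
        return open_connection()   # hypothetical helper returning a live connection

    def destroy_resource(self, conn):
        conn.close()               # invoked by delete_resource above

pool = ConnectionPool()
with pool.transaction() as conn:   # raising BadResource in here discards conn
    conn.ping()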
|
18,851
|
basho/riak-python-client
|
riak/codecs/ttb.py
|
TtbCodec.encode_timeseries_put
|
def encode_timeseries_put(self, tsobj):
'''
Returns an Erlang-TTB encoded tuple with the appropriate data and
metadata from a TsObject.
:param tsobj: a TsObject
:type tsobj: TsObject
:rtype: term-to-binary encoded object
'''
if tsobj.columns:
raise NotImplementedError('columns are not used')
if tsobj.rows and isinstance(tsobj.rows, list):
req_rows = []
for row in tsobj.rows:
req_r = []
for cell in row:
req_r.append(self.encode_to_ts_cell(cell))
req_rows.append(tuple(req_r))
req = tsputreq_a, tsobj.table.name, [], req_rows
mc = MSG_CODE_TS_TTB_MSG
rc = MSG_CODE_TS_TTB_MSG
return Msg(mc, encode(req), rc)
else:
raise RiakError("TsObject requires a list of rows")
|
python
|
def encode_timeseries_put(self, tsobj):
'''
Returns an Erlang-TTB encoded tuple with the appropriate data and
metadata from a TsObject.
:param tsobj: a TsObject
:type tsobj: TsObject
:rtype: term-to-binary encoded object
'''
if tsobj.columns:
raise NotImplementedError('columns are not used')
if tsobj.rows and isinstance(tsobj.rows, list):
req_rows = []
for row in tsobj.rows:
req_r = []
for cell in row:
req_r.append(self.encode_to_ts_cell(cell))
req_rows.append(tuple(req_r))
req = tsputreq_a, tsobj.table.name, [], req_rows
mc = MSG_CODE_TS_TTB_MSG
rc = MSG_CODE_TS_TTB_MSG
return Msg(mc, encode(req), rc)
else:
raise RiakError("TsObject requires a list of rows")
|
[
"def",
"encode_timeseries_put",
"(",
"self",
",",
"tsobj",
")",
":",
"if",
"tsobj",
".",
"columns",
":",
"raise",
"NotImplementedError",
"(",
"'columns are not used'",
")",
"if",
"tsobj",
".",
"rows",
"and",
"isinstance",
"(",
"tsobj",
".",
"rows",
",",
"list",
")",
":",
"req_rows",
"=",
"[",
"]",
"for",
"row",
"in",
"tsobj",
".",
"rows",
":",
"req_r",
"=",
"[",
"]",
"for",
"cell",
"in",
"row",
":",
"req_r",
".",
"append",
"(",
"self",
".",
"encode_to_ts_cell",
"(",
"cell",
")",
")",
"req_rows",
".",
"append",
"(",
"tuple",
"(",
"req_r",
")",
")",
"req",
"=",
"tsputreq_a",
",",
"tsobj",
".",
"table",
".",
"name",
",",
"[",
"]",
",",
"req_rows",
"mc",
"=",
"MSG_CODE_TS_TTB_MSG",
"rc",
"=",
"MSG_CODE_TS_TTB_MSG",
"return",
"Msg",
"(",
"mc",
",",
"encode",
"(",
"req",
")",
",",
"rc",
")",
"else",
":",
"raise",
"RiakError",
"(",
"\"TsObject requires a list of rows\"",
")"
] |
Returns an Erlang-TTB encoded tuple with the appropriate data and
metadata from a TsObject.
:param tsobj: a TsObject
:type tsobj: TsObject
:rtype: term-to-binary encoded object
|
[
"Returns",
"an",
"Erlang",
"-",
"TTB",
"encoded",
"tuple",
"with",
"the",
"appropriate",
"data",
"and",
"metadata",
"from",
"a",
"TsObject",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L116-L140
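Before term-to-binary encoding, the request above is just a 4-tuple of atom, table name, empty column list, and rows of cell tuples; a hedged sketch of the row shaping with encode_to_ts_cell replaced by identity:

rows = [[1469204877000, 'south_atlantic', 25.3],
        [1469204977000, 'south_atlantic', 24.9]]
req_rows = [tuple(cell for cell in row) for row in rows]
req = ('tsputreq', 'GeoCheckin', [], req_rows)  # the atom is shown as a plain string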
|
18,852
|
basho/riak-python-client
|
riak/codecs/ttb.py
|
TtbCodec.decode_timeseries_row
|
def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False):
"""
Decodes a TTB-encoded TsRow into a list
:param tsrow: the TTB decoded TsRow to decode.
:type tsrow: TTB decoded row
:param tsct: the TTB decoded column types (atoms).
:type tsct: list
:param convert_timestamp: Convert timestamps to datetime objects
:type convert_timestamp: boolean
:rtype list
"""
row = []
for i, cell in enumerate(tsrow):
if cell is None:
row.append(None)
elif isinstance(cell, list) and len(cell) == 0:
row.append(None)
else:
if convert_timestamp and tsct[i] == timestamp_a:
row.append(datetime_from_unix_time_millis(cell))
else:
row.append(cell)
return row
|
python
|
def decode_timeseries_row(self, tsrow, tsct, convert_timestamp=False):
"""
Decodes a TTB-encoded TsRow into a list
:param tsrow: the TTB decoded TsRow to decode.
:type tsrow: TTB decoded row
:param tsct: the TTB decoded column types (atoms).
:type tsct: list
:param convert_timestamp: Convert timestamps to datetime objects
:type convert_timestamp: boolean
:rtype list
"""
row = []
for i, cell in enumerate(tsrow):
if cell is None:
row.append(None)
elif isinstance(cell, list) and len(cell) == 0:
row.append(None)
else:
if convert_timestamp and tsct[i] == timestamp_a:
row.append(datetime_from_unix_time_millis(cell))
else:
row.append(cell)
return row
|
[
"def",
"decode_timeseries_row",
"(",
"self",
",",
"tsrow",
",",
"tsct",
",",
"convert_timestamp",
"=",
"False",
")",
":",
"row",
"=",
"[",
"]",
"for",
"i",
",",
"cell",
"in",
"enumerate",
"(",
"tsrow",
")",
":",
"if",
"cell",
"is",
"None",
":",
"row",
".",
"append",
"(",
"None",
")",
"elif",
"isinstance",
"(",
"cell",
",",
"list",
")",
"and",
"len",
"(",
"cell",
")",
"==",
"0",
":",
"row",
".",
"append",
"(",
"None",
")",
"else",
":",
"if",
"convert_timestamp",
"and",
"tsct",
"[",
"i",
"]",
"==",
"timestamp_a",
":",
"row",
".",
"append",
"(",
"datetime_from_unix_time_millis",
"(",
"cell",
")",
")",
"else",
":",
"row",
".",
"append",
"(",
"cell",
")",
"return",
"row"
] |
Decodes a TTB-encoded TsRow into a list
:param tsrow: the TTB decoded TsRow to decode.
:type tsrow: TTB decoded row
:param tsct: the TTB decoded column types (atoms).
:type tsct: list
:param convert_timestamp: Convert timestamps to datetime objects
:type convert_timestamp: boolean
:rtype list
|
[
"Decodes",
"a",
"TTB",
"-",
"encoded",
"TsRow",
"into",
"a",
"list"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/ttb.py#L205-L228
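The timestamp branch depends on a millisecond-to-datetime helper; a hedged equivalent of the datetime_from_unix_time_millis used above:

from datetime import datetime, timedelta

def datetime_from_unix_time_millis(ms):
    # assumed equivalent: Unix epoch plus the millisecond offset, kept in UTC
    return datetime(1970, 1, 1) + timedelta(milliseconds=ms)

print(datetime_from_unix_time_millis(1469204877000))  # 2016-07-22 16:27:57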
|
18,853
|
basho/riak-python-client
|
riak/datatypes/set.py
|
Set.to_op
|
def to_op(self):
"""
Extracts the modification operation from the set.
:rtype: dict, None
"""
if not self._adds and not self._removes:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
if self._removes:
changes['removes'] = list(self._removes)
return changes
|
python
|
def to_op(self):
"""
Extracts the modification operation from the set.
:rtype: dict, None
"""
if not self._adds and not self._removes:
return None
changes = {}
if self._adds:
changes['adds'] = list(self._adds)
if self._removes:
changes['removes'] = list(self._removes)
return changes
|
[
"def",
"to_op",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"_adds",
"and",
"not",
"self",
".",
"_removes",
":",
"return",
"None",
"changes",
"=",
"{",
"}",
"if",
"self",
".",
"_adds",
":",
"changes",
"[",
"'adds'",
"]",
"=",
"list",
"(",
"self",
".",
"_adds",
")",
"if",
"self",
".",
"_removes",
":",
"changes",
"[",
"'removes'",
"]",
"=",
"list",
"(",
"self",
".",
"_removes",
")",
"return",
"changes"
] |
Extracts the modification operation from the set.
:rtype: dict, None
|
[
"Extracts",
"the",
"modification",
"operation",
"from",
"the",
"set",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L60-L73
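A hedged sketch of how staged mutations surface through to_op (the sets-typed bucket wiring is assumed; removals additionally need a fetched context, as discard below enforces):

s = bucket.new('followers')  # assumed sets-typed bucket
s.add('alice')
s.add('bob')
print(s.to_op())             # {'adds': ['alice', 'bob']}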
|
18,854
|
basho/riak-python-client
|
riak/datatypes/set.py
|
Set.discard
|
def discard(self, element):
"""
Removes an element from the set.
.. note: You may remove elements from the set that are not
present, but a context from the server is required.
:param element: the element to remove
:type element: str
"""
_check_element(element)
self._require_context()
self._removes.add(element)
|
python
|
def discard(self, element):
"""
Removes an element from the set.
.. note: You may remove elements from the set that are not
present, but a context from the server is required.
:param element: the element to remove
:type element: str
"""
_check_element(element)
self._require_context()
self._removes.add(element)
|
[
"def",
"discard",
"(",
"self",
",",
"element",
")",
":",
"_check_element",
"(",
"element",
")",
"self",
".",
"_require_context",
"(",
")",
"self",
".",
"_removes",
".",
"add",
"(",
"element",
")"
] |
Removes an element from the set.
.. note: You may remove elements from the set that are not
present, but a context from the server is required.
:param element: the element to remove
:type element: str
|
[
"Removes",
"an",
"element",
"from",
"the",
"set",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/set.py#L101-L113
|
18,855
|
basho/riak-python-client
|
riak/multidict.py
|
MultiDict.getone
|
def getone(self, key):
"""
Get one value matching the key, raising a KeyError if multiple
values were found.
"""
v = self.getall(key)
if not v:
raise KeyError('Key not found: %r' % key)
if len(v) > 1:
raise KeyError('Multiple values match %r: %r' % (key, v))
return v[0]
|
python
|
def getone(self, key):
"""
Get one value matching the key, raising a KeyError if multiple
values were found.
"""
v = self.getall(key)
if not v:
raise KeyError('Key not found: %r' % key)
if len(v) > 1:
raise KeyError('Multiple values match %r: %r' % (key, v))
return v[0]
|
[
"def",
"getone",
"(",
"self",
",",
"key",
")",
":",
"v",
"=",
"self",
".",
"getall",
"(",
"key",
")",
"if",
"not",
"v",
":",
"raise",
"KeyError",
"(",
"'Key not found: %r'",
"%",
"key",
")",
"if",
"len",
"(",
"v",
")",
">",
"1",
":",
"raise",
"KeyError",
"(",
"'Multiple values match %r: %r'",
"%",
"(",
"key",
",",
"v",
")",
")",
"return",
"v",
"[",
"0",
"]"
] |
Get one value matching the key, raising a KeyError if multiple
values were found.
|
[
"Get",
"one",
"value",
"matching",
"the",
"key",
"raising",
"a",
"KeyError",
"if",
"multiple",
"values",
"were",
"found",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L73-L83
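A short behaviour sketch, assuming the MultiDict constructor accepts a list of pairs:

md = MultiDict([('content-type', 'text/plain')])
print(md.getone('content-type'))   # 'text/plain'
md.add('content-type', 'text/html')
md.getone('content-type')          # KeyError: multiple values match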
|
18,856
|
basho/riak-python-client
|
riak/multidict.py
|
MultiDict.dict_of_lists
|
def dict_of_lists(self):
"""
Returns a dictionary where each key is associated with a
list of values.
"""
result = {}
for key, value in self._items:
if key in result:
result[key].append(value)
else:
result[key] = [value]
return result
|
python
|
def dict_of_lists(self):
"""
Returns a dictionary where each key is associated with a
list of values.
"""
result = {}
for key, value in self._items:
if key in result:
result[key].append(value)
else:
result[key] = [value]
return result
|
[
"def",
"dict_of_lists",
"(",
"self",
")",
":",
"result",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"self",
".",
"_items",
":",
"if",
"key",
"in",
"result",
":",
"result",
"[",
"key",
"]",
".",
"append",
"(",
"value",
")",
"else",
":",
"result",
"[",
"key",
"]",
"=",
"[",
"value",
"]",
"return",
"result"
] |
Returns a dictionary where each key is associated with a
list of values.
|
[
"Returns",
"a",
"dictionary",
"where",
"each",
"key",
"is",
"associated",
"with",
"a",
"list",
"of",
"values",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/multidict.py#L108-L119
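Continuing the same assumed construction, dict_of_lists groups repeated keys:

md = MultiDict([('a', 1), ('a', 2), ('b', 3)])
print(md.dict_of_lists())  # {'a': [1, 2], 'b': [3]}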
|
18,857
|
basho/riak-python-client
|
riak/client/multi.py
|
MultiPool.enq
|
def enq(self, task):
"""
Enqueues a fetch task to the pool of workers. This will raise
a RuntimeError if the pool is stopped or in the process of
stopping.
:param task: the Task object
:type task: Task or PutTask
"""
if not self._stop.is_set():
self._inq.put(task)
else:
raise RuntimeError("Attempted to enqueue an operation while "
"multi pool was shutdown!")
|
python
|
def enq(self, task):
"""
Enqueues a fetch task to the pool of workers. This will raise
a RuntimeError if the pool is stopped or in the process of
stopping.
:param task: the Task object
:type task: Task or PutTask
"""
if not self._stop.is_set():
self._inq.put(task)
else:
raise RuntimeError("Attempted to enqueue an operation while "
"multi pool was shutdown!")
|
[
"def",
"enq",
"(",
"self",
",",
"task",
")",
":",
"if",
"not",
"self",
".",
"_stop",
".",
"is_set",
"(",
")",
":",
"self",
".",
"_inq",
".",
"put",
"(",
"task",
")",
"else",
":",
"raise",
"RuntimeError",
"(",
"\"Attempted to enqueue an operation while \"",
"\"multi pool was shutdown!\"",
")"
] |
Enqueues a fetch task to the pool of workers. This will raise
a RuntimeError if the pool is stopped or in the process of
stopping.
:param task: the Task object
:type task: Task or PutTask
|
[
"Enqueues",
"a",
"fetch",
"task",
"to",
"the",
"pool",
"of",
"workers",
".",
"This",
"will",
"raise",
"a",
"RuntimeError",
"if",
"the",
"pool",
"is",
"stopped",
"or",
"in",
"the",
"process",
"of",
"stopping",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L73-L86
|
18,858
|
basho/riak-python-client
|
riak/client/multi.py
|
MultiPool.start
|
def start(self):
"""
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
"""
# Check whether we are already started, skip if we are.
if not self._started.is_set():
# If we are not started, try to capture the lock.
if self._lock.acquire(False):
# If we got the lock, go ahead and start the worker
# threads, set the started flag, and release the lock.
for i in range(self._size):
name = "riak.client.multi-worker-{0}-{1}".format(
self._name, i)
worker = Thread(target=self._worker_method, name=name)
worker.daemon = False
worker.start()
self._workers.append(worker)
self._started.set()
self._lock.release()
else:
# We didn't get the lock, so someone else is already
# starting the worker threads. Wait until they have
# signaled that the threads are started.
self._started.wait()
|
python
|
def start(self):
"""
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
"""
# Check whether we are already started, skip if we are.
if not self._started.is_set():
# If we are not started, try to capture the lock.
if self._lock.acquire(False):
# If we got the lock, go ahead and start the worker
# threads, set the started flag, and release the lock.
for i in range(self._size):
name = "riak.client.multi-worker-{0}-{1}".format(
self._name, i)
worker = Thread(target=self._worker_method, name=name)
worker.daemon = False
worker.start()
self._workers.append(worker)
self._started.set()
self._lock.release()
else:
# We didn't get the lock, so someone else is already
# starting the worker threads. Wait until they have
# signaled that the threads are started.
self._started.wait()
|
[
"def",
"start",
"(",
"self",
")",
":",
"# Check whether we are already started, skip if we are.",
"if",
"not",
"self",
".",
"_started",
".",
"is_set",
"(",
")",
":",
"# If we are not started, try to capture the lock.",
"if",
"self",
".",
"_lock",
".",
"acquire",
"(",
"False",
")",
":",
"# If we got the lock, go ahead and start the worker",
"# threads, set the started flag, and release the lock.",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_size",
")",
":",
"name",
"=",
"\"riak.client.multi-worker-{0}-{1}\"",
".",
"format",
"(",
"self",
".",
"_name",
",",
"i",
")",
"worker",
"=",
"Thread",
"(",
"target",
"=",
"self",
".",
"_worker_method",
",",
"name",
"=",
"name",
")",
"worker",
".",
"daemon",
"=",
"False",
"worker",
".",
"start",
"(",
")",
"self",
".",
"_workers",
".",
"append",
"(",
"worker",
")",
"self",
".",
"_started",
".",
"set",
"(",
")",
"self",
".",
"_lock",
".",
"release",
"(",
")",
"else",
":",
"# We didn't get the lock, so someone else is already",
"# starting the worker threads. Wait until they have",
"# signaled that the threads are started.",
"self",
".",
"_started",
".",
"wait",
"(",
")"
] |
Starts the worker threads if they are not already started.
This method is thread-safe and will be called automatically
when executing an operation.
|
[
"Starts",
"the",
"worker",
"threads",
"if",
"they",
"are",
"not",
"already",
"started",
".",
"This",
"method",
"is",
"thread",
"-",
"safe",
"and",
"will",
"be",
"called",
"automatically",
"when",
"executing",
"an",
"operation",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L88-L113
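The gate in start() is a non-blocking double-checked latch: one caller wins lock.acquire(False) and spawns the workers, while the losers park on the Event rather than spinning. A stripped-down sketch of the same pattern (all names here are illustrative):

from threading import Event, Lock

started = Event()
lock = Lock()

def ensure_started(spawn):
    if started.is_set():
        return                 # fast path: workers already running
    if lock.acquire(False):    # exactly one caller wins the race
        try:
            spawn()            # start the workers once
            started.set()
        finally:
            lock.release()
    else:
        started.wait()         # losers block until the winner signals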
|
18,859
|
basho/riak-python-client
|
riak/client/multi.py
|
MultiPool.stop
|
def stop(self):
"""
Signals the worker threads to exit and waits on them.
"""
if not self.stopped():
self._stop.set()
for worker in self._workers:
worker.join()
|
python
|
def stop(self):
"""
Signals the worker threads to exit and waits on them.
"""
if not self.stopped():
self._stop.set()
for worker in self._workers:
worker.join()
|
[
"def",
"stop",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"stopped",
"(",
")",
":",
"self",
".",
"_stop",
".",
"set",
"(",
")",
"for",
"worker",
"in",
"self",
".",
"_workers",
":",
"worker",
".",
"join",
"(",
")"
] |
Signals the worker threads to exit and waits on them.
|
[
"Signals",
"the",
"worker",
"threads",
"to",
"exit",
"and",
"waits",
"on",
"them",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/client/multi.py#L115-L122
|
18,860
|
basho/riak-python-client
|
riak/datatypes/map.py
|
Map._check_key
|
def _check_key(self, key):
"""
Ensures well-formedness of a key.
"""
if not len(key) == 2:
raise TypeError('invalid key: %r' % key)
elif key[1] not in TYPES:
raise TypeError('invalid datatype: %s' % key[1])
|
python
|
def _check_key(self, key):
"""
Ensures well-formedness of a key.
"""
if not len(key) == 2:
raise TypeError('invalid key: %r' % key)
elif key[1] not in TYPES:
raise TypeError('invalid datatype: %s' % key[1])
|
[
"def",
"_check_key",
"(",
"self",
",",
"key",
")",
":",
"if",
"not",
"len",
"(",
"key",
")",
"==",
"2",
":",
"raise",
"TypeError",
"(",
"'invalid key: %r'",
"%",
"key",
")",
"elif",
"key",
"[",
"1",
"]",
"not",
"in",
"TYPES",
":",
"raise",
"TypeError",
"(",
"'invalid datatype: %s'",
"%",
"key",
"[",
"1",
"]",
")"
] |
Ensures well-formedness of a key.
|
[
"Ensures",
"well",
"-",
"formedness",
"of",
"a",
"key",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L227-L234
|
18,861
|
basho/riak-python-client
|
riak/datatypes/map.py
|
Map.modified
|
def modified(self):
"""
Whether the map has staged local modifications.
"""
if self._removes:
return True
for v in self._value:
if self._value[v].modified:
return True
for v in self._updates:
if self._updates[v].modified:
return True
return False
|
python
|
def modified(self):
"""
Whether the map has staged local modifications.
"""
if self._removes:
return True
for v in self._value:
if self._value[v].modified:
return True
for v in self._updates:
if self._updates[v].modified:
return True
return False
|
[
"def",
"modified",
"(",
"self",
")",
":",
"if",
"self",
".",
"_removes",
":",
"return",
"True",
"for",
"v",
"in",
"self",
".",
"_value",
":",
"if",
"self",
".",
"_value",
"[",
"v",
"]",
".",
"modified",
":",
"return",
"True",
"for",
"v",
"in",
"self",
".",
"_updates",
":",
"if",
"self",
".",
"_updates",
"[",
"v",
"]",
".",
"modified",
":",
"return",
"True",
"return",
"False"
] |
Whether the map has staged local modifications.
|
[
"Whether",
"the",
"map",
"has",
"staged",
"local",
"modifications",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/map.py#L252-L264
|
18,862
|
basho/riak-python-client
|
commands.py
|
build_messages._format_python2_or_3
|
def _format_python2_or_3(self):
"""
Change the PB files to use full pathnames for Python 3.x
and modify the metaclasses to be version agnostic
"""
pb_files = set()
with open(self.source, 'r', buffering=1) as csvfile:
reader = csv.reader(csvfile)
for row in reader:
_, _, proto = row
pb_files.add('riak/pb/{0}_pb2.py'.format(proto))
for im in sorted(pb_files):
with open(im, 'r', buffering=1) as pbfile:
contents = 'from six import *\n' + pbfile.read()
contents = re.sub(r'riak_pb2',
r'riak.pb.riak_pb2',
contents)
# Look for this pattern in the protoc-generated file:
#
# class RpbCounterGetResp(_message.Message):
# __metaclass__ = _reflection.GeneratedProtocolMessageType
#
# and convert it to:
#
# @add_metaclass(_reflection.GeneratedProtocolMessageType)
# class RpbCounterGetResp(_message.Message):
contents = re.sub(
r'class\s+(\S+)\((\S+)\):\s*\n'
'\s+__metaclass__\s+=\s+(\S+)\s*\n',
r'@add_metaclass(\3)\nclass \1(\2):\n', contents)
with open(im, 'w', buffering=1) as pbfile:
pbfile.write(contents)
|
python
|
def _format_python2_or_3(self):
"""
Change the PB files to use full pathnames for Python 3.x
and modify the metaclasses to be version agnostic
"""
pb_files = set()
with open(self.source, 'r', buffering=1) as csvfile:
reader = csv.reader(csvfile)
for row in reader:
_, _, proto = row
pb_files.add('riak/pb/{0}_pb2.py'.format(proto))
for im in sorted(pb_files):
with open(im, 'r', buffering=1) as pbfile:
contents = 'from six import *\n' + pbfile.read()
contents = re.sub(r'riak_pb2',
r'riak.pb.riak_pb2',
contents)
# Look for this pattern in the protoc-generated file:
#
# class RpbCounterGetResp(_message.Message):
# __metaclass__ = _reflection.GeneratedProtocolMessageType
#
# and convert it to:
#
# @add_metaclass(_reflection.GeneratedProtocolMessageType)
# class RpbCounterGetResp(_message.Message):
contents = re.sub(
r'class\s+(\S+)\((\S+)\):\s*\n'
'\s+__metaclass__\s+=\s+(\S+)\s*\n',
r'@add_metaclass(\3)\nclass \1(\2):\n', contents)
with open(im, 'w', buffering=1) as pbfile:
pbfile.write(contents)
|
[
"def",
"_format_python2_or_3",
"(",
"self",
")",
":",
"pb_files",
"=",
"set",
"(",
")",
"with",
"open",
"(",
"self",
".",
"source",
",",
"'r'",
",",
"buffering",
"=",
"1",
")",
"as",
"csvfile",
":",
"reader",
"=",
"csv",
".",
"reader",
"(",
"csvfile",
")",
"for",
"row",
"in",
"reader",
":",
"_",
",",
"_",
",",
"proto",
"=",
"row",
"pb_files",
".",
"add",
"(",
"'riak/pb/{0}_pb2.py'",
".",
"format",
"(",
"proto",
")",
")",
"for",
"im",
"in",
"sorted",
"(",
"pb_files",
")",
":",
"with",
"open",
"(",
"im",
",",
"'r'",
",",
"buffering",
"=",
"1",
")",
"as",
"pbfile",
":",
"contents",
"=",
"'from six import *\\n'",
"+",
"pbfile",
".",
"read",
"(",
")",
"contents",
"=",
"re",
".",
"sub",
"(",
"r'riak_pb2'",
",",
"r'riak.pb.riak_pb2'",
",",
"contents",
")",
"# Look for this pattern in the protoc-generated file:",
"#",
"# class RpbCounterGetResp(_message.Message):",
"# __metaclass__ = _reflection.GeneratedProtocolMessageType",
"#",
"# and convert it to:",
"#",
"# @add_metaclass(_reflection.GeneratedProtocolMessageType)",
"# class RpbCounterGetResp(_message.Message):",
"contents",
"=",
"re",
".",
"sub",
"(",
"r'class\\s+(\\S+)\\((\\S+)\\):\\s*\\n'",
"'\\s+__metaclass__\\s+=\\s+(\\S+)\\s*\\n'",
",",
"r'@add_metaclass(\\3)\\nclass \\1(\\2):\\n'",
",",
"contents",
")",
"with",
"open",
"(",
"im",
",",
"'w'",
",",
"buffering",
"=",
"1",
")",
"as",
"pbfile",
":",
"pbfile",
".",
"write",
"(",
"contents",
")"
] |
Change the PB files to use full pathnames for Python 3.x
and modify the metaclasses to be version agnostic
|
[
"Change",
"the",
"PB",
"files",
"to",
"use",
"full",
"pathnames",
"for",
"Python",
"3",
".",
"x",
"and",
"modify",
"the",
"metaclasses",
"to",
"be",
"version",
"agnostic"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/commands.py#L372-L405
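The metaclass rewrite is a single regex substitution; running it against a representative protoc-generated class shows the before/after:

import re

src = ('class RpbCounterGetResp(_message.Message):\n'
       '    __metaclass__ = _reflection.GeneratedProtocolMessageType\n')
out = re.sub(r'class\s+(\S+)\((\S+)\):\s*\n'
             r'\s+__metaclass__\s+=\s+(\S+)\s*\n',
             r'@add_metaclass(\3)\nclass \1(\2):\n', src)
print(out)
# @add_metaclass(_reflection.GeneratedProtocolMessageType)
# class RpbCounterGetResp(_message.Message):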
|
18,863
|
basho/riak-python-client
|
riak/datatypes/datatype.py
|
Datatype.reload
|
def reload(self, **params):
"""
Reloads the datatype from Riak.
.. warning: This clears any local modifications you might have
made.
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:param include_context: whether to return the opaque context
as well as the value, which is useful for removal operations
on sets and maps
:type include_context: bool
:rtype: :class:`Datatype`
"""
if not self.bucket:
raise ValueError('bucket property not assigned')
if not self.key:
raise ValueError('key property not assigned')
dtype, value, context = self.bucket._client._fetch_datatype(
self.bucket, self.key, **params)
if not dtype == self.type_name:
raise TypeError("Expected datatype {} but "
"got datatype {}".format(self.__class__,
TYPES[dtype]))
self.clear()
self._context = context
self._set_value(value)
return self
|
python
|
def reload(self, **params):
"""
Reloads the datatype from Riak.
.. warning: This clears any local modifications you might have
made.
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:param include_context: whether to return the opaque context
as well as the value, which is useful for removal operations
on sets and maps
:type include_context: bool
:rtype: :class:`Datatype`
"""
if not self.bucket:
raise ValueError('bucket property not assigned')
if not self.key:
raise ValueError('key property not assigned')
dtype, value, context = self.bucket._client._fetch_datatype(
self.bucket, self.key, **params)
if not dtype == self.type_name:
raise TypeError("Expected datatype {} but "
"got datatype {}".format(self.__class__,
TYPES[dtype]))
self.clear()
self._context = context
self._set_value(value)
return self
|
[
"def",
"reload",
"(",
"self",
",",
"*",
"*",
"params",
")",
":",
"if",
"not",
"self",
".",
"bucket",
":",
"raise",
"ValueError",
"(",
"'bucket property not assigned'",
")",
"if",
"not",
"self",
".",
"key",
":",
"raise",
"ValueError",
"(",
"'key property not assigned'",
")",
"dtype",
",",
"value",
",",
"context",
"=",
"self",
".",
"bucket",
".",
"_client",
".",
"_fetch_datatype",
"(",
"self",
".",
"bucket",
",",
"self",
".",
"key",
",",
"*",
"*",
"params",
")",
"if",
"not",
"dtype",
"==",
"self",
".",
"type_name",
":",
"raise",
"TypeError",
"(",
"\"Expected datatype {} but \"",
"\"got datatype {}\"",
".",
"format",
"(",
"self",
".",
"__class__",
",",
"TYPES",
"[",
"dtype",
"]",
")",
")",
"self",
".",
"clear",
"(",
")",
"self",
".",
"_context",
"=",
"context",
"self",
".",
"_set_value",
"(",
"value",
")",
"return",
"self"
] |
Reloads the datatype from Riak.
.. warning: This clears any local modifications you might have
made.
:param r: the read quorum
:type r: integer, string, None
:param pr: the primary read quorum
:type pr: integer, string, None
:param basic_quorum: whether to use the "basic quorum" policy
for not-founds
:type basic_quorum: bool
:param notfound_ok: whether to treat not-found responses as successful
:type notfound_ok: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:param include_context: whether to return the opaque context
as well as the value, which is useful for removal operations
on sets and maps
:type include_context: bool
:rtype: :class:`Datatype`
|
[
"Reloads",
"the",
"datatype",
"from",
"Riak",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L79-L120
|
18,864
|
basho/riak-python-client
|
riak/datatypes/datatype.py
|
Datatype.update
|
def update(self, **params):
"""
Sends locally staged mutations to Riak.
:param w: W-value, wait for this many partitions to respond
before returning to client.
:type w: integer
:param dw: DW-value, wait for this many partitions to
confirm the write before returning to client.
:type dw: integer
:param pw: PW-value, require this many primary partitions to
be available before performing the put
:type pw: integer
:param return_body: if the newly stored object should be
retrieved, defaults to True
:type return_body: bool
:param include_context: whether to return the new opaque
context when `return_body` is `True`
:type include_context: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:rtype: a subclass of :class:`~riak.datatypes.Datatype`
"""
if not self.modified:
raise ValueError("No operation to perform")
params.setdefault('return_body', True)
self.bucket._client.update_datatype(self, **params)
self.clear()
return self
|
python
|
def update(self, **params):
"""
Sends locally staged mutations to Riak.
:param w: W-value, wait for this many partitions to respond
before returning to client.
:type w: integer
:param dw: DW-value, wait for this many partitions to
confirm the write before returning to client.
:type dw: integer
:param pw: PW-value, require this many primary partitions to
be available before performing the put
:type pw: integer
:param return_body: if the newly stored object should be
retrieved, defaults to True
:type return_body: bool
:param include_context: whether to return the new opaque
context when `return_body` is `True`
:type include_context: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:rtype: a subclass of :class:`~riak.datatypes.Datatype`
"""
if not self.modified:
raise ValueError("No operation to perform")
params.setdefault('return_body', True)
self.bucket._client.update_datatype(self, **params)
self.clear()
return self
|
[
"def",
"update",
"(",
"self",
",",
"*",
"*",
"params",
")",
":",
"if",
"not",
"self",
".",
"modified",
":",
"raise",
"ValueError",
"(",
"\"No operation to perform\"",
")",
"params",
".",
"setdefault",
"(",
"'return_body'",
",",
"True",
")",
"self",
".",
"bucket",
".",
"_client",
".",
"update_datatype",
"(",
"self",
",",
"*",
"*",
"params",
")",
"self",
".",
"clear",
"(",
")",
"return",
"self"
] |
Sends locally staged mutations to Riak.
:param w: W-value, wait for this many partitions to respond
before returning to client.
:type w: integer
:param dw: DW-value, wait for this many partitions to
confirm the write before returning to client.
:type dw: integer
:param pw: PW-value, require this many primary partitions to
be available before performing the put
:type pw: integer
:param return_body: if the newly stored object should be
retrieved, defaults to True
:type return_body: bool
:param include_context: whether to return the new opaque
context when `return_body` is `True`
:type include_context: bool
:param timeout: a timeout value in milliseconds
:type timeout: int
:rtype: a subclass of :class:`~riak.datatypes.Datatype`
|
[
"Sends",
"locally",
"staged",
"mutations",
"to",
"Riak",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/datatype.py#L133-L163
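A hedged round-trip for a counter datatype (bucket wiring assumed): stage a mutation, push it with update, and note that clear() wipes the local staging afterwards:

counter = bucket.new('page_hits')  # assumed counters-typed bucket
counter.increment(5)               # staged locally; counter.modified is now True
counter.update(w=2, timeout=1000)  # sends the op and, with return_body, refreshes
print(counter.value)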
|
18,865
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_quorum
|
def encode_quorum(self, rw):
"""
Converts a symbolic quorum value into its on-the-wire
equivalent.
:param rw: the quorum
:type rw: string, integer
:rtype: integer
"""
if rw in QUORUM_TO_PB:
return QUORUM_TO_PB[rw]
elif type(rw) is int and rw >= 0:
return rw
else:
return None
|
python
|
def encode_quorum(self, rw):
"""
Converts a symbolic quorum value into its on-the-wire
equivalent.
:param rw: the quorum
:type rw: string, integer
:rtype: integer
"""
if rw in QUORUM_TO_PB:
return QUORUM_TO_PB[rw]
elif type(rw) is int and rw >= 0:
return rw
else:
return None
|
[
"def",
"encode_quorum",
"(",
"self",
",",
"rw",
")",
":",
"if",
"rw",
"in",
"QUORUM_TO_PB",
":",
"return",
"QUORUM_TO_PB",
"[",
"rw",
"]",
"elif",
"type",
"(",
"rw",
")",
"is",
"int",
"and",
"rw",
">=",
"0",
":",
"return",
"rw",
"else",
":",
"return",
"None"
] |
Converts a symbolic quorum value into its on-the-wire
equivalent.
:param rw: the quorum
:type rw: string, integer
:rtype: integer
|
[
"Converts",
"a",
"symbolic",
"quorum",
"value",
"into",
"its",
"on",
"-",
"the",
"-",
"wire",
"equivalent",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L124-L138
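The symbolic names map onto reserved integers just below UINT32_MAX on the wire; the exact constants below follow the riak_pb convention but should be read as assumptions:

QUORUM_TO_PB = {'default': 4294967291,  # UINT32_MAX - 4
                'all':     4294967292,  # UINT32_MAX - 3
                'quorum':  4294967293,  # UINT32_MAX - 2
                'one':     4294967294}  # UINT32_MAX - 1
# encode_quorum('quorum') -> 4294967293; encode_quorum(2) -> 2; encode_quorum(-1) -> None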
|
18,866
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_contents
|
def decode_contents(self, contents, obj):
"""
Decodes the list of siblings from the protobuf representation
into the object.
:param contents: a list of RpbContent messages
:type contents: list
:param obj: a RiakObject
:type obj: RiakObject
:rtype RiakObject
"""
obj.siblings = [self.decode_content(c, RiakContent(obj))
for c in contents]
# Invoke sibling-resolution logic
if len(obj.siblings) > 1 and obj.resolver is not None:
obj.resolver(obj)
return obj
|
python
|
def decode_contents(self, contents, obj):
"""
Decodes the list of siblings from the protobuf representation
into the object.
:param contents: a list of RpbContent messages
:type contents: list
:param obj: a RiakObject
:type obj: RiakObject
:rtype RiakObject
"""
obj.siblings = [self.decode_content(c, RiakContent(obj))
for c in contents]
# Invoke sibling-resolution logic
if len(obj.siblings) > 1 and obj.resolver is not None:
obj.resolver(obj)
return obj
|
[
"def",
"decode_contents",
"(",
"self",
",",
"contents",
",",
"obj",
")",
":",
"obj",
".",
"siblings",
"=",
"[",
"self",
".",
"decode_content",
"(",
"c",
",",
"RiakContent",
"(",
"obj",
")",
")",
"for",
"c",
"in",
"contents",
"]",
"# Invoke sibling-resolution logic",
"if",
"len",
"(",
"obj",
".",
"siblings",
")",
">",
"1",
"and",
"obj",
".",
"resolver",
"is",
"not",
"None",
":",
"obj",
".",
"resolver",
"(",
"obj",
")",
"return",
"obj"
] |
Decodes the list of siblings from the protobuf representation
into the object.
:param contents: a list of RpbContent messages
:type contents: list
:param obj: a RiakObject
:type obj: RiakObject
:rtype RiakObject
|
[
"Decodes",
"the",
"list",
"of",
"siblings",
"from",
"the",
"protobuf",
"representation",
"into",
"the",
"object",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L154-L170
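The resolver hook at the end is the conflict-handling extension point; a hedged last-write-wins resolver that collapses siblings by last_modified:

def last_write_wins(robj):
    # keep only the most recently modified sibling (ties broken arbitrarily)
    robj.siblings = [max(robj.siblings, key=lambda s: s.last_modified or 0)]

obj.resolver = last_write_wins  # invoked automatically when len(siblings) > 1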
|
18,867
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_content
|
def decode_content(self, rpb_content, sibling):
"""
Decodes a single sibling from the protobuf representation into
a RiakObject.
:param rpb_content: a single RpbContent message
:type rpb_content: riak.pb.riak_pb2.RpbContent
:param sibling: a RiakContent sibling container
:type sibling: RiakContent
:rtype: RiakContent
"""
if rpb_content.HasField("deleted") and rpb_content.deleted:
sibling.exists = False
else:
sibling.exists = True
if rpb_content.HasField("content_type"):
sibling.content_type = bytes_to_str(rpb_content.content_type)
if rpb_content.HasField("charset"):
sibling.charset = bytes_to_str(rpb_content.charset)
if rpb_content.HasField("content_encoding"):
sibling.content_encoding = \
bytes_to_str(rpb_content.content_encoding)
if rpb_content.HasField("vtag"):
sibling.etag = bytes_to_str(rpb_content.vtag)
sibling.links = [self.decode_link(link)
for link in rpb_content.links]
if rpb_content.HasField("last_mod"):
sibling.last_modified = float(rpb_content.last_mod)
if rpb_content.HasField("last_mod_usecs"):
sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0
sibling.usermeta = dict([(bytes_to_str(usermd.key),
bytes_to_str(usermd.value))
for usermd in rpb_content.usermeta])
sibling.indexes = set([(bytes_to_str(index.key),
decode_index_value(index.key, index.value))
for index in rpb_content.indexes])
sibling.encoded_data = rpb_content.value
return sibling
|
python
|
def decode_content(self, rpb_content, sibling):
"""
Decodes a single sibling from the protobuf representation into
a RiakObject.
:param rpb_content: a single RpbContent message
:type rpb_content: riak.pb.riak_pb2.RpbContent
:param sibling: a RiakContent sibling container
:type sibling: RiakContent
:rtype: RiakContent
"""
if rpb_content.HasField("deleted") and rpb_content.deleted:
sibling.exists = False
else:
sibling.exists = True
if rpb_content.HasField("content_type"):
sibling.content_type = bytes_to_str(rpb_content.content_type)
if rpb_content.HasField("charset"):
sibling.charset = bytes_to_str(rpb_content.charset)
if rpb_content.HasField("content_encoding"):
sibling.content_encoding = \
bytes_to_str(rpb_content.content_encoding)
if rpb_content.HasField("vtag"):
sibling.etag = bytes_to_str(rpb_content.vtag)
sibling.links = [self.decode_link(link)
for link in rpb_content.links]
if rpb_content.HasField("last_mod"):
sibling.last_modified = float(rpb_content.last_mod)
if rpb_content.HasField("last_mod_usecs"):
sibling.last_modified += rpb_content.last_mod_usecs / 1000000.0
sibling.usermeta = dict([(bytes_to_str(usermd.key),
bytes_to_str(usermd.value))
for usermd in rpb_content.usermeta])
sibling.indexes = set([(bytes_to_str(index.key),
decode_index_value(index.key, index.value))
for index in rpb_content.indexes])
sibling.encoded_data = rpb_content.value
return sibling
|
[
"def",
"decode_content",
"(",
"self",
",",
"rpb_content",
",",
"sibling",
")",
":",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"deleted\"",
")",
"and",
"rpb_content",
".",
"deleted",
":",
"sibling",
".",
"exists",
"=",
"False",
"else",
":",
"sibling",
".",
"exists",
"=",
"True",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"content_type\"",
")",
":",
"sibling",
".",
"content_type",
"=",
"bytes_to_str",
"(",
"rpb_content",
".",
"content_type",
")",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"charset\"",
")",
":",
"sibling",
".",
"charset",
"=",
"bytes_to_str",
"(",
"rpb_content",
".",
"charset",
")",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"content_encoding\"",
")",
":",
"sibling",
".",
"content_encoding",
"=",
"bytes_to_str",
"(",
"rpb_content",
".",
"content_encoding",
")",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"vtag\"",
")",
":",
"sibling",
".",
"etag",
"=",
"bytes_to_str",
"(",
"rpb_content",
".",
"vtag",
")",
"sibling",
".",
"links",
"=",
"[",
"self",
".",
"decode_link",
"(",
"link",
")",
"for",
"link",
"in",
"rpb_content",
".",
"links",
"]",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"last_mod\"",
")",
":",
"sibling",
".",
"last_modified",
"=",
"float",
"(",
"rpb_content",
".",
"last_mod",
")",
"if",
"rpb_content",
".",
"HasField",
"(",
"\"last_mod_usecs\"",
")",
":",
"sibling",
".",
"last_modified",
"+=",
"rpb_content",
".",
"last_mod_usecs",
"/",
"1000000.0",
"sibling",
".",
"usermeta",
"=",
"dict",
"(",
"[",
"(",
"bytes_to_str",
"(",
"usermd",
".",
"key",
")",
",",
"bytes_to_str",
"(",
"usermd",
".",
"value",
")",
")",
"for",
"usermd",
"in",
"rpb_content",
".",
"usermeta",
"]",
")",
"sibling",
".",
"indexes",
"=",
"set",
"(",
"[",
"(",
"bytes_to_str",
"(",
"index",
".",
"key",
")",
",",
"decode_index_value",
"(",
"index",
".",
"key",
",",
"index",
".",
"value",
")",
")",
"for",
"index",
"in",
"rpb_content",
".",
"indexes",
"]",
")",
"sibling",
".",
"encoded_data",
"=",
"rpb_content",
".",
"value",
"return",
"sibling"
] |
Decodes a single sibling from the protobuf representation into
a RiakObject.
:param rpb_content: a single RpbContent message
:type rpb_content: riak.pb.riak_pb2.RpbContent
:param sibling: a RiakContent sibling container
:type sibling: RiakContent
:rtype: RiakContent
|
[
"Decodes",
"a",
"single",
"sibling",
"from",
"the",
"protobuf",
"representation",
"into",
"a",
"RiakObject",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L172-L213
|
18,868
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_content
|
def encode_content(self, robj, rpb_content):
"""
Fills an RpbContent message with the appropriate data and
metadata from a RiakObject.
:param robj: a RiakObject
:type robj: RiakObject
:param rpb_content: the protobuf message to fill
:type rpb_content: riak.pb.riak_pb2.RpbContent
"""
if robj.content_type:
rpb_content.content_type = str_to_bytes(robj.content_type)
if robj.charset:
rpb_content.charset = str_to_bytes(robj.charset)
if robj.content_encoding:
rpb_content.content_encoding = str_to_bytes(robj.content_encoding)
for uk in robj.usermeta:
pair = rpb_content.usermeta.add()
pair.key = str_to_bytes(uk)
pair.value = str_to_bytes(robj.usermeta[uk])
for link in robj.links:
pb_link = rpb_content.links.add()
try:
bucket, key, tag = link
except ValueError:
raise RiakError("Invalid link tuple %s" % link)
pb_link.bucket = str_to_bytes(bucket)
pb_link.key = str_to_bytes(key)
if tag:
pb_link.tag = str_to_bytes(tag)
else:
pb_link.tag = str_to_bytes('')
for field, value in robj.indexes:
pair = rpb_content.indexes.add()
pair.key = str_to_bytes(field)
pair.value = str_to_bytes(str(value))
# Python 2.x data is stored in a string
if six.PY2:
rpb_content.value = str(robj.encoded_data)
else:
rpb_content.value = robj.encoded_data
|
python
|
def encode_content(self, robj, rpb_content):
"""
Fills an RpbContent message with the appropriate data and
metadata from a RiakObject.
:param robj: a RiakObject
:type robj: RiakObject
:param rpb_content: the protobuf message to fill
:type rpb_content: riak.pb.riak_pb2.RpbContent
"""
if robj.content_type:
rpb_content.content_type = str_to_bytes(robj.content_type)
if robj.charset:
rpb_content.charset = str_to_bytes(robj.charset)
if robj.content_encoding:
rpb_content.content_encoding = str_to_bytes(robj.content_encoding)
for uk in robj.usermeta:
pair = rpb_content.usermeta.add()
pair.key = str_to_bytes(uk)
pair.value = str_to_bytes(robj.usermeta[uk])
for link in robj.links:
pb_link = rpb_content.links.add()
try:
bucket, key, tag = link
except ValueError:
raise RiakError("Invalid link tuple %s" % link)
pb_link.bucket = str_to_bytes(bucket)
pb_link.key = str_to_bytes(key)
if tag:
pb_link.tag = str_to_bytes(tag)
else:
pb_link.tag = str_to_bytes('')
for field, value in robj.indexes:
pair = rpb_content.indexes.add()
pair.key = str_to_bytes(field)
pair.value = str_to_bytes(str(value))
# Python 2.x data is stored in a string
if six.PY2:
rpb_content.value = str(robj.encoded_data)
else:
rpb_content.value = robj.encoded_data
|
[
"def",
"encode_content",
"(",
"self",
",",
"robj",
",",
"rpb_content",
")",
":",
"if",
"robj",
".",
"content_type",
":",
"rpb_content",
".",
"content_type",
"=",
"str_to_bytes",
"(",
"robj",
".",
"content_type",
")",
"if",
"robj",
".",
"charset",
":",
"rpb_content",
".",
"charset",
"=",
"str_to_bytes",
"(",
"robj",
".",
"charset",
")",
"if",
"robj",
".",
"content_encoding",
":",
"rpb_content",
".",
"content_encoding",
"=",
"str_to_bytes",
"(",
"robj",
".",
"content_encoding",
")",
"for",
"uk",
"in",
"robj",
".",
"usermeta",
":",
"pair",
"=",
"rpb_content",
".",
"usermeta",
".",
"add",
"(",
")",
"pair",
".",
"key",
"=",
"str_to_bytes",
"(",
"uk",
")",
"pair",
".",
"value",
"=",
"str_to_bytes",
"(",
"robj",
".",
"usermeta",
"[",
"uk",
"]",
")",
"for",
"link",
"in",
"robj",
".",
"links",
":",
"pb_link",
"=",
"rpb_content",
".",
"links",
".",
"add",
"(",
")",
"try",
":",
"bucket",
",",
"key",
",",
"tag",
"=",
"link",
"except",
"ValueError",
":",
"raise",
"RiakError",
"(",
"\"Invalid link tuple %s\"",
"%",
"link",
")",
"pb_link",
".",
"bucket",
"=",
"str_to_bytes",
"(",
"bucket",
")",
"pb_link",
".",
"key",
"=",
"str_to_bytes",
"(",
"key",
")",
"if",
"tag",
":",
"pb_link",
".",
"tag",
"=",
"str_to_bytes",
"(",
"tag",
")",
"else",
":",
"pb_link",
".",
"tag",
"=",
"str_to_bytes",
"(",
"''",
")",
"for",
"field",
",",
"value",
"in",
"robj",
".",
"indexes",
":",
"pair",
"=",
"rpb_content",
".",
"indexes",
".",
"add",
"(",
")",
"pair",
".",
"key",
"=",
"str_to_bytes",
"(",
"field",
")",
"pair",
".",
"value",
"=",
"str_to_bytes",
"(",
"str",
"(",
"value",
")",
")",
"# Python 2.x data is stored in a string",
"if",
"six",
".",
"PY2",
":",
"rpb_content",
".",
"value",
"=",
"str",
"(",
"robj",
".",
"encoded_data",
")",
"else",
":",
"rpb_content",
".",
"value",
"=",
"robj",
".",
"encoded_data"
] |
Fills an RpbContent message with the appropriate data and
metadata from a RiakObject.
:param robj: a RiakObject
:type robj: RiakObject
:param rpb_content: the protobuf message to fill
:type rpb_content: riak.pb.riak_pb2.RpbContent
|
[
"Fills",
"an",
"RpbContent",
"message",
"with",
"the",
"appropriate",
"data",
"and",
"metadata",
"from",
"a",
"RiakObject",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L215-L258
|
18,869
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_link
|
def decode_link(self, link):
"""
Decodes an RpbLink message into a tuple
:param link: an RpbLink message
:type link: riak.pb.riak_pb2.RpbLink
:rtype tuple
"""
if link.HasField("bucket"):
bucket = bytes_to_str(link.bucket)
else:
bucket = None
if link.HasField("key"):
key = bytes_to_str(link.key)
else:
key = None
if link.HasField("tag"):
tag = bytes_to_str(link.tag)
else:
tag = None
return (bucket, key, tag)
|
python
|
def decode_link(self, link):
"""
Decodes an RpbLink message into a tuple
:param link: an RpbLink message
:type link: riak.pb.riak_pb2.RpbLink
:rtype tuple
"""
if link.HasField("bucket"):
bucket = bytes_to_str(link.bucket)
else:
bucket = None
if link.HasField("key"):
key = bytes_to_str(link.key)
else:
key = None
if link.HasField("tag"):
tag = bytes_to_str(link.tag)
else:
tag = None
return (bucket, key, tag)
|
[
"def",
"decode_link",
"(",
"self",
",",
"link",
")",
":",
"if",
"link",
".",
"HasField",
"(",
"\"bucket\"",
")",
":",
"bucket",
"=",
"bytes_to_str",
"(",
"link",
".",
"bucket",
")",
"else",
":",
"bucket",
"=",
"None",
"if",
"link",
".",
"HasField",
"(",
"\"key\"",
")",
":",
"key",
"=",
"bytes_to_str",
"(",
"link",
".",
"key",
")",
"else",
":",
"key",
"=",
"None",
"if",
"link",
".",
"HasField",
"(",
"\"tag\"",
")",
":",
"tag",
"=",
"bytes_to_str",
"(",
"link",
".",
"tag",
")",
"else",
":",
"tag",
"=",
"None",
"return",
"(",
"bucket",
",",
"key",
",",
"tag",
")"
] |
Decodes an RpbLink message into a tuple
:param link: an RpbLink message
:type link: riak.pb.riak_pb2.RpbLink
:rtype tuple
|
[
"Decodes",
"an",
"RpbLink",
"message",
"into",
"a",
"tuple"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L260-L282
|
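For context, a minimal pure-Python sketch of the (bucket, key, tag) link convention that encode_content and decode_link above round-trip; the values are hypothetical.

def link_roundtrip(link):
    # mirrors encode_content's unpack and decode_link's tuple result
    bucket, key, tag = link      # any other arity raises ValueError
    return (bucket, key, tag)

assert link_roundtrip(("people", "bob", "friend")) == ("people", "bob", "friend")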
18,870
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_bucket_props
|
def encode_bucket_props(self, props, msg):
"""
Encodes a dict of bucket properties into the protobuf message.
:param props: bucket properties
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbSetBucketReq
"""
for prop in NORMAL_PROPS:
if prop in props and props[prop] is not None:
if isinstance(props[prop], six.string_types):
setattr(msg.props, prop, str_to_bytes(props[prop]))
else:
setattr(msg.props, prop, props[prop])
for prop in COMMIT_HOOK_PROPS:
if prop in props:
setattr(msg.props, 'has_' + prop, True)
self.encode_hooklist(props[prop], getattr(msg.props, prop))
for prop in MODFUN_PROPS:
if prop in props and props[prop] is not None:
self.encode_modfun(props[prop], getattr(msg.props, prop))
for prop in QUORUM_PROPS:
if prop in props and props[prop] not in (None, 'default'):
value = self.encode_quorum(props[prop])
if value is not None:
if isinstance(value, six.string_types):
setattr(msg.props, prop, str_to_bytes(value))
else:
setattr(msg.props, prop, value)
if 'repl' in props:
msg.props.repl = REPL_TO_PB[props['repl']]
return msg
|
python
|
def encode_bucket_props(self, props, msg):
"""
Encodes a dict of bucket properties into the protobuf message.
:param props: bucket properties
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbSetBucketReq
"""
for prop in NORMAL_PROPS:
if prop in props and props[prop] is not None:
if isinstance(props[prop], six.string_types):
setattr(msg.props, prop, str_to_bytes(props[prop]))
else:
setattr(msg.props, prop, props[prop])
for prop in COMMIT_HOOK_PROPS:
if prop in props:
setattr(msg.props, 'has_' + prop, True)
self.encode_hooklist(props[prop], getattr(msg.props, prop))
for prop in MODFUN_PROPS:
if prop in props and props[prop] is not None:
self.encode_modfun(props[prop], getattr(msg.props, prop))
for prop in QUORUM_PROPS:
if prop in props and props[prop] not in (None, 'default'):
value = self.encode_quorum(props[prop])
if value is not None:
if isinstance(value, six.string_types):
setattr(msg.props, prop, str_to_bytes(value))
else:
setattr(msg.props, prop, value)
if 'repl' in props:
msg.props.repl = REPL_TO_PB[props['repl']]
return msg
|
[
"def",
"encode_bucket_props",
"(",
"self",
",",
"props",
",",
"msg",
")",
":",
"for",
"prop",
"in",
"NORMAL_PROPS",
":",
"if",
"prop",
"in",
"props",
"and",
"props",
"[",
"prop",
"]",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"props",
"[",
"prop",
"]",
",",
"six",
".",
"string_types",
")",
":",
"setattr",
"(",
"msg",
".",
"props",
",",
"prop",
",",
"str_to_bytes",
"(",
"props",
"[",
"prop",
"]",
")",
")",
"else",
":",
"setattr",
"(",
"msg",
".",
"props",
",",
"prop",
",",
"props",
"[",
"prop",
"]",
")",
"for",
"prop",
"in",
"COMMIT_HOOK_PROPS",
":",
"if",
"prop",
"in",
"props",
":",
"setattr",
"(",
"msg",
".",
"props",
",",
"'has_'",
"+",
"prop",
",",
"True",
")",
"self",
".",
"encode_hooklist",
"(",
"props",
"[",
"prop",
"]",
",",
"getattr",
"(",
"msg",
".",
"props",
",",
"prop",
")",
")",
"for",
"prop",
"in",
"MODFUN_PROPS",
":",
"if",
"prop",
"in",
"props",
"and",
"props",
"[",
"prop",
"]",
"is",
"not",
"None",
":",
"self",
".",
"encode_modfun",
"(",
"props",
"[",
"prop",
"]",
",",
"getattr",
"(",
"msg",
".",
"props",
",",
"prop",
")",
")",
"for",
"prop",
"in",
"QUORUM_PROPS",
":",
"if",
"prop",
"in",
"props",
"and",
"props",
"[",
"prop",
"]",
"not",
"in",
"(",
"None",
",",
"'default'",
")",
":",
"value",
"=",
"self",
".",
"encode_quorum",
"(",
"props",
"[",
"prop",
"]",
")",
"if",
"value",
"is",
"not",
"None",
":",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"string_types",
")",
":",
"setattr",
"(",
"msg",
".",
"props",
",",
"prop",
",",
"str_to_bytes",
"(",
"value",
")",
")",
"else",
":",
"setattr",
"(",
"msg",
".",
"props",
",",
"prop",
",",
"value",
")",
"if",
"'repl'",
"in",
"props",
":",
"msg",
".",
"props",
".",
"repl",
"=",
"REPL_TO_PB",
"[",
"props",
"[",
"'repl'",
"]",
"]",
"return",
"msg"
] |
Encodes a dict of bucket properties into the protobuf message.
:param props: bucket properties
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbSetBucketReq
|
[
"Encodes",
"a",
"dict",
"of",
"bucket",
"properties",
"into",
"the",
"protobuf",
"message",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L298-L331
|
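A hedged usage sketch of the property shapes that encode_bucket_props serializes, assuming the riak package and a reachable node; the port and bucket name are illustrative.

import riak

client = riak.RiakClient(pb_port=8087)   # assumed local node
bucket = client.bucket('users')          # hypothetical bucket
# NORMAL_PROPS (n_val, allow_mult) and QUORUM_PROPS (r, w, ...) take these
# Python shapes before being packed into RpbSetBucketReq:
bucket.set_properties({'n_val': 3, 'allow_mult': False, 'r': 'quorum'})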
18,871
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_bucket_props
|
def decode_bucket_props(self, msg):
"""
Decodes the protobuf bucket properties message into a dict.
:param msg: the protobuf message to decode
:type msg: riak.pb.riak_pb2.RpbBucketProps
:rtype dict
"""
props = {}
for prop in NORMAL_PROPS:
if msg.HasField(prop):
props[prop] = getattr(msg, prop)
if isinstance(props[prop], bytes):
props[prop] = bytes_to_str(props[prop])
for prop in COMMIT_HOOK_PROPS:
if getattr(msg, 'has_' + prop):
props[prop] = self.decode_hooklist(getattr(msg, prop))
for prop in MODFUN_PROPS:
if msg.HasField(prop):
props[prop] = self.decode_modfun(getattr(msg, prop))
for prop in QUORUM_PROPS:
if msg.HasField(prop):
props[prop] = self.decode_quorum(getattr(msg, prop))
if msg.HasField('repl'):
props['repl'] = REPL_TO_PY[msg.repl]
return props
|
python
|
def decode_bucket_props(self, msg):
"""
Decodes the protobuf bucket properties message into a dict.
:param msg: the protobuf message to decode
:type msg: riak.pb.riak_pb2.RpbBucketProps
:rtype dict
"""
props = {}
for prop in NORMAL_PROPS:
if msg.HasField(prop):
props[prop] = getattr(msg, prop)
if isinstance(props[prop], bytes):
props[prop] = bytes_to_str(props[prop])
for prop in COMMIT_HOOK_PROPS:
if getattr(msg, 'has_' + prop):
props[prop] = self.decode_hooklist(getattr(msg, prop))
for prop in MODFUN_PROPS:
if msg.HasField(prop):
props[prop] = self.decode_modfun(getattr(msg, prop))
for prop in QUORUM_PROPS:
if msg.HasField(prop):
props[prop] = self.decode_quorum(getattr(msg, prop))
if msg.HasField('repl'):
props['repl'] = REPL_TO_PY[msg.repl]
return props
|
[
"def",
"decode_bucket_props",
"(",
"self",
",",
"msg",
")",
":",
"props",
"=",
"{",
"}",
"for",
"prop",
"in",
"NORMAL_PROPS",
":",
"if",
"msg",
".",
"HasField",
"(",
"prop",
")",
":",
"props",
"[",
"prop",
"]",
"=",
"getattr",
"(",
"msg",
",",
"prop",
")",
"if",
"isinstance",
"(",
"props",
"[",
"prop",
"]",
",",
"bytes",
")",
":",
"props",
"[",
"prop",
"]",
"=",
"bytes_to_str",
"(",
"props",
"[",
"prop",
"]",
")",
"for",
"prop",
"in",
"COMMIT_HOOK_PROPS",
":",
"if",
"getattr",
"(",
"msg",
",",
"'has_'",
"+",
"prop",
")",
":",
"props",
"[",
"prop",
"]",
"=",
"self",
".",
"decode_hooklist",
"(",
"getattr",
"(",
"msg",
",",
"prop",
")",
")",
"for",
"prop",
"in",
"MODFUN_PROPS",
":",
"if",
"msg",
".",
"HasField",
"(",
"prop",
")",
":",
"props",
"[",
"prop",
"]",
"=",
"self",
".",
"decode_modfun",
"(",
"getattr",
"(",
"msg",
",",
"prop",
")",
")",
"for",
"prop",
"in",
"QUORUM_PROPS",
":",
"if",
"msg",
".",
"HasField",
"(",
"prop",
")",
":",
"props",
"[",
"prop",
"]",
"=",
"self",
".",
"decode_quorum",
"(",
"getattr",
"(",
"msg",
",",
"prop",
")",
")",
"if",
"msg",
".",
"HasField",
"(",
"'repl'",
")",
":",
"props",
"[",
"'repl'",
"]",
"=",
"REPL_TO_PY",
"[",
"msg",
".",
"repl",
"]",
"return",
"props"
] |
Decodes the protobuf bucket properties message into a dict.
:param msg: the protobuf message to decode
:type msg: riak.pb.riak_pb2.RpbBucketProps
:rtype dict
|
[
"Decodes",
"the",
"protobuf",
"bucket",
"properties",
"message",
"into",
"a",
"dict",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L333-L358
|
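The dict built by decode_bucket_props is what the client-level getter returns; a short sketch under the same assumptions as the previous example.

import riak

bucket = riak.RiakClient(pb_port=8087).bucket('users')  # assumed node/bucket
props = bucket.get_properties()      # dict assembled by decode_bucket_props
print(props.get('n_val'), props.get('allow_mult'))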
18,872
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_modfun
|
def encode_modfun(self, props, msg=None):
"""
Encodes a dict with 'mod' and 'fun' keys into a protobuf
modfun pair. Used in bucket properties.
:param props: the module/function pair
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbModFun
:rtype riak.pb.riak_pb2.RpbModFun
"""
if msg is None:
msg = riak.pb.riak_pb2.RpbModFun()
msg.module = str_to_bytes(props['mod'])
msg.function = str_to_bytes(props['fun'])
return msg
|
python
|
def encode_modfun(self, props, msg=None):
"""
Encodes a dict with 'mod' and 'fun' keys into a protobuf
modfun pair. Used in bucket properties.
:param props: the module/function pair
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbModFun
:rtype riak.pb.riak_pb2.RpbModFun
"""
if msg is None:
msg = riak.pb.riak_pb2.RpbModFun()
msg.module = str_to_bytes(props['mod'])
msg.function = str_to_bytes(props['fun'])
return msg
|
[
"def",
"encode_modfun",
"(",
"self",
",",
"props",
",",
"msg",
"=",
"None",
")",
":",
"if",
"msg",
"is",
"None",
":",
"msg",
"=",
"riak",
".",
"pb",
".",
"riak_pb2",
".",
"RpbModFun",
"(",
")",
"msg",
".",
"module",
"=",
"str_to_bytes",
"(",
"props",
"[",
"'mod'",
"]",
")",
"msg",
".",
"function",
"=",
"str_to_bytes",
"(",
"props",
"[",
"'fun'",
"]",
")",
"return",
"msg"
] |
Encodes a dict with 'mod' and 'fun' keys into a protobuf
modfun pair. Used in bucket properties.
:param props: the module/function pair
:type props: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbModFun
:rtype riak.pb.riak_pb2.RpbModFun
|
[
"Encodes",
"a",
"dict",
"with",
"mod",
"and",
"fun",
"keys",
"into",
"a",
"protobuf",
"modfun",
"pair",
".",
"Used",
"in",
"bucket",
"properties",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L372-L387
|
18,873
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_hooklist
|
def encode_hooklist(self, hooklist, msg):
"""
Encodes a list of commit hooks into their protobuf equivalent.
Used in bucket properties.
:param hooklist: a list of commit hooks
:type hooklist: list
:param msg: a protobuf field that is a list of commit hooks
"""
for hook in hooklist:
pbhook = msg.add()
self.encode_hook(hook, pbhook)
|
python
|
def encode_hooklist(self, hooklist, msg):
"""
Encodes a list of commit hooks into their protobuf equivalent.
Used in bucket properties.
:param hooklist: a list of commit hooks
:type hooklist: list
:param msg: a protobuf field that is a list of commit hooks
"""
for hook in hooklist:
pbhook = msg.add()
self.encode_hook(hook, pbhook)
|
[
"def",
"encode_hooklist",
"(",
"self",
",",
"hooklist",
",",
"msg",
")",
":",
"for",
"hook",
"in",
"hooklist",
":",
"pbhook",
"=",
"msg",
".",
"add",
"(",
")",
"self",
".",
"encode_hook",
"(",
"hook",
",",
"pbhook",
")"
] |
Encodes a list of commit hooks into their protobuf equivalent.
Used in bucket properties.
:param hooklist: a list of commit hooks
:type hooklist: list
:param msg: a protobuf field that is a list of commit hooks
|
[
"Encodes",
"a",
"list",
"of",
"commit",
"hooks",
"into",
"their",
"protobuf",
"equivalent",
".",
"Used",
"in",
"bucket",
"properties",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L400-L411
|
18,874
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_hook
|
def decode_hook(self, hook):
"""
Decodes a protobuf commit hook message into a dict. Used in
bucket properties.
:param hook: the hook to decode
:type hook: riak.pb.riak_pb2.RpbCommitHook
:rtype dict
"""
if hook.HasField('modfun'):
return self.decode_modfun(hook.modfun)
else:
return {'name': bytes_to_str(hook.name)}
|
python
|
def decode_hook(self, hook):
"""
Decodes a protobuf commit hook message into a dict. Used in
bucket properties.
:param hook: the hook to decode
:type hook: riak.pb.riak_pb2.RpbCommitHook
:rtype dict
"""
if hook.HasField('modfun'):
return self.decode_modfun(hook.modfun)
else:
return {'name': bytes_to_str(hook.name)}
|
[
"def",
"decode_hook",
"(",
"self",
",",
"hook",
")",
":",
"if",
"hook",
".",
"HasField",
"(",
"'modfun'",
")",
":",
"return",
"self",
".",
"decode_modfun",
"(",
"hook",
".",
"modfun",
")",
"else",
":",
"return",
"{",
"'name'",
":",
"bytes_to_str",
"(",
"hook",
".",
"name",
")",
"}"
] |
Decodes a protobuf commit hook message into a dict. Used in
bucket properties.
:param hook: the hook to decode
:type hook: riak.pb.riak_pb2.RpbCommitHook
:rtype dict
|
[
"Decodes",
"a",
"protobuf",
"commit",
"hook",
"message",
"into",
"a",
"dict",
".",
"Used",
"in",
"bucket",
"properties",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L413-L425
|
18,875
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_hook
|
def encode_hook(self, hook, msg):
"""
Encodes a commit hook dict into the protobuf message. Used in
bucket properties.
:param hook: the hook to encode
:type hook: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbCommitHook
:rtype riak.pb.riak_pb2.RpbCommitHook
"""
if 'name' in hook:
msg.name = str_to_bytes(hook['name'])
else:
self.encode_modfun(hook, msg.modfun)
return msg
|
python
|
def encode_hook(self, hook, msg):
"""
Encodes a commit hook dict into the protobuf message. Used in
bucket properties.
:param hook: the hook to encode
:type hook: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbCommitHook
:rtype riak.pb.riak_pb2.RpbCommitHook
"""
if 'name' in hook:
msg.name = str_to_bytes(hook['name'])
else:
self.encode_modfun(hook, msg.modfun)
return msg
|
[
"def",
"encode_hook",
"(",
"self",
",",
"hook",
",",
"msg",
")",
":",
"if",
"'name'",
"in",
"hook",
":",
"msg",
".",
"name",
"=",
"str_to_bytes",
"(",
"hook",
"[",
"'name'",
"]",
")",
"else",
":",
"self",
".",
"encode_modfun",
"(",
"hook",
",",
"msg",
".",
"modfun",
")",
"return",
"msg"
] |
Encodes a commit hook dict into the protobuf message. Used in
bucket properties.
:param hook: the hook to encode
:type hook: dict
:param msg: the protobuf message to fill
:type msg: riak.pb.riak_pb2.RpbCommitHook
:rtype riak.pb.riak_pb2.RpbCommitHook
|
[
"Encodes",
"a",
"commit",
"hook",
"dict",
"into",
"the",
"protobuf",
"message",
".",
"Used",
"in",
"bucket",
"properties",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L427-L442
|
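A pure-Python sketch of the two commit-hook dict shapes that encode_hook and decode_hook round-trip; the hook and module names are hypothetical.

named_hook = {'name': 'riak_search_kv_hook'}         # named, Erlang-registered
modfun_hook = {'mod': 'my_module', 'fun': 'my_fun'}  # module/function pair

def classify(hook):
    # mirrors encode_hook's branch: a 'name' key wins, else treat as modfun
    return 'named' if 'name' in hook else 'modfun'

assert classify(named_hook) == 'named'
assert classify(modfun_hook) == 'modfun'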
18,876
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_index_req
|
def encode_index_req(self, bucket, index, startkey, endkey=None,
return_terms=None, max_results=None,
continuation=None, timeout=None, term_regex=None,
streaming=False):
"""
Encodes a secondary index request into the protobuf message.
:param bucket: the bucket whose index to query
:type bucket: string
:param index: the index to query
:type index: string
:param startkey: the value or beginning of the range
:type startkey: integer, string
:param endkey: the end of the range
:type endkey: integer, string
:param return_terms: whether to return the index term with the key
:type return_terms: bool
:param max_results: the maximum number of results to return (page size)
:type max_results: integer
:param continuation: the opaque continuation returned from a
previous paginated request
:type continuation: string
:param timeout: a timeout value in milliseconds, or 'infinity'
:type timeout: int
:param term_regex: a regular expression used to filter index terms
:type term_regex: string
:param streaming: encode as streaming request
:type streaming: bool
:rtype riak.pb.riak_kv_pb2.RpbIndexReq
"""
req = riak.pb.riak_kv_pb2.RpbIndexReq(
bucket=str_to_bytes(bucket.name),
index=str_to_bytes(index))
self._add_bucket_type(req, bucket.bucket_type)
if endkey is not None:
req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range
req.range_min = str_to_bytes(str(startkey))
req.range_max = str_to_bytes(str(endkey))
else:
req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq
req.key = str_to_bytes(str(startkey))
if return_terms is not None:
req.return_terms = return_terms
if max_results:
req.max_results = max_results
if continuation:
req.continuation = str_to_bytes(continuation)
if timeout:
if timeout == 'infinity':
req.timeout = 0
else:
req.timeout = timeout
if term_regex:
req.term_regex = str_to_bytes(term_regex)
req.stream = streaming
mc = riak.pb.messages.MSG_CODE_INDEX_REQ
rc = riak.pb.messages.MSG_CODE_INDEX_RESP
return Msg(mc, req.SerializeToString(), rc)
|
python
|
def encode_index_req(self, bucket, index, startkey, endkey=None,
return_terms=None, max_results=None,
continuation=None, timeout=None, term_regex=None,
streaming=False):
"""
Encodes a secondary index request into the protobuf message.
:param bucket: the bucket whose index to query
:type bucket: string
:param index: the index to query
:type index: string
:param startkey: the value or beginning of the range
:type startkey: integer, string
:param endkey: the end of the range
:type endkey: integer, string
:param return_terms: whether to return the index term with the key
:type return_terms: bool
:param max_results: the maximum number of results to return (page size)
:type max_results: integer
:param continuation: the opaque continuation returned from a
previous paginated request
:type continuation: string
:param timeout: a timeout value in milliseconds, or 'infinity'
:type timeout: int
:param term_regex: a regular expression used to filter index terms
:type term_regex: string
:param streaming: encode as streaming request
:type streaming: bool
:rtype riak.pb.riak_kv_pb2.RpbIndexReq
"""
req = riak.pb.riak_kv_pb2.RpbIndexReq(
bucket=str_to_bytes(bucket.name),
index=str_to_bytes(index))
self._add_bucket_type(req, bucket.bucket_type)
if endkey is not None:
req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.range
req.range_min = str_to_bytes(str(startkey))
req.range_max = str_to_bytes(str(endkey))
else:
req.qtype = riak.pb.riak_kv_pb2.RpbIndexReq.eq
req.key = str_to_bytes(str(startkey))
if return_terms is not None:
req.return_terms = return_terms
if max_results:
req.max_results = max_results
if continuation:
req.continuation = str_to_bytes(continuation)
if timeout:
if timeout == 'infinity':
req.timeout = 0
else:
req.timeout = timeout
if term_regex:
req.term_regex = str_to_bytes(term_regex)
req.stream = streaming
mc = riak.pb.messages.MSG_CODE_INDEX_REQ
rc = riak.pb.messages.MSG_CODE_INDEX_RESP
return Msg(mc, req.SerializeToString(), rc)
|
[
"def",
"encode_index_req",
"(",
"self",
",",
"bucket",
",",
"index",
",",
"startkey",
",",
"endkey",
"=",
"None",
",",
"return_terms",
"=",
"None",
",",
"max_results",
"=",
"None",
",",
"continuation",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"term_regex",
"=",
"None",
",",
"streaming",
"=",
"False",
")",
":",
"req",
"=",
"riak",
".",
"pb",
".",
"riak_kv_pb2",
".",
"RpbIndexReq",
"(",
"bucket",
"=",
"str_to_bytes",
"(",
"bucket",
".",
"name",
")",
",",
"index",
"=",
"str_to_bytes",
"(",
"index",
")",
")",
"self",
".",
"_add_bucket_type",
"(",
"req",
",",
"bucket",
".",
"bucket_type",
")",
"if",
"endkey",
"is",
"not",
"None",
":",
"req",
".",
"qtype",
"=",
"riak",
".",
"pb",
".",
"riak_kv_pb2",
".",
"RpbIndexReq",
".",
"range",
"req",
".",
"range_min",
"=",
"str_to_bytes",
"(",
"str",
"(",
"startkey",
")",
")",
"req",
".",
"range_max",
"=",
"str_to_bytes",
"(",
"str",
"(",
"endkey",
")",
")",
"else",
":",
"req",
".",
"qtype",
"=",
"riak",
".",
"pb",
".",
"riak_kv_pb2",
".",
"RpbIndexReq",
".",
"eq",
"req",
".",
"key",
"=",
"str_to_bytes",
"(",
"str",
"(",
"startkey",
")",
")",
"if",
"return_terms",
"is",
"not",
"None",
":",
"req",
".",
"return_terms",
"=",
"return_terms",
"if",
"max_results",
":",
"req",
".",
"max_results",
"=",
"max_results",
"if",
"continuation",
":",
"req",
".",
"continuation",
"=",
"str_to_bytes",
"(",
"continuation",
")",
"if",
"timeout",
":",
"if",
"timeout",
"==",
"'infinity'",
":",
"req",
".",
"timeout",
"=",
"0",
"else",
":",
"req",
".",
"timeout",
"=",
"timeout",
"if",
"term_regex",
":",
"req",
".",
"term_regex",
"=",
"str_to_bytes",
"(",
"term_regex",
")",
"req",
".",
"stream",
"=",
"streaming",
"mc",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_INDEX_REQ",
"rc",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_INDEX_RESP",
"return",
"Msg",
"(",
"mc",
",",
"req",
".",
"SerializeToString",
"(",
")",
",",
"rc",
")"
] |
Encodes a secondary index request into the protobuf message.
:param bucket: the bucket whose index to query
:type bucket: string
:param index: the index to query
:type index: string
:param startkey: the value or beginning of the range
:type startkey: integer, string
:param endkey: the end of the range
:type endkey: integer, string
:param return_terms: whether to return the index term with the key
:type return_terms: bool
:param max_results: the maximum number of results to return (page size)
:type max_results: integer
:param continuation: the opaque continuation returned from a
previous paginated request
:type continuation: string
:param timeout: a timeout value in milliseconds, or 'infinity'
:type timeout: int
:param term_regex: a regular expression used to filter index terms
:type term_regex: string
:param streaming: encode as streaming request
:type streaming: bool
:rtype riak.pb.riak_kv_pb2.RpbIndexReq
|
[
"Encodes",
"a",
"secondary",
"index",
"request",
"into",
"the",
"protobuf",
"message",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L444-L501
|
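A hedged sketch of the client-level secondary-index calls that end up in encode_index_req, assuming the riak package; bucket, index names, and values are illustrative.

import riak

bucket = riak.RiakClient(pb_port=8087).bucket('users')     # assumed node
# equality query (endkey=None above -> qtype 'eq')
page = bucket.get_index('email_bin', 'bob@example.com')
# range query with paging (endkey given -> qtype 'range')
page = bucket.get_index('age_int', 18, 65, max_results=100)
for key in page:                         # IndexPage iterates over results
    print(key)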
18,877
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_search_index
|
def decode_search_index(self, index):
"""
        Decodes an RpbYokozunaIndex message into a dict.
:param index: a yz index message
:type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex
:rtype dict
"""
result = {}
result['name'] = bytes_to_str(index.name)
if index.HasField('schema'):
result['schema'] = bytes_to_str(index.schema)
if index.HasField('n_val'):
result['n_val'] = index.n_val
return result
|
python
|
def decode_search_index(self, index):
"""
        Decodes an RpbYokozunaIndex message into a dict.
:param index: a yz index message
:type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex
:rtype dict
"""
result = {}
result['name'] = bytes_to_str(index.name)
if index.HasField('schema'):
result['schema'] = bytes_to_str(index.schema)
if index.HasField('n_val'):
result['n_val'] = index.n_val
return result
|
[
"def",
"decode_search_index",
"(",
"self",
",",
"index",
")",
":",
"result",
"=",
"{",
"}",
"result",
"[",
"'name'",
"]",
"=",
"bytes_to_str",
"(",
"index",
".",
"name",
")",
"if",
"index",
".",
"HasField",
"(",
"'schema'",
")",
":",
"result",
"[",
"'schema'",
"]",
"=",
"bytes_to_str",
"(",
"index",
".",
"schema",
")",
"if",
"index",
".",
"HasField",
"(",
"'n_val'",
")",
":",
"result",
"[",
"'n_val'",
"]",
"=",
"index",
".",
"n_val",
"return",
"result"
] |
Decodes an RpbYokozunaIndex message into a dict.
:param index: a yz index message
:type index: riak.pb.riak_yokozuna_pb2.RpbYokozunaIndex
:rtype dict
|
[
"Fills",
"an",
"RpbYokozunaIndex",
"message",
"with",
"the",
"appropriate",
"data",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L519-L533
|
18,878
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.encode_timeseries_put
|
def encode_timeseries_put(self, tsobj):
"""
        Fills a TsPutReq message with the appropriate data and
        metadata from a TsObject.
        :param tsobj: a TsObject
        :type tsobj: TsObject
"""
req = riak.pb.riak_ts_pb2.TsPutReq()
req.table = str_to_bytes(tsobj.table.name)
if tsobj.columns:
raise NotImplementedError("columns are not implemented yet")
if tsobj.rows and isinstance(tsobj.rows, list):
for row in tsobj.rows:
tsr = req.rows.add() # NB: type TsRow
if not isinstance(row, list):
raise ValueError("TsObject row must be a list of values")
for cell in row:
tsc = tsr.cells.add() # NB: type TsCell
self.encode_to_ts_cell(cell, tsc)
else:
raise RiakError("TsObject requires a list of rows")
mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ
rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP
return Msg(mc, req.SerializeToString(), rc)
|
python
|
def encode_timeseries_put(self, tsobj):
"""
        Fills a TsPutReq message with the appropriate data and
        metadata from a TsObject.
        :param tsobj: a TsObject
        :type tsobj: TsObject
"""
req = riak.pb.riak_ts_pb2.TsPutReq()
req.table = str_to_bytes(tsobj.table.name)
if tsobj.columns:
raise NotImplementedError("columns are not implemented yet")
if tsobj.rows and isinstance(tsobj.rows, list):
for row in tsobj.rows:
tsr = req.rows.add() # NB: type TsRow
if not isinstance(row, list):
raise ValueError("TsObject row must be a list of values")
for cell in row:
tsc = tsr.cells.add() # NB: type TsCell
self.encode_to_ts_cell(cell, tsc)
else:
raise RiakError("TsObject requires a list of rows")
mc = riak.pb.messages.MSG_CODE_TS_PUT_REQ
rc = riak.pb.messages.MSG_CODE_TS_PUT_RESP
return Msg(mc, req.SerializeToString(), rc)
|
[
"def",
"encode_timeseries_put",
"(",
"self",
",",
"tsobj",
")",
":",
"req",
"=",
"riak",
".",
"pb",
".",
"riak_ts_pb2",
".",
"TsPutReq",
"(",
")",
"req",
".",
"table",
"=",
"str_to_bytes",
"(",
"tsobj",
".",
"table",
".",
"name",
")",
"if",
"tsobj",
".",
"columns",
":",
"raise",
"NotImplementedError",
"(",
"\"columns are not implemented yet\"",
")",
"if",
"tsobj",
".",
"rows",
"and",
"isinstance",
"(",
"tsobj",
".",
"rows",
",",
"list",
")",
":",
"for",
"row",
"in",
"tsobj",
".",
"rows",
":",
"tsr",
"=",
"req",
".",
"rows",
".",
"add",
"(",
")",
"# NB: type TsRow",
"if",
"not",
"isinstance",
"(",
"row",
",",
"list",
")",
":",
"raise",
"ValueError",
"(",
"\"TsObject row must be a list of values\"",
")",
"for",
"cell",
"in",
"row",
":",
"tsc",
"=",
"tsr",
".",
"cells",
".",
"add",
"(",
")",
"# NB: type TsCell",
"self",
".",
"encode_to_ts_cell",
"(",
"cell",
",",
"tsc",
")",
"else",
":",
"raise",
"RiakError",
"(",
"\"TsObject requires a list of rows\"",
")",
"mc",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_TS_PUT_REQ",
"rc",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_TS_PUT_RESP",
"return",
"Msg",
"(",
"mc",
",",
"req",
".",
"SerializeToString",
"(",
")",
",",
"rc",
")"
] |
Fills a TsPutReq message with the appropriate data and
metadata from a TsObject.
:param tsobj: a TsObject
:type tsobj: TsObject
|
[
"Fills",
"an",
"TsPutReq",
"message",
"with",
"the",
"appropriate",
"data",
"and",
"metadata",
"from",
"a",
"TsObject",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L758-L787
|
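A hedged sketch of the timeseries write path that terminates in encode_timeseries_put, assuming the riak package and a TS-enabled node; the table name and row values are illustrative.

import datetime
import riak

client = riak.RiakClient(pb_port=8087)      # assumed local node
table = client.table('GeoCheckin')          # hypothetical TS table
rows = [['family1', 'series1', datetime.datetime.utcnow(), 'hot', 23.5]]
ts_obj = table.new(rows)                    # TsObject wraps a list of rows
ts_obj.store()                              # serialized via encode_timeseries_put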
18,879
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_timeseries_row
|
def decode_timeseries_row(self, tsrow, tscols=None,
convert_timestamp=False):
"""
Decodes a TsRow into a list
:param tsrow: the protobuf TsRow to decode.
:type tsrow: riak.pb.riak_ts_pb2.TsRow
:param tscols: the protobuf TsColumn data to help decode.
:type tscols: list
:rtype list
"""
row = []
for i, cell in enumerate(tsrow.cells):
col = None
if tscols is not None:
col = tscols[i]
if cell.HasField('varchar_value'):
if col and not (col.type == TsColumnType.Value('VARCHAR') or
col.type == TsColumnType.Value('BLOB')):
raise TypeError('expected VARCHAR or BLOB column')
else:
row.append(cell.varchar_value)
elif cell.HasField('sint64_value'):
if col and col.type != TsColumnType.Value('SINT64'):
raise TypeError('expected SINT64 column')
else:
row.append(cell.sint64_value)
elif cell.HasField('double_value'):
if col and col.type != TsColumnType.Value('DOUBLE'):
raise TypeError('expected DOUBLE column')
else:
row.append(cell.double_value)
elif cell.HasField('timestamp_value'):
if col and col.type != TsColumnType.Value('TIMESTAMP'):
raise TypeError('expected TIMESTAMP column')
else:
dt = cell.timestamp_value
if convert_timestamp:
dt = datetime_from_unix_time_millis(
cell.timestamp_value)
row.append(dt)
elif cell.HasField('boolean_value'):
if col and col.type != TsColumnType.Value('BOOLEAN'):
raise TypeError('expected BOOLEAN column')
else:
row.append(cell.boolean_value)
else:
row.append(None)
return row
|
python
|
def decode_timeseries_row(self, tsrow, tscols=None,
convert_timestamp=False):
"""
Decodes a TsRow into a list
:param tsrow: the protobuf TsRow to decode.
:type tsrow: riak.pb.riak_ts_pb2.TsRow
:param tscols: the protobuf TsColumn data to help decode.
:type tscols: list
:rtype list
"""
row = []
for i, cell in enumerate(tsrow.cells):
col = None
if tscols is not None:
col = tscols[i]
if cell.HasField('varchar_value'):
if col and not (col.type == TsColumnType.Value('VARCHAR') or
col.type == TsColumnType.Value('BLOB')):
raise TypeError('expected VARCHAR or BLOB column')
else:
row.append(cell.varchar_value)
elif cell.HasField('sint64_value'):
if col and col.type != TsColumnType.Value('SINT64'):
raise TypeError('expected SINT64 column')
else:
row.append(cell.sint64_value)
elif cell.HasField('double_value'):
if col and col.type != TsColumnType.Value('DOUBLE'):
raise TypeError('expected DOUBLE column')
else:
row.append(cell.double_value)
elif cell.HasField('timestamp_value'):
if col and col.type != TsColumnType.Value('TIMESTAMP'):
raise TypeError('expected TIMESTAMP column')
else:
dt = cell.timestamp_value
if convert_timestamp:
dt = datetime_from_unix_time_millis(
cell.timestamp_value)
row.append(dt)
elif cell.HasField('boolean_value'):
if col and col.type != TsColumnType.Value('BOOLEAN'):
raise TypeError('expected BOOLEAN column')
else:
row.append(cell.boolean_value)
else:
row.append(None)
return row
|
[
"def",
"decode_timeseries_row",
"(",
"self",
",",
"tsrow",
",",
"tscols",
"=",
"None",
",",
"convert_timestamp",
"=",
"False",
")",
":",
"row",
"=",
"[",
"]",
"for",
"i",
",",
"cell",
"in",
"enumerate",
"(",
"tsrow",
".",
"cells",
")",
":",
"col",
"=",
"None",
"if",
"tscols",
"is",
"not",
"None",
":",
"col",
"=",
"tscols",
"[",
"i",
"]",
"if",
"cell",
".",
"HasField",
"(",
"'varchar_value'",
")",
":",
"if",
"col",
"and",
"not",
"(",
"col",
".",
"type",
"==",
"TsColumnType",
".",
"Value",
"(",
"'VARCHAR'",
")",
"or",
"col",
".",
"type",
"==",
"TsColumnType",
".",
"Value",
"(",
"'BLOB'",
")",
")",
":",
"raise",
"TypeError",
"(",
"'expected VARCHAR or BLOB column'",
")",
"else",
":",
"row",
".",
"append",
"(",
"cell",
".",
"varchar_value",
")",
"elif",
"cell",
".",
"HasField",
"(",
"'sint64_value'",
")",
":",
"if",
"col",
"and",
"col",
".",
"type",
"!=",
"TsColumnType",
".",
"Value",
"(",
"'SINT64'",
")",
":",
"raise",
"TypeError",
"(",
"'expected SINT64 column'",
")",
"else",
":",
"row",
".",
"append",
"(",
"cell",
".",
"sint64_value",
")",
"elif",
"cell",
".",
"HasField",
"(",
"'double_value'",
")",
":",
"if",
"col",
"and",
"col",
".",
"type",
"!=",
"TsColumnType",
".",
"Value",
"(",
"'DOUBLE'",
")",
":",
"raise",
"TypeError",
"(",
"'expected DOUBLE column'",
")",
"else",
":",
"row",
".",
"append",
"(",
"cell",
".",
"double_value",
")",
"elif",
"cell",
".",
"HasField",
"(",
"'timestamp_value'",
")",
":",
"if",
"col",
"and",
"col",
".",
"type",
"!=",
"TsColumnType",
".",
"Value",
"(",
"'TIMESTAMP'",
")",
":",
"raise",
"TypeError",
"(",
"'expected TIMESTAMP column'",
")",
"else",
":",
"dt",
"=",
"cell",
".",
"timestamp_value",
"if",
"convert_timestamp",
":",
"dt",
"=",
"datetime_from_unix_time_millis",
"(",
"cell",
".",
"timestamp_value",
")",
"row",
".",
"append",
"(",
"dt",
")",
"elif",
"cell",
".",
"HasField",
"(",
"'boolean_value'",
")",
":",
"if",
"col",
"and",
"col",
".",
"type",
"!=",
"TsColumnType",
".",
"Value",
"(",
"'BOOLEAN'",
")",
":",
"raise",
"TypeError",
"(",
"'expected BOOLEAN column'",
")",
"else",
":",
"row",
".",
"append",
"(",
"cell",
".",
"boolean_value",
")",
"else",
":",
"row",
".",
"append",
"(",
"None",
")",
"return",
"row"
] |
Decodes a TsRow into a list
:param tsrow: the protobuf TsRow to decode.
:type tsrow: riak.pb.riak_ts_pb2.TsRow
:param tscols: the protobuf TsColumn data to help decode.
:type tscols: list
:rtype list
|
[
"Decodes",
"a",
"TsRow",
"into",
"a",
"list"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L847-L895
|
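A self-contained stand-in for the datetime_from_unix_time_millis helper used above (the real one lives in riak.util), showing the TIMESTAMP conversion that decode_timeseries_row applies when convert_timestamp is True.

import datetime

def datetime_from_unix_time_millis(ms):
    # stand-in: Riak TS timestamps are milliseconds since the Unix epoch
    return datetime.datetime.utcfromtimestamp(ms / 1000.0)

print(datetime_from_unix_time_millis(1420113600000))  # 2015-01-01 12:00:00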
18,880
|
basho/riak-python-client
|
riak/codecs/pbuf.py
|
PbufCodec.decode_preflist
|
def decode_preflist(self, item):
"""
Decodes a preflist response
        :param item: a single bucket/key preflist item
        :type item: riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem
:rtype dict
"""
result = {'partition': item.partition,
'node': bytes_to_str(item.node),
                  'primary': item.primary}
return result
|
python
|
def decode_preflist(self, item):
"""
Decodes a preflist response
        :param item: a single bucket/key preflist item
        :type item: riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem
:rtype dict
"""
result = {'partition': item.partition,
'node': bytes_to_str(item.node),
                  'primary': item.primary}
return result
|
[
"def",
"decode_preflist",
"(",
"self",
",",
"item",
")",
":",
"result",
"=",
"{",
"'partition'",
":",
"item",
".",
"partition",
",",
"'node'",
":",
"bytes_to_str",
"(",
"item",
".",
"node",
")",
",",
"'primary'",
":",
"item",
".",
"primary",
"}",
"return",
"result"
] |
Decodes a preflist response
:param item: a single bucket/key preflist item
:type item: riak.pb.riak_kv_pb2.RpbBucketKeyPreflistItem
:rtype dict
|
[
"Decodes",
"a",
"preflist",
"response"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/codecs/pbuf.py#L897-L909
|
18,881
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.ping
|
def ping(self):
"""
Ping the remote server
"""
msg_code = riak.pb.messages.MSG_CODE_PING_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_ping()
resp_code, _ = self._request(msg, codec)
if resp_code == riak.pb.messages.MSG_CODE_PING_RESP:
return True
else:
return False
|
python
|
def ping(self):
"""
Ping the remote server
"""
msg_code = riak.pb.messages.MSG_CODE_PING_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_ping()
resp_code, _ = self._request(msg, codec)
if resp_code == riak.pb.messages.MSG_CODE_PING_RESP:
return True
else:
return False
|
[
"def",
"ping",
"(",
"self",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_PING_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_ping",
"(",
")",
"resp_code",
",",
"_",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"if",
"resp_code",
"==",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_PING_RESP",
":",
"return",
"True",
"else",
":",
"return",
"False"
] |
Ping the remote server
|
[
"Ping",
"the",
"remote",
"server"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L107-L118
|
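A hedged sketch of the client-level call that reaches TcpTransport.ping, assuming the riak package and a reachable node.

import riak

client = riak.RiakClient(pb_port=8087)   # assumed local node
if client.ping():                        # True iff MSG_CODE_PING_RESP came back
    print('riak is up')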
18,882
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get_server_info
|
def get_server_info(self):
"""
Get information about the server
"""
# NB: can't do it this way due to recursion
# codec = self._get_codec(ttb_supported=False)
codec = PbufCodec()
msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None,
riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP)
resp_code, resp = self._request(msg, codec)
return codec.decode_get_server_info(resp)
|
python
|
def get_server_info(self):
"""
Get information about the server
"""
# NB: can't do it this way due to recursion
# codec = self._get_codec(ttb_supported=False)
codec = PbufCodec()
msg = Msg(riak.pb.messages.MSG_CODE_GET_SERVER_INFO_REQ, None,
riak.pb.messages.MSG_CODE_GET_SERVER_INFO_RESP)
resp_code, resp = self._request(msg, codec)
return codec.decode_get_server_info(resp)
|
[
"def",
"get_server_info",
"(",
"self",
")",
":",
"# NB: can't do it this way due to recursion",
"# codec = self._get_codec(ttb_supported=False)",
"codec",
"=",
"PbufCodec",
"(",
")",
"msg",
"=",
"Msg",
"(",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_GET_SERVER_INFO_REQ",
",",
"None",
",",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_GET_SERVER_INFO_RESP",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"codec",
".",
"decode_get_server_info",
"(",
"resp",
")"
] |
Get information about the server
|
[
"Get",
"information",
"about",
"the",
"server"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L120-L130
|
18,883
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get
|
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None,
notfound_ok=None, head_only=False):
"""
Serialize get request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_GET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get(robj, r, pr,
timeout, basic_quorum,
notfound_ok, head_only)
resp_code, resp = self._request(msg, codec)
return codec.decode_get(robj, resp)
|
python
|
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None,
notfound_ok=None, head_only=False):
"""
Serialize get request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_GET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get(robj, r, pr,
timeout, basic_quorum,
notfound_ok, head_only)
resp_code, resp = self._request(msg, codec)
return codec.decode_get(robj, resp)
|
[
"def",
"get",
"(",
"self",
",",
"robj",
",",
"r",
"=",
"None",
",",
"pr",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"basic_quorum",
"=",
"None",
",",
"notfound_ok",
"=",
"None",
",",
"head_only",
"=",
"False",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_GET_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_get",
"(",
"robj",
",",
"r",
",",
"pr",
",",
"timeout",
",",
"basic_quorum",
",",
"notfound_ok",
",",
"head_only",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"codec",
".",
"decode_get",
"(",
"robj",
",",
"resp",
")"
] |
Serialize get request and deserialize response
|
[
"Serialize",
"get",
"request",
"and",
"deserialize",
"response"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L149-L160
|
18,884
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.ts_stream_keys
|
def ts_stream_keys(self, table, timeout=None):
"""
Streams keys from a timeseries table, returning an iterator that
yields lists of keys.
"""
msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_timeseries_listkeysreq(table, timeout)
self._send_msg(msg.msg_code, msg.data)
return PbufTsKeyStream(self, codec, self._ts_convert_timestamp)
|
python
|
def ts_stream_keys(self, table, timeout=None):
"""
Streams keys from a timeseries table, returning an iterator that
yields lists of keys.
"""
msg_code = riak.pb.messages.MSG_CODE_TS_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_timeseries_listkeysreq(table, timeout)
self._send_msg(msg.msg_code, msg.data)
return PbufTsKeyStream(self, codec, self._ts_convert_timestamp)
|
[
"def",
"ts_stream_keys",
"(",
"self",
",",
"table",
",",
"timeout",
"=",
"None",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_TS_LIST_KEYS_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_timeseries_listkeysreq",
"(",
"table",
",",
"timeout",
")",
"self",
".",
"_send_msg",
"(",
"msg",
".",
"msg_code",
",",
"msg",
".",
"data",
")",
"return",
"PbufTsKeyStream",
"(",
"self",
",",
"codec",
",",
"self",
".",
"_ts_convert_timestamp",
")"
] |
Streams keys from a timeseries table, returning an iterator that
yields lists of keys.
|
[
"Streams",
"keys",
"from",
"a",
"timeseries",
"table",
"returning",
"an",
"iterator",
"that",
"yields",
"lists",
"of",
"keys",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L212-L221
|
18,885
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get_keys
|
def get_keys(self, bucket, timeout=None):
"""
Lists all keys within a bucket.
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
stream = self.stream_keys(bucket, timeout=timeout)
return codec.decode_get_keys(stream)
|
python
|
def get_keys(self, bucket, timeout=None):
"""
Lists all keys within a bucket.
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
stream = self.stream_keys(bucket, timeout=timeout)
return codec.decode_get_keys(stream)
|
[
"def",
"get_keys",
"(",
"self",
",",
"bucket",
",",
"timeout",
"=",
"None",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_LIST_KEYS_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"stream",
"=",
"self",
".",
"stream_keys",
"(",
"bucket",
",",
"timeout",
"=",
"timeout",
")",
"return",
"codec",
".",
"decode_get_keys",
"(",
"stream",
")"
] |
Lists all keys within a bucket.
|
[
"Lists",
"all",
"keys",
"within",
"a",
"bucket",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L231-L238
|
18,886
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.stream_keys
|
def stream_keys(self, bucket, timeout=None):
"""
Streams keys from a bucket, returning an iterator that yields
lists of keys.
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_stream_keys(bucket, timeout)
self._send_msg(msg.msg_code, msg.data)
return PbufKeyStream(self, codec)
|
python
|
def stream_keys(self, bucket, timeout=None):
"""
Streams keys from a bucket, returning an iterator that yields
lists of keys.
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_KEYS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_stream_keys(bucket, timeout)
self._send_msg(msg.msg_code, msg.data)
return PbufKeyStream(self, codec)
|
[
"def",
"stream_keys",
"(",
"self",
",",
"bucket",
",",
"timeout",
"=",
"None",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_LIST_KEYS_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_stream_keys",
"(",
"bucket",
",",
"timeout",
")",
"self",
".",
"_send_msg",
"(",
"msg",
".",
"msg_code",
",",
"msg",
".",
"data",
")",
"return",
"PbufKeyStream",
"(",
"self",
",",
"codec",
")"
] |
Streams keys from a bucket, returning an iterator that yields
lists of keys.
|
[
"Streams",
"keys",
"from",
"a",
"bucket",
"returning",
"an",
"iterator",
"that",
"yields",
"lists",
"of",
"keys",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L240-L249
|
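A hedged sketch of consuming the PbufKeyStream iterator returned by stream_keys; note it yields lists of keys, not single keys. The node and bucket are illustrative.

import riak

bucket = riak.RiakClient(pb_port=8087).bucket('users')  # assumed node/bucket
for keylist in bucket.stream_keys():     # each item is a list of keys
    for key in keylist:
        print(key)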
18,887
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get_buckets
|
def get_buckets(self, bucket_type=None, timeout=None):
"""
Serialize bucket listing request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_buckets(bucket_type,
timeout, streaming=False)
resp_code, resp = self._request(msg, codec)
return resp.buckets
|
python
|
def get_buckets(self, bucket_type=None, timeout=None):
"""
Serialize bucket listing request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_LIST_BUCKETS_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_buckets(bucket_type,
timeout, streaming=False)
resp_code, resp = self._request(msg, codec)
return resp.buckets
|
[
"def",
"get_buckets",
"(",
"self",
",",
"bucket_type",
"=",
"None",
",",
"timeout",
"=",
"None",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_LIST_BUCKETS_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_get_buckets",
"(",
"bucket_type",
",",
"timeout",
",",
"streaming",
"=",
"False",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"resp",
".",
"buckets"
] |
Serialize bucket listing request and deserialize response
|
[
"Serialize",
"bucket",
"listing",
"request",
"and",
"deserialize",
"response"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L251-L260
|
18,888
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get_bucket_props
|
def get_bucket_props(self, bucket):
"""
Serialize bucket property request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_bucket_props(bucket)
resp_code, resp = self._request(msg, codec)
return codec.decode_bucket_props(resp.props)
|
python
|
def get_bucket_props(self, bucket):
"""
Serialize bucket property request and deserialize response
"""
msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_bucket_props(bucket)
resp_code, resp = self._request(msg, codec)
return codec.decode_bucket_props(resp.props)
|
[
"def",
"get_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_GET_BUCKET_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_get_bucket_props",
"(",
"bucket",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"codec",
".",
"decode_bucket_props",
"(",
"resp",
".",
"props",
")"
] |
Serialize bucket property request and deserialize response
|
[
"Serialize",
"bucket",
"property",
"request",
"and",
"deserialize",
"response"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L276-L284
|
18,889
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.set_bucket_props
|
def set_bucket_props(self, bucket, props):
"""
Serialize set bucket property request and deserialize response
"""
if not self.pb_all_bucket_props():
for key in props:
if key not in ('n_val', 'allow_mult'):
raise NotImplementedError('Server only supports n_val and '
'allow_mult properties over PBC')
msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_set_bucket_props(bucket, props)
resp_code, resp = self._request(msg, codec)
return True
|
python
|
def set_bucket_props(self, bucket, props):
"""
Serialize set bucket property request and deserialize response
"""
if not self.pb_all_bucket_props():
for key in props:
if key not in ('n_val', 'allow_mult'):
raise NotImplementedError('Server only supports n_val and '
'allow_mult properties over PBC')
msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_set_bucket_props(bucket, props)
resp_code, resp = self._request(msg, codec)
return True
|
[
"def",
"set_bucket_props",
"(",
"self",
",",
"bucket",
",",
"props",
")",
":",
"if",
"not",
"self",
".",
"pb_all_bucket_props",
"(",
")",
":",
"for",
"key",
"in",
"props",
":",
"if",
"key",
"not",
"in",
"(",
"'n_val'",
",",
"'allow_mult'",
")",
":",
"raise",
"NotImplementedError",
"(",
"'Server only supports n_val and '",
"'allow_mult properties over PBC'",
")",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_SET_BUCKET_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_set_bucket_props",
"(",
"bucket",
",",
"props",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"True"
] |
Serialize set bucket property request and deserialize response
|
[
"Serialize",
"set",
"bucket",
"property",
"request",
"and",
"deserialize",
"response"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L286-L299
|
18,890
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.clear_bucket_props
|
def clear_bucket_props(self, bucket):
"""
Clear bucket properties, resetting them to their defaults
"""
if not self.pb_clear_bucket_props():
return False
msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_clear_bucket_props(bucket)
self._request(msg, codec)
return True
|
python
|
def clear_bucket_props(self, bucket):
"""
Clear bucket properties, resetting them to their defaults
"""
if not self.pb_clear_bucket_props():
return False
msg_code = riak.pb.messages.MSG_CODE_RESET_BUCKET_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_clear_bucket_props(bucket)
self._request(msg, codec)
return True
|
[
"def",
"clear_bucket_props",
"(",
"self",
",",
"bucket",
")",
":",
"if",
"not",
"self",
".",
"pb_clear_bucket_props",
"(",
")",
":",
"return",
"False",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_RESET_BUCKET_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_clear_bucket_props",
"(",
"bucket",
")",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"True"
] |
Clear bucket properties, resetting them to their defaults
|
[
"Clear",
"bucket",
"properties",
"resetting",
"them",
"to",
"their",
"defaults"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L301-L311
|
18,891
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.get_bucket_type_props
|
def get_bucket_type_props(self, bucket_type):
"""
Fetch bucket-type properties
"""
self._check_bucket_types(bucket_type)
msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_bucket_type_props(bucket_type)
resp_code, resp = self._request(msg, codec)
return codec.decode_bucket_props(resp.props)
|
python
|
def get_bucket_type_props(self, bucket_type):
"""
Fetch bucket-type properties
"""
self._check_bucket_types(bucket_type)
msg_code = riak.pb.messages.MSG_CODE_GET_BUCKET_TYPE_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_get_bucket_type_props(bucket_type)
resp_code, resp = self._request(msg, codec)
return codec.decode_bucket_props(resp.props)
|
[
"def",
"get_bucket_type_props",
"(",
"self",
",",
"bucket_type",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_GET_BUCKET_TYPE_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_get_bucket_type_props",
"(",
"bucket_type",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"codec",
".",
"decode_bucket_props",
"(",
"resp",
".",
"props",
")"
] |
Fetch bucket-type properties
|
[
"Fetch",
"bucket",
"-",
"type",
"properties"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L313-L322
|
18,892
|
basho/riak-python-client
|
riak/transports/tcp/transport.py
|
TcpTransport.set_bucket_type_props
|
def set_bucket_type_props(self, bucket_type, props):
"""
Set bucket-type properties
"""
self._check_bucket_types(bucket_type)
msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_set_bucket_type_props(bucket_type, props)
resp_code, resp = self._request(msg, codec)
return True
|
python
|
def set_bucket_type_props(self, bucket_type, props):
"""
Set bucket-type properties
"""
self._check_bucket_types(bucket_type)
msg_code = riak.pb.messages.MSG_CODE_SET_BUCKET_TYPE_REQ
codec = self._get_codec(msg_code)
msg = codec.encode_set_bucket_type_props(bucket_type, props)
resp_code, resp = self._request(msg, codec)
return True
|
[
"def",
"set_bucket_type_props",
"(",
"self",
",",
"bucket_type",
",",
"props",
")",
":",
"self",
".",
"_check_bucket_types",
"(",
"bucket_type",
")",
"msg_code",
"=",
"riak",
".",
"pb",
".",
"messages",
".",
"MSG_CODE_SET_BUCKET_TYPE_REQ",
"codec",
"=",
"self",
".",
"_get_codec",
"(",
"msg_code",
")",
"msg",
"=",
"codec",
".",
"encode_set_bucket_type_props",
"(",
"bucket_type",
",",
"props",
")",
"resp_code",
",",
"resp",
"=",
"self",
".",
"_request",
"(",
"msg",
",",
"codec",
")",
"return",
"True"
] |
Set bucket-type properties
|
[
"Set",
"bucket",
"-",
"type",
"properties"
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/tcp/transport.py#L324-L333
|
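And the matching write side, again hedged (BucketType.set_properties() is the assumed client-side wrapper; the bucket type itself must already exist on the cluster):

import riak

# Assumed names; only the properties passed in are modified.
client = riak.RiakClient(pb_port=8087)
btype = client.bucket_type('archive')
btype.set_properties({'n_val': 2})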
18,893
|
basho/riak-python-client
|
riak/benchmark.py
|
print_report
|
def print_report(label, user, system, real):
"""
Prints the report of one step of a benchmark.
"""
print("{:<12s} {:12f} {:12f} ( {:12f} )".format(label,
user,
system,
real))
|
python
|
def print_report(label, user, system, real):
"""
Prints the report of one step of a benchmark.
"""
print("{:<12s} {:12f} {:12f} ( {:12f} )".format(label,
user,
system,
real))
|
[
"def",
"print_report",
"(",
"label",
",",
"user",
",",
"system",
",",
"real",
")",
":",
"print",
"(",
"\"{:<12s} {:12f} {:12f} ( {:12f} )\"",
".",
"format",
"(",
"label",
",",
"user",
",",
"system",
",",
"real",
")",
")"
] |
Prints the report of one step of a benchmark.
|
[
"Prints",
"the",
"report",
"of",
"one",
"step",
"of",
"a",
"benchmark",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L134-L141
|
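This one needs no cluster at all; the four arguments map directly onto the format string (a label column, then user, system, and wall-clock seconds):

from riak.benchmark import print_report

# One row of benchmark output: label, user time, system time, real time.
print_report('insert-10k', 1.234567, 0.345678, 1.654321)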
18,894
|
basho/riak-python-client
|
riak/benchmark.py
|
Benchmark.next
|
def next(self):
"""
Runs the next iteration of the benchmark.
"""
if self.count == 0:
raise StopIteration
elif self.count > 1:
print_rehearsal_header()
else:
if self.rehearse:
gc.collect()
print("-" * 59)
print()
print_header()
self.count -= 1
return self
|
python
|
def next(self):
"""
Runs the next iteration of the benchmark.
"""
if self.count == 0:
raise StopIteration
elif self.count > 1:
print_rehearsal_header()
else:
if self.rehearse:
gc.collect()
print("-" * 59)
print()
print_header()
self.count -= 1
return self
|
[
"def",
"next",
"(",
"self",
")",
":",
"if",
"self",
".",
"count",
"==",
"0",
":",
"raise",
"StopIteration",
"elif",
"self",
".",
"count",
">",
"1",
":",
"print_rehearsal_header",
"(",
")",
"else",
":",
"if",
"self",
".",
"rehearse",
":",
"gc",
".",
"collect",
"(",
")",
"print",
"(",
"\"-\"",
"*",
"59",
")",
"print",
"(",
")",
"print_header",
"(",
")",
"self",
".",
"count",
"-=",
"1",
"return",
"self"
] |
Runs the next iteration of the benchmark.
|
[
"Runs",
"the",
"next",
"iteration",
"of",
"the",
"benchmark",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/benchmark.py#L96-L112
|
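The rehearsal protocol is easiest to see in isolation. Below is a minimal stand-in, not the real class (whose constructor is not shown in this record), mirroring the count-driven next() above: rehearsal passes print a rehearsal header, the final pass garbage-collects and prints the real header, and a count of zero ends iteration.

import gc

class CountdownRuns:
    """Stand-in mirroring Benchmark.next(): rehearsals first, timed run last."""
    def __init__(self, rehearsals):
        self.count = rehearsals + 1          # rehearsals plus one timed run
        self.rehearse = rehearsals > 0

    def __iter__(self):
        return self

    def __next__(self):                      # Python 3 spelling of next()
        if self.count == 0:
            raise StopIteration              # every pass has been consumed
        elif self.count > 1:
            print('-- rehearsal --')
        else:
            if self.rehearse:
                gc.collect()                 # start the timed run clean
            print('-- timed run --')
        self.count -= 1
        return self

for run in CountdownRuns(rehearsals=2):
    pass                                     # benchmarked body would go here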
18,895
|
basho/riak-python-client
|
riak/mapreduce.py
|
RiakMapReduce.add_object
|
def add_object(self, obj):
"""
Adds a RiakObject to the inputs.
:param obj: the object to add
:type obj: RiakObject
:rtype: :class:`RiakMapReduce`
"""
return self.add_bucket_key_data(obj._bucket._name, obj._key, None)
|
python
|
def add_object(self, obj):
"""
Adds a RiakObject to the inputs.
:param obj: the object to add
:type obj: RiakObject
:rtype: :class:`RiakMapReduce`
"""
return self.add_bucket_key_data(obj._bucket._name, obj._key, None)
|
[
"def",
"add_object",
"(",
"self",
",",
"obj",
")",
":",
"return",
"self",
".",
"add_bucket_key_data",
"(",
"obj",
".",
"_bucket",
".",
"_name",
",",
"obj",
".",
"_key",
",",
"None",
")"
] |
Adds a RiakObject to the inputs.
:param obj: the object to add
:type obj: RiakObject
:rtype: :class:`RiakMapReduce`
|
[
"Adds",
"a",
"RiakObject",
"to",
"the",
"inputs",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L77-L85
|
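A hedged end-to-end sketch (bucket, key, and phase function are illustrative): fetch an object and hand it to a MapReduce job, which records it as a (bucket, key) input.

import riak

# Assumed names throughout; the map phase is a trivial JS function.
client = riak.RiakClient(pb_port=8087)
obj = client.bucket('logs').get('entry-1')
mr = riak.RiakMapReduce(client).add_object(obj)
mr.map('function(v) { return [v.key]; }')
print(mr.run())                     # -> ['entry-1']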
18,896
|
basho/riak-python-client
|
riak/mapreduce.py
|
RiakMapReduce.add_bucket
|
def add_bucket(self, bucket, bucket_type=None):
"""
Adds all keys in a bucket to the inputs.
:param bucket: the bucket
:type bucket: string
:param bucket_type: Optional name of a bucket type
:type bucket_type: string, None
:rtype: :class:`RiakMapReduce`
"""
if not riak.disable_list_exceptions:
raise riak.ListError()
self._input_mode = 'bucket'
if isinstance(bucket, riak.RiakBucket):
if bucket.bucket_type.is_default():
self._inputs = {'bucket': bucket.name}
else:
self._inputs = {'bucket': [bucket.bucket_type.name,
bucket.name]}
elif bucket_type is not None and bucket_type != "default":
self._inputs = {'bucket': [bucket_type, bucket]}
else:
self._inputs = {'bucket': bucket}
return self
|
python
|
def add_bucket(self, bucket, bucket_type=None):
"""
Adds all keys in a bucket to the inputs.
:param bucket: the bucket
:type bucket: string
:param bucket_type: Optional name of a bucket type
:type bucket_type: string, None
:rtype: :class:`RiakMapReduce`
"""
if not riak.disable_list_exceptions:
raise riak.ListError()
self._input_mode = 'bucket'
if isinstance(bucket, riak.RiakBucket):
if bucket.bucket_type.is_default():
self._inputs = {'bucket': bucket.name}
else:
self._inputs = {'bucket': [bucket.bucket_type.name,
bucket.name]}
elif bucket_type is not None and bucket_type != "default":
self._inputs = {'bucket': [bucket_type, bucket]}
else:
self._inputs = {'bucket': bucket}
return self
|
[
"def",
"add_bucket",
"(",
"self",
",",
"bucket",
",",
"bucket_type",
"=",
"None",
")",
":",
"if",
"not",
"riak",
".",
"disable_list_exceptions",
":",
"raise",
"riak",
".",
"ListError",
"(",
")",
"self",
".",
"_input_mode",
"=",
"'bucket'",
"if",
"isinstance",
"(",
"bucket",
",",
"riak",
".",
"RiakBucket",
")",
":",
"if",
"bucket",
".",
"bucket_type",
".",
"is_default",
"(",
")",
":",
"self",
".",
"_inputs",
"=",
"{",
"'bucket'",
":",
"bucket",
".",
"name",
"}",
"else",
":",
"self",
".",
"_inputs",
"=",
"{",
"'bucket'",
":",
"[",
"bucket",
".",
"bucket_type",
".",
"name",
",",
"bucket",
".",
"name",
"]",
"}",
"elif",
"bucket_type",
"is",
"not",
"None",
"and",
"bucket_type",
"!=",
"\"default\"",
":",
"self",
".",
"_inputs",
"=",
"{",
"'bucket'",
":",
"[",
"bucket_type",
",",
"bucket",
"]",
"}",
"else",
":",
"self",
".",
"_inputs",
"=",
"{",
"'bucket'",
":",
"bucket",
"}",
"return",
"self"
] |
Adds all keys in a bucket to the inputs.
:param bucket: the bucket
:type bucket: string
:param bucket_type: Optional name of a bucket type
:type bucket_type: string, None
:rtype: :class:`RiakMapReduce`
|
[
"Adds",
"all",
"keys",
"in",
"a",
"bucket",
"to",
"the",
"inputs",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L121-L144
|
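add_bucket() trips the list-keys guard shown above, so a sketch has to opt out of the exception first (names assumed; listing keys is expensive on production clusters):

import riak

riak.disable_list_exceptions = True   # acknowledge the list-keys cost

# Assumed bucket/type names; this job counts the keys in the bucket.
client = riak.RiakClient(pb_port=8087)
mr = riak.RiakMapReduce(client).add_bucket('logs', bucket_type='archive')
mr.map('function(v) { return [1]; }')
mr.reduce('Riak.reduceSum')
print(mr.run())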
18,897
|
basho/riak-python-client
|
riak/mapreduce.py
|
RiakMapReduce.add_key_filters
|
def add_key_filters(self, key_filters):
"""
Adds key filters to the inputs.
:param key_filters: a list of filters
:type key_filters: list
:rtype: :class:`RiakMapReduce`
"""
if self._input_mode == 'query':
raise ValueError('Key filters are not supported in a query.')
self._key_filters.extend(key_filters)
return self
|
python
|
def add_key_filters(self, key_filters):
"""
Adds key filters to the inputs.
:param key_filters: a list of filters
:type key_filters: list
:rtype: :class:`RiakMapReduce`
"""
if self._input_mode == 'query':
raise ValueError('Key filters are not supported in a query.')
self._key_filters.extend(key_filters)
return self
|
[
"def",
"add_key_filters",
"(",
"self",
",",
"key_filters",
")",
":",
"if",
"self",
".",
"_input_mode",
"==",
"'query'",
":",
"raise",
"ValueError",
"(",
"'Key filters are not supported in a query.'",
")",
"self",
".",
"_key_filters",
".",
"extend",
"(",
"key_filters",
")",
"return",
"self"
] |
Adds key filters to the inputs.
:param key_filters: a list of filters
:type key_filters: list
:rtype: :class:`RiakMapReduce`
|
[
"Adds",
"key",
"filters",
"to",
"the",
"inputs",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L146-L158
|
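A hedged sketch (names assumed): each filter is itself a list, so the argument is a list of lists; here keys like '2019-03-14' are tokenized on '-' and matched on the first token.

import riak

riak.disable_list_exceptions = True   # add_bucket() lists keys
client = riak.RiakClient(pb_port=8087)
mr = riak.RiakMapReduce(client).add_bucket('logs')
mr.add_key_filters([['tokenize', '-', 1], ['eq', '2019']])
mr.map('function(v) { return [v.key]; }')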
18,898
|
basho/riak-python-client
|
riak/mapreduce.py
|
RiakMapReduce.add_key_filter
|
def add_key_filter(self, *args):
"""
Add a single key filter to the inputs.
:param args: a filter
:type args: list
:rtype: :class:`RiakMapReduce`
"""
if self._input_mode == 'query':
raise ValueError('Key filters are not supported in a query.')
self._key_filters.append(args)
return self
|
python
|
def add_key_filter(self, *args):
"""
Add a single key filter to the inputs.
:param args: a filter
:type args: list
:rtype: :class:`RiakMapReduce`
"""
if self._input_mode == 'query':
raise ValueError('Key filters are not supported in a query.')
self._key_filters.append(args)
return self
|
[
"def",
"add_key_filter",
"(",
"self",
",",
"*",
"args",
")",
":",
"if",
"self",
".",
"_input_mode",
"==",
"'query'",
":",
"raise",
"ValueError",
"(",
"'Key filters are not supported in a query.'",
")",
"self",
".",
"_key_filters",
".",
"append",
"(",
"args",
")",
"return",
"self"
] |
Add a single key filter to the inputs.
:param args: a filter
:type args: list
:rtype: :class:`RiakMapReduce`
|
[
"Add",
"a",
"single",
"key",
"filter",
"to",
"the",
"inputs",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L160-L172
|
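The varargs companion adds one filter per call and returns self, so calls chain; this sketch (assumed names) builds the same pipeline as the list-of-lists form above.

import riak

riak.disable_list_exceptions = True
client = riak.RiakClient(pb_port=8087)
mr = riak.RiakMapReduce(client).add_bucket('logs')
mr.add_key_filter('tokenize', '-', 1).add_key_filter('eq', '2019')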
18,899
|
basho/riak-python-client
|
riak/mapreduce.py
|
RiakMapReduce.reduce_sort
|
def reduce_sort(self, js_cmp=None, options=None):
"""
Adds the Javascript built-in ``Riak.reduceSort`` to the query
as a reduce phase.
:param js_cmp: A Javascript comparator function as specified by
Array.sort()
:type js_cmp: string
:param options: phase options, containing 'language', 'keep'
flag, and/or 'arg'.
:type options: dict
"""
if options is None:
options = dict()
if js_cmp:
options['arg'] = js_cmp
return self.reduce("Riak.reduceSort", options=options)
|
python
|
def reduce_sort(self, js_cmp=None, options=None):
"""
Adds the Javascript built-in ``Riak.reduceSort`` to the query
as a reduce phase.
:param js_cmp: A Javascript comparator function as specified by
Array.sort()
:type js_cmp: string
:param options: phase options, containing 'language', 'keep'
flag, and/or 'arg'.
:type options: dict
"""
if options is None:
options = dict()
if js_cmp:
options['arg'] = js_cmp
return self.reduce("Riak.reduceSort", options=options)
|
[
"def",
"reduce_sort",
"(",
"self",
",",
"js_cmp",
"=",
"None",
",",
"options",
"=",
"None",
")",
":",
"if",
"options",
"is",
"None",
":",
"options",
"=",
"dict",
"(",
")",
"if",
"js_cmp",
":",
"options",
"[",
"'arg'",
"]",
"=",
"js_cmp",
"return",
"self",
".",
"reduce",
"(",
"\"Riak.reduceSort\"",
",",
"options",
"=",
"options",
")"
] |
Adds the Javascript built-in ``Riak.reduceSort`` to the query
as a reduce phase.
:param js_cmp: A Javascript comparator function as specified by
Array.sort()
:type js_cmp: string
:param options: phase options, containing 'language', 'keep'
flag, and/or 'arg'.
:type options: dict
|
[
"Adds",
"the",
"Javascript",
"built",
"-",
"in",
"Riak",
".",
"reduceSort",
"to",
"the",
"query",
"as",
"a",
"reduce",
"phase",
"."
] |
91de13a16607cdf553d1a194e762734e3bec4231
|
https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/mapreduce.py#L448-L466
|
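A hedged sketch (names and data shape assumed): numeric scores are mapped out of each object, then sorted descending by a custom comparator, which reduce_sort() passes through options['arg'] exactly as above.

import riak

riak.disable_list_exceptions = True   # add_bucket() lists keys
client = riak.RiakClient(pb_port=8087)
mr = riak.RiakMapReduce(client).add_bucket('scores')
mr.map('function(v) { return [JSON.parse(v.values[0].data)]; }')
mr.reduce_sort('function(a, b) { return b - a; }')   # descending order
print(mr.run())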