| id (int32, 0-252k) | repo (string, 7-55 chars) | path (string, 4-127 chars) | func_name (string, 1-88 chars) | original_string (string, 75-19.8k chars) | language (string, 1 class) | code (string, 75-19.8k chars) | code_tokens (list) | docstring (string, 3-17.3k chars) | docstring_tokens (list) | sha (string, 40 chars) | url (string, 87-242 chars) |
|---|---|---|---|---|---|---|---|---|---|---|---|
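Each row below lists these twelve fields in order, one field per block, separated by `|` markers. As a rough guide, a single row can be modeled as in the following minimal sketch; it assumes the dump is exported as JSON Lines, and the `CodeRow` and `load_rows` names as well as the `rows.jsonl` file are illustrative assumptions, not part of the dataset.

```python
import json
from typing import List, TypedDict


class CodeRow(TypedDict):
    """One record of the dump, mirroring the column schema above."""
    id: int                      # int32, roughly 0 .. 252k
    repo: str                    # e.g. "manns/pyspread"
    path: str                    # file path inside the repo
    func_name: str               # class-qualified function name
    original_string: str         # raw function source incl. docstring
    language: str                # single class, here always "python"
    code: str                    # same source text as original_string
    code_tokens: List[str]       # tokenized code (docstring stripped)
    docstring: str               # extracted docstring text
    docstring_tokens: List[str]  # tokenized docstring
    sha: str                     # 40-character commit hash
    url: str                     # permalink to the source lines


def load_rows(path: str) -> List[CodeRow]:
    """Reads one CodeRow per non-empty line from a JSON Lines dump."""
    with open(path, encoding="utf-8") as infile:
        return [json.loads(line) for line in infile if line.strip()]


# Hypothetical usage: rows = load_rows("rows.jsonl")
```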
236,800
|
manns/pyspread
|
pyspread/src/lib/_grid_cairo_renderer.py
|
GridCellBorderCairoRenderer.draw
|
def draw(self):
    """Draws cell border to context"""
    # Lines should have a square cap to avoid ugly edges
    self.context.set_line_cap(cairo.LINE_CAP_SQUARE)
    self.context.save()
    self.context.rectangle(*self.rect)
    self.context.clip()
    cell_borders = CellBorders(self.cell_attributes, self.key, self.rect)
    borders = list(cell_borders.gen_all())
    borders.sort(key=attrgetter('width', 'color'))
    for border in borders:
        border.draw(self.context)
    self.context.restore()
|
python
|
def draw(self):
    """Draws cell border to context"""
    # Lines should have a square cap to avoid ugly edges
    self.context.set_line_cap(cairo.LINE_CAP_SQUARE)
    self.context.save()
    self.context.rectangle(*self.rect)
    self.context.clip()
    cell_borders = CellBorders(self.cell_attributes, self.key, self.rect)
    borders = list(cell_borders.gen_all())
    borders.sort(key=attrgetter('width', 'color'))
    for border in borders:
        border.draw(self.context)
    self.context.restore()
|
[
"def",
"draw",
"(",
"self",
")",
":",
"# Lines should have a square cap to avoid ugly edges",
"self",
".",
"context",
".",
"set_line_cap",
"(",
"cairo",
".",
"LINE_CAP_SQUARE",
")",
"self",
".",
"context",
".",
"save",
"(",
")",
"self",
".",
"context",
".",
"rectangle",
"(",
"*",
"self",
".",
"rect",
")",
"self",
".",
"context",
".",
"clip",
"(",
")",
"cell_borders",
"=",
"CellBorders",
"(",
"self",
".",
"cell_attributes",
",",
"self",
".",
"key",
",",
"self",
".",
"rect",
")",
"borders",
"=",
"list",
"(",
"cell_borders",
".",
"gen_all",
"(",
")",
")",
"borders",
".",
"sort",
"(",
"key",
"=",
"attrgetter",
"(",
"'width'",
",",
"'color'",
")",
")",
"for",
"border",
"in",
"borders",
":",
"border",
".",
"draw",
"(",
"self",
".",
"context",
")",
"self",
".",
"context",
".",
"restore",
"(",
")"
] |
Draws cell border to context
|
[
"Draws",
"cell",
"border",
"to",
"context"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/_grid_cairo_renderer.py#L1316-L1332
|
236,801
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer._draw_cursor
|
def _draw_cursor(self, dc, grid, row, col,
                 pen=None, brush=None):
    """Draws cursor as Rectangle in lower right corner"""
    # If in full screen mode draw no cursor
    if grid.main_window.IsFullScreen():
        return
    key = row, col, grid.current_table
    rect = grid.CellToRect(row, col)
    rect = self.get_merged_rect(grid, key, rect)
    # Check if cell is invisible
    if rect is None:
        return
    size = self.get_zoomed_size(1.0)
    caret_length = int(min([rect.width, rect.height]) / 5.0)
    color = get_color(config["text_color"])
    if pen is None:
        pen = wx.Pen(color)
    if brush is None:
        brush = wx.Brush(color)
    pen.SetWidth(size)
    # Inner right and lower borders
    border_left = rect.x + size - 1
    border_right = rect.x + rect.width - size - 1
    border_upper = rect.y + size - 1
    border_lower = rect.y + rect.height - size - 1
    points_lr = [
        (border_right, border_lower - caret_length),
        (border_right, border_lower),
        (border_right - caret_length, border_lower),
        (border_right, border_lower),
    ]
    points_ur = [
        (border_right, border_upper + caret_length),
        (border_right, border_upper),
        (border_right - caret_length, border_upper),
        (border_right, border_upper),
    ]
    points_ul = [
        (border_left, border_upper + caret_length),
        (border_left, border_upper),
        (border_left + caret_length, border_upper),
        (border_left, border_upper),
    ]
    points_ll = [
        (border_left, border_lower - caret_length),
        (border_left, border_lower),
        (border_left + caret_length, border_lower),
        (border_left, border_lower),
    ]
    point_list = [points_lr, points_ur, points_ul, points_ll]
    dc.DrawPolygonList(point_list, pens=pen, brushes=brush)
    self.old_cursor_row_col = row, col
|
python
|
def _draw_cursor(self, dc, grid, row, col,
                 pen=None, brush=None):
    """Draws cursor as Rectangle in lower right corner"""
    # If in full screen mode draw no cursor
    if grid.main_window.IsFullScreen():
        return
    key = row, col, grid.current_table
    rect = grid.CellToRect(row, col)
    rect = self.get_merged_rect(grid, key, rect)
    # Check if cell is invisible
    if rect is None:
        return
    size = self.get_zoomed_size(1.0)
    caret_length = int(min([rect.width, rect.height]) / 5.0)
    color = get_color(config["text_color"])
    if pen is None:
        pen = wx.Pen(color)
    if brush is None:
        brush = wx.Brush(color)
    pen.SetWidth(size)
    # Inner right and lower borders
    border_left = rect.x + size - 1
    border_right = rect.x + rect.width - size - 1
    border_upper = rect.y + size - 1
    border_lower = rect.y + rect.height - size - 1
    points_lr = [
        (border_right, border_lower - caret_length),
        (border_right, border_lower),
        (border_right - caret_length, border_lower),
        (border_right, border_lower),
    ]
    points_ur = [
        (border_right, border_upper + caret_length),
        (border_right, border_upper),
        (border_right - caret_length, border_upper),
        (border_right, border_upper),
    ]
    points_ul = [
        (border_left, border_upper + caret_length),
        (border_left, border_upper),
        (border_left + caret_length, border_upper),
        (border_left, border_upper),
    ]
    points_ll = [
        (border_left, border_lower - caret_length),
        (border_left, border_lower),
        (border_left + caret_length, border_lower),
        (border_left, border_lower),
    ]
    point_list = [points_lr, points_ur, points_ul, points_ll]
    dc.DrawPolygonList(point_list, pens=pen, brushes=brush)
    self.old_cursor_row_col = row, col
|
[
"def",
"_draw_cursor",
"(",
"self",
",",
"dc",
",",
"grid",
",",
"row",
",",
"col",
",",
"pen",
"=",
"None",
",",
"brush",
"=",
"None",
")",
":",
"# If in full screen mode draw no cursor",
"if",
"grid",
".",
"main_window",
".",
"IsFullScreen",
"(",
")",
":",
"return",
"key",
"=",
"row",
",",
"col",
",",
"grid",
".",
"current_table",
"rect",
"=",
"grid",
".",
"CellToRect",
"(",
"row",
",",
"col",
")",
"rect",
"=",
"self",
".",
"get_merged_rect",
"(",
"grid",
",",
"key",
",",
"rect",
")",
"# Check if cell is invisible",
"if",
"rect",
"is",
"None",
":",
"return",
"size",
"=",
"self",
".",
"get_zoomed_size",
"(",
"1.0",
")",
"caret_length",
"=",
"int",
"(",
"min",
"(",
"[",
"rect",
".",
"width",
",",
"rect",
".",
"height",
"]",
")",
"/",
"5.0",
")",
"color",
"=",
"get_color",
"(",
"config",
"[",
"\"text_color\"",
"]",
")",
"if",
"pen",
"is",
"None",
":",
"pen",
"=",
"wx",
".",
"Pen",
"(",
"color",
")",
"if",
"brush",
"is",
"None",
":",
"brush",
"=",
"wx",
".",
"Brush",
"(",
"color",
")",
"pen",
".",
"SetWidth",
"(",
"size",
")",
"# Inner right and lower borders",
"border_left",
"=",
"rect",
".",
"x",
"+",
"size",
"-",
"1",
"border_right",
"=",
"rect",
".",
"x",
"+",
"rect",
".",
"width",
"-",
"size",
"-",
"1",
"border_upper",
"=",
"rect",
".",
"y",
"+",
"size",
"-",
"1",
"border_lower",
"=",
"rect",
".",
"y",
"+",
"rect",
".",
"height",
"-",
"size",
"-",
"1",
"points_lr",
"=",
"[",
"(",
"border_right",
",",
"border_lower",
"-",
"caret_length",
")",
",",
"(",
"border_right",
",",
"border_lower",
")",
",",
"(",
"border_right",
"-",
"caret_length",
",",
"border_lower",
")",
",",
"(",
"border_right",
",",
"border_lower",
")",
",",
"]",
"points_ur",
"=",
"[",
"(",
"border_right",
",",
"border_upper",
"+",
"caret_length",
")",
",",
"(",
"border_right",
",",
"border_upper",
")",
",",
"(",
"border_right",
"-",
"caret_length",
",",
"border_upper",
")",
",",
"(",
"border_right",
",",
"border_upper",
")",
",",
"]",
"points_ul",
"=",
"[",
"(",
"border_left",
",",
"border_upper",
"+",
"caret_length",
")",
",",
"(",
"border_left",
",",
"border_upper",
")",
",",
"(",
"border_left",
"+",
"caret_length",
",",
"border_upper",
")",
",",
"(",
"border_left",
",",
"border_upper",
")",
",",
"]",
"points_ll",
"=",
"[",
"(",
"border_left",
",",
"border_lower",
"-",
"caret_length",
")",
",",
"(",
"border_left",
",",
"border_lower",
")",
",",
"(",
"border_left",
"+",
"caret_length",
",",
"border_lower",
")",
",",
"(",
"border_left",
",",
"border_lower",
")",
",",
"]",
"point_list",
"=",
"[",
"points_lr",
",",
"points_ur",
",",
"points_ul",
",",
"points_ll",
"]",
"dc",
".",
"DrawPolygonList",
"(",
"point_list",
",",
"pens",
"=",
"pen",
",",
"brushes",
"=",
"brush",
")",
"self",
".",
"old_cursor_row_col",
"=",
"row",
",",
"col"
] |
Draws cursor as Rectangle in lower right corner
|
[
"Draws",
"cursor",
"as",
"Rectangle",
"in",
"lower",
"right",
"corner"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L91-L157
|
236,802
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer.update_cursor
|
def update_cursor(self, dc, grid, row, col):
    """Whites out the old cursor and draws the new one"""
    old_row, old_col = self.old_cursor_row_col
    bgcolor = get_color(config["background_color"])
    self._draw_cursor(dc, grid, old_row, old_col,
                      pen=wx.Pen(bgcolor), brush=wx.Brush(bgcolor))
    self._draw_cursor(dc, grid, row, col)
|
python
|
def update_cursor(self, dc, grid, row, col):
    """Whites out the old cursor and draws the new one"""
    old_row, old_col = self.old_cursor_row_col
    bgcolor = get_color(config["background_color"])
    self._draw_cursor(dc, grid, old_row, old_col,
                      pen=wx.Pen(bgcolor), brush=wx.Brush(bgcolor))
    self._draw_cursor(dc, grid, row, col)
|
[
"def",
"update_cursor",
"(",
"self",
",",
"dc",
",",
"grid",
",",
"row",
",",
"col",
")",
":",
"old_row",
",",
"old_col",
"=",
"self",
".",
"old_cursor_row_col",
"bgcolor",
"=",
"get_color",
"(",
"config",
"[",
"\"background_color\"",
"]",
")",
"self",
".",
"_draw_cursor",
"(",
"dc",
",",
"grid",
",",
"old_row",
",",
"old_col",
",",
"pen",
"=",
"wx",
".",
"Pen",
"(",
"bgcolor",
")",
",",
"brush",
"=",
"wx",
".",
"Brush",
"(",
"bgcolor",
")",
")",
"self",
".",
"_draw_cursor",
"(",
"dc",
",",
"grid",
",",
"row",
",",
"col",
")"
] |
Whites out the old cursor and draws the new one
|
[
"Whites",
"out",
"the",
"old",
"cursor",
"and",
"draws",
"the",
"new",
"one"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L159-L168
|
236,803
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer.get_merged_rect
|
def get_merged_rect(self, grid, key, rect):
    """Returns cell rect for normal or merged cells and None for merged"""
    row, col, tab = key
    # Check if cell is merged:
    cell_attributes = grid.code_array.cell_attributes
    merge_area = cell_attributes[(row, col, tab)]["merge_area"]
    if merge_area is None:
        return rect
    else:
        # We have a merged cell
        top, left, bottom, right = merge_area
        # Are we drawing the top left cell?
        if top == row and left == col:
            # Set rect to merge area
            ul_rect = grid.CellToRect(row, col)
            br_rect = grid.CellToRect(bottom, right)
            width = br_rect.x - ul_rect.x + br_rect.width
            height = br_rect.y - ul_rect.y + br_rect.height
            rect = wx.Rect(ul_rect.x, ul_rect.y, width, height)
            return rect
|
python
|
def get_merged_rect(self, grid, key, rect):
    """Returns cell rect for normal or merged cells and None for merged"""
    row, col, tab = key
    # Check if cell is merged:
    cell_attributes = grid.code_array.cell_attributes
    merge_area = cell_attributes[(row, col, tab)]["merge_area"]
    if merge_area is None:
        return rect
    else:
        # We have a merged cell
        top, left, bottom, right = merge_area
        # Are we drawing the top left cell?
        if top == row and left == col:
            # Set rect to merge area
            ul_rect = grid.CellToRect(row, col)
            br_rect = grid.CellToRect(bottom, right)
            width = br_rect.x - ul_rect.x + br_rect.width
            height = br_rect.y - ul_rect.y + br_rect.height
            rect = wx.Rect(ul_rect.x, ul_rect.y, width, height)
            return rect
|
[
"def",
"get_merged_rect",
"(",
"self",
",",
"grid",
",",
"key",
",",
"rect",
")",
":",
"row",
",",
"col",
",",
"tab",
"=",
"key",
"# Check if cell is merged:",
"cell_attributes",
"=",
"grid",
".",
"code_array",
".",
"cell_attributes",
"merge_area",
"=",
"cell_attributes",
"[",
"(",
"row",
",",
"col",
",",
"tab",
")",
"]",
"[",
"\"merge_area\"",
"]",
"if",
"merge_area",
"is",
"None",
":",
"return",
"rect",
"else",
":",
"# We have a merged cell",
"top",
",",
"left",
",",
"bottom",
",",
"right",
"=",
"merge_area",
"# Are we drawing the top left cell?",
"if",
"top",
"==",
"row",
"and",
"left",
"==",
"col",
":",
"# Set rect to merge area",
"ul_rect",
"=",
"grid",
".",
"CellToRect",
"(",
"row",
",",
"col",
")",
"br_rect",
"=",
"grid",
".",
"CellToRect",
"(",
"bottom",
",",
"right",
")",
"width",
"=",
"br_rect",
".",
"x",
"-",
"ul_rect",
".",
"x",
"+",
"br_rect",
".",
"width",
"height",
"=",
"br_rect",
".",
"y",
"-",
"ul_rect",
".",
"y",
"+",
"br_rect",
".",
"height",
"rect",
"=",
"wx",
".",
"Rect",
"(",
"ul_rect",
".",
"x",
",",
"ul_rect",
".",
"y",
",",
"width",
",",
"height",
")",
"return",
"rect"
] |
Returns cell rect for normal or merged cells and None for merged
|
[
"Returns",
"cell",
"rect",
"for",
"normal",
"or",
"merged",
"cells",
"and",
"None",
"for",
"merged"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L175-L202
|
236,804
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer._get_drawn_rect
|
def _get_drawn_rect(self, grid, key, rect):
    """Replaces drawn rect if the one provided by wx is incorrect
    This handles merged rects including those that are partly off screen.
    """
    rect = self.get_merged_rect(grid, key, rect)
    if rect is None:
        # Merged cell is drawn
        if grid.is_merged_cell_drawn(key):
            # Merging cell is outside view
            row, col, __ = key = self.get_merging_cell(grid, key)
            rect = grid.CellToRect(row, col)
            rect = self.get_merged_rect(grid, key, rect)
        else:
            return
    return rect
|
python
|
def _get_drawn_rect(self, grid, key, rect):
    """Replaces drawn rect if the one provided by wx is incorrect
    This handles merged rects including those that are partly off screen.
    """
    rect = self.get_merged_rect(grid, key, rect)
    if rect is None:
        # Merged cell is drawn
        if grid.is_merged_cell_drawn(key):
            # Merging cell is outside view
            row, col, __ = key = self.get_merging_cell(grid, key)
            rect = grid.CellToRect(row, col)
            rect = self.get_merged_rect(grid, key, rect)
        else:
            return
    return rect
|
[
"def",
"_get_drawn_rect",
"(",
"self",
",",
"grid",
",",
"key",
",",
"rect",
")",
":",
"rect",
"=",
"self",
".",
"get_merged_rect",
"(",
"grid",
",",
"key",
",",
"rect",
")",
"if",
"rect",
"is",
"None",
":",
"# Merged cell is drawn",
"if",
"grid",
".",
"is_merged_cell_drawn",
"(",
"key",
")",
":",
"# Merging cell is outside view",
"row",
",",
"col",
",",
"__",
"=",
"key",
"=",
"self",
".",
"get_merging_cell",
"(",
"grid",
",",
"key",
")",
"rect",
"=",
"grid",
".",
"CellToRect",
"(",
"row",
",",
"col",
")",
"rect",
"=",
"self",
".",
"get_merged_rect",
"(",
"grid",
",",
"key",
",",
"rect",
")",
"else",
":",
"return",
"return",
"rect"
] |
Replaces drawn rect if the one provided by wx is incorrect
This handles merged rects including those that are partly off screen.
|
[
"Replaces",
"drawn",
"rect",
"if",
"the",
"one",
"provided",
"by",
"wx",
"is",
"incorrect"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L204-L222
|
236,805
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer._get_draw_cache_key
|
def _get_draw_cache_key(self, grid, key, drawn_rect, is_selected):
    """Returns key for the screen draw cache"""
    row, col, tab = key
    cell_attributes = grid.code_array.cell_attributes
    zoomed_width = drawn_rect.width / self.zoom
    zoomed_height = drawn_rect.height / self.zoom
    # Button cells shall not be executed for preview
    if grid.code_array.cell_attributes[key]["button_cell"]:
        cell_preview = repr(grid.code_array(key))[:100]
        __id = id(grid.code_array(key))
    else:
        cell_preview = repr(grid.code_array[key])[:100]
        __id = id(grid.code_array[key])
    sorted_keys = sorted(grid.code_array.cell_attributes[key].iteritems())
    key_above_left = row - 1, col - 1, tab
    key_above = row - 1, col, tab
    key_above_right = row - 1, col + 1, tab
    key_left = row, col - 1, tab
    key_right = row, col + 1, tab
    key_below_left = row + 1, col - 1, tab
    key_below = row + 1, col, tab
    borders = []
    for k in [key, key_above_left, key_above, key_above_right,
              key_left, key_right, key_below_left, key_below]:
        borders.append(cell_attributes[k]["borderwidth_bottom"])
        borders.append(cell_attributes[k]["borderwidth_right"])
        borders.append(cell_attributes[k]["bordercolor_bottom"])
        borders.append(cell_attributes[k]["bordercolor_right"])
    return (zoomed_width, zoomed_height, is_selected, cell_preview, __id,
            tuple(sorted_keys), tuple(borders))
|
python
|
def _get_draw_cache_key(self, grid, key, drawn_rect, is_selected):
    """Returns key for the screen draw cache"""
    row, col, tab = key
    cell_attributes = grid.code_array.cell_attributes
    zoomed_width = drawn_rect.width / self.zoom
    zoomed_height = drawn_rect.height / self.zoom
    # Button cells shall not be executed for preview
    if grid.code_array.cell_attributes[key]["button_cell"]:
        cell_preview = repr(grid.code_array(key))[:100]
        __id = id(grid.code_array(key))
    else:
        cell_preview = repr(grid.code_array[key])[:100]
        __id = id(grid.code_array[key])
    sorted_keys = sorted(grid.code_array.cell_attributes[key].iteritems())
    key_above_left = row - 1, col - 1, tab
    key_above = row - 1, col, tab
    key_above_right = row - 1, col + 1, tab
    key_left = row, col - 1, tab
    key_right = row, col + 1, tab
    key_below_left = row + 1, col - 1, tab
    key_below = row + 1, col, tab
    borders = []
    for k in [key, key_above_left, key_above, key_above_right,
              key_left, key_right, key_below_left, key_below]:
        borders.append(cell_attributes[k]["borderwidth_bottom"])
        borders.append(cell_attributes[k]["borderwidth_right"])
        borders.append(cell_attributes[k]["bordercolor_bottom"])
        borders.append(cell_attributes[k]["bordercolor_right"])
    return (zoomed_width, zoomed_height, is_selected, cell_preview, __id,
            tuple(sorted_keys), tuple(borders))
|
[
"def",
"_get_draw_cache_key",
"(",
"self",
",",
"grid",
",",
"key",
",",
"drawn_rect",
",",
"is_selected",
")",
":",
"row",
",",
"col",
",",
"tab",
"=",
"key",
"cell_attributes",
"=",
"grid",
".",
"code_array",
".",
"cell_attributes",
"zoomed_width",
"=",
"drawn_rect",
".",
"width",
"/",
"self",
".",
"zoom",
"zoomed_height",
"=",
"drawn_rect",
".",
"height",
"/",
"self",
".",
"zoom",
"# Button cells shall not be executed for preview",
"if",
"grid",
".",
"code_array",
".",
"cell_attributes",
"[",
"key",
"]",
"[",
"\"button_cell\"",
"]",
":",
"cell_preview",
"=",
"repr",
"(",
"grid",
".",
"code_array",
"(",
"key",
")",
")",
"[",
":",
"100",
"]",
"__id",
"=",
"id",
"(",
"grid",
".",
"code_array",
"(",
"key",
")",
")",
"else",
":",
"cell_preview",
"=",
"repr",
"(",
"grid",
".",
"code_array",
"[",
"key",
"]",
")",
"[",
":",
"100",
"]",
"__id",
"=",
"id",
"(",
"grid",
".",
"code_array",
"[",
"key",
"]",
")",
"sorted_keys",
"=",
"sorted",
"(",
"grid",
".",
"code_array",
".",
"cell_attributes",
"[",
"key",
"]",
".",
"iteritems",
"(",
")",
")",
"key_above_left",
"=",
"row",
"-",
"1",
",",
"col",
"-",
"1",
",",
"tab",
"key_above",
"=",
"row",
"-",
"1",
",",
"col",
",",
"tab",
"key_above_right",
"=",
"row",
"-",
"1",
",",
"col",
"+",
"1",
",",
"tab",
"key_left",
"=",
"row",
",",
"col",
"-",
"1",
",",
"tab",
"key_right",
"=",
"row",
",",
"col",
"+",
"1",
",",
"tab",
"key_below_left",
"=",
"row",
"+",
"1",
",",
"col",
"-",
"1",
",",
"tab",
"key_below",
"=",
"row",
"+",
"1",
",",
"col",
",",
"tab",
"borders",
"=",
"[",
"]",
"for",
"k",
"in",
"[",
"key",
",",
"key_above_left",
",",
"key_above",
",",
"key_above_right",
",",
"key_left",
",",
"key_right",
",",
"key_below_left",
",",
"key_below",
"]",
":",
"borders",
".",
"append",
"(",
"cell_attributes",
"[",
"k",
"]",
"[",
"\"borderwidth_bottom\"",
"]",
")",
"borders",
".",
"append",
"(",
"cell_attributes",
"[",
"k",
"]",
"[",
"\"borderwidth_right\"",
"]",
")",
"borders",
".",
"append",
"(",
"cell_attributes",
"[",
"k",
"]",
"[",
"\"bordercolor_bottom\"",
"]",
")",
"borders",
".",
"append",
"(",
"cell_attributes",
"[",
"k",
"]",
"[",
"\"bordercolor_right\"",
"]",
")",
"return",
"(",
"zoomed_width",
",",
"zoomed_height",
",",
"is_selected",
",",
"cell_preview",
",",
"__id",
",",
"tuple",
"(",
"sorted_keys",
")",
",",
"tuple",
"(",
"borders",
")",
")"
] |
Returns key for the screen draw cache
|
[
"Returns",
"key",
"for",
"the",
"screen",
"draw",
"cache"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L224-L261
|
236,806
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer._get_cairo_bmp
|
def _get_cairo_bmp(self, mdc, key, rect, is_selected, view_frozen):
    """Returns a wx.Bitmap of cell key in size rect"""
    bmp = wx.EmptyBitmap(rect.width, rect.height)
    mdc.SelectObject(bmp)
    mdc.SetBackgroundMode(wx.SOLID)
    mdc.SetBackground(wx.WHITE_BRUSH)
    mdc.Clear()
    mdc.SetDeviceOrigin(0, 0)
    context = wx.lib.wxcairo.ContextFromDC(mdc)
    context.save()
    # Zoom context
    zoom = self.zoom
    context.scale(zoom, zoom)
    # Set off cell renderer by 1/2 a pixel to avoid blurry lines
    rect_tuple = \
        -0.5, -0.5, rect.width / zoom + 0.5, rect.height / zoom + 0.5
    spell_check = config["check_spelling"]
    cell_renderer = GridCellCairoRenderer(context, self.data_array,
                                          key, rect_tuple, view_frozen,
                                          spell_check=spell_check)
    # Draw cell
    cell_renderer.draw()
    # Draw selection if present
    if is_selected:
        context.set_source_rgba(*self.selection_color_tuple)
        context.rectangle(*rect_tuple)
        context.fill()
    context.restore()
    return bmp
|
python
|
def _get_cairo_bmp(self, mdc, key, rect, is_selected, view_frozen):
    """Returns a wx.Bitmap of cell key in size rect"""
    bmp = wx.EmptyBitmap(rect.width, rect.height)
    mdc.SelectObject(bmp)
    mdc.SetBackgroundMode(wx.SOLID)
    mdc.SetBackground(wx.WHITE_BRUSH)
    mdc.Clear()
    mdc.SetDeviceOrigin(0, 0)
    context = wx.lib.wxcairo.ContextFromDC(mdc)
    context.save()
    # Zoom context
    zoom = self.zoom
    context.scale(zoom, zoom)
    # Set off cell renderer by 1/2 a pixel to avoid blurry lines
    rect_tuple = \
        -0.5, -0.5, rect.width / zoom + 0.5, rect.height / zoom + 0.5
    spell_check = config["check_spelling"]
    cell_renderer = GridCellCairoRenderer(context, self.data_array,
                                          key, rect_tuple, view_frozen,
                                          spell_check=spell_check)
    # Draw cell
    cell_renderer.draw()
    # Draw selection if present
    if is_selected:
        context.set_source_rgba(*self.selection_color_tuple)
        context.rectangle(*rect_tuple)
        context.fill()
    context.restore()
    return bmp
|
[
"def",
"_get_cairo_bmp",
"(",
"self",
",",
"mdc",
",",
"key",
",",
"rect",
",",
"is_selected",
",",
"view_frozen",
")",
":",
"bmp",
"=",
"wx",
".",
"EmptyBitmap",
"(",
"rect",
".",
"width",
",",
"rect",
".",
"height",
")",
"mdc",
".",
"SelectObject",
"(",
"bmp",
")",
"mdc",
".",
"SetBackgroundMode",
"(",
"wx",
".",
"SOLID",
")",
"mdc",
".",
"SetBackground",
"(",
"wx",
".",
"WHITE_BRUSH",
")",
"mdc",
".",
"Clear",
"(",
")",
"mdc",
".",
"SetDeviceOrigin",
"(",
"0",
",",
"0",
")",
"context",
"=",
"wx",
".",
"lib",
".",
"wxcairo",
".",
"ContextFromDC",
"(",
"mdc",
")",
"context",
".",
"save",
"(",
")",
"# Zoom context",
"zoom",
"=",
"self",
".",
"zoom",
"context",
".",
"scale",
"(",
"zoom",
",",
"zoom",
")",
"# Set off cell renderer by 1/2 a pixel to avoid blurry lines",
"rect_tuple",
"=",
"-",
"0.5",
",",
"-",
"0.5",
",",
"rect",
".",
"width",
"/",
"zoom",
"+",
"0.5",
",",
"rect",
".",
"height",
"/",
"zoom",
"+",
"0.5",
"spell_check",
"=",
"config",
"[",
"\"check_spelling\"",
"]",
"cell_renderer",
"=",
"GridCellCairoRenderer",
"(",
"context",
",",
"self",
".",
"data_array",
",",
"key",
",",
"rect_tuple",
",",
"view_frozen",
",",
"spell_check",
"=",
"spell_check",
")",
"# Draw cell",
"cell_renderer",
".",
"draw",
"(",
")",
"# Draw selection if present",
"if",
"is_selected",
":",
"context",
".",
"set_source_rgba",
"(",
"*",
"self",
".",
"selection_color_tuple",
")",
"context",
".",
"rectangle",
"(",
"*",
"rect_tuple",
")",
"context",
".",
"fill",
"(",
")",
"context",
".",
"restore",
"(",
")",
"return",
"bmp"
] |
Returns a wx.Bitmap of cell key in size rect
|
[
"Returns",
"a",
"wx",
".",
"Bitmap",
"of",
"cell",
"key",
"in",
"size",
"rect"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L263-L299
|
236,807
|
manns/pyspread
|
pyspread/src/gui/_grid_renderer.py
|
GridRenderer.Draw
|
def Draw(self, grid, attr, dc, rect, row, col, isSelected):
    """Draws the cell border and content using pycairo"""
    key = row, col, grid.current_table
    # If cell is merge draw the merging cell if invisibile
    if grid.code_array.cell_attributes[key]["merge_area"]:
        key = self.get_merging_cell(grid, key)
    drawn_rect = self._get_drawn_rect(grid, key, rect)
    if drawn_rect is None:
        return
    cell_cache_key = self._get_draw_cache_key(grid, key, drawn_rect,
                                              isSelected)
    mdc = wx.MemoryDC()
    if vlc is not None and key in self.video_cells and \
       grid.code_array.cell_attributes[key]["panel_cell"]:
        # Update video position of previously created video panel
        self.video_cells[key].SetClientRect(drawn_rect)
    elif cell_cache_key in self.cell_cache:
        mdc.SelectObject(self.cell_cache[cell_cache_key])
    else:
        code = grid.code_array(key)
        if vlc is not None and code is not None and \
           grid.code_array.cell_attributes[key]["panel_cell"]:
            try:
                # A panel is to be displayed
                panel_cls = grid.code_array[key]
                # Assert that we have a subclass of a wxPanel that we
                # can instantiate
                assert issubclass(panel_cls, wx.Panel)
                video_panel = panel_cls(grid)
                video_panel.SetClientRect(drawn_rect)
                # Register video cell
                self.video_cells[key] = video_panel
                return
            except Exception, err:
                # Someting is wrong with the panel to be displayed
                post_command_event(grid.main_window, self.StatusBarMsg,
                                   text=unicode(err))
                bmp = self._get_cairo_bmp(mdc, key, drawn_rect, isSelected,
                                          grid._view_frozen)
        else:
            bmp = self._get_cairo_bmp(mdc, key, drawn_rect, isSelected,
                                      grid._view_frozen)
        # Put resulting bmp into cache
        self.cell_cache[cell_cache_key] = bmp
    dc.Blit(drawn_rect.x, drawn_rect.y,
            drawn_rect.width, drawn_rect.height,
            mdc, 0, 0, wx.COPY)
    # Draw cursor
    if grid.actions.cursor[:2] == (row, col):
        self.update_cursor(dc, grid, row, col)
|
python
|
def Draw(self, grid, attr, dc, rect, row, col, isSelected):
    """Draws the cell border and content using pycairo"""
    key = row, col, grid.current_table
    # If cell is merge draw the merging cell if invisibile
    if grid.code_array.cell_attributes[key]["merge_area"]:
        key = self.get_merging_cell(grid, key)
    drawn_rect = self._get_drawn_rect(grid, key, rect)
    if drawn_rect is None:
        return
    cell_cache_key = self._get_draw_cache_key(grid, key, drawn_rect,
                                              isSelected)
    mdc = wx.MemoryDC()
    if vlc is not None and key in self.video_cells and \
       grid.code_array.cell_attributes[key]["panel_cell"]:
        # Update video position of previously created video panel
        self.video_cells[key].SetClientRect(drawn_rect)
    elif cell_cache_key in self.cell_cache:
        mdc.SelectObject(self.cell_cache[cell_cache_key])
    else:
        code = grid.code_array(key)
        if vlc is not None and code is not None and \
           grid.code_array.cell_attributes[key]["panel_cell"]:
            try:
                # A panel is to be displayed
                panel_cls = grid.code_array[key]
                # Assert that we have a subclass of a wxPanel that we
                # can instantiate
                assert issubclass(panel_cls, wx.Panel)
                video_panel = panel_cls(grid)
                video_panel.SetClientRect(drawn_rect)
                # Register video cell
                self.video_cells[key] = video_panel
                return
            except Exception, err:
                # Someting is wrong with the panel to be displayed
                post_command_event(grid.main_window, self.StatusBarMsg,
                                   text=unicode(err))
                bmp = self._get_cairo_bmp(mdc, key, drawn_rect, isSelected,
                                          grid._view_frozen)
        else:
            bmp = self._get_cairo_bmp(mdc, key, drawn_rect, isSelected,
                                      grid._view_frozen)
        # Put resulting bmp into cache
        self.cell_cache[cell_cache_key] = bmp
    dc.Blit(drawn_rect.x, drawn_rect.y,
            drawn_rect.width, drawn_rect.height,
            mdc, 0, 0, wx.COPY)
    # Draw cursor
    if grid.actions.cursor[:2] == (row, col):
        self.update_cursor(dc, grid, row, col)
|
[
"def",
"Draw",
"(",
"self",
",",
"grid",
",",
"attr",
",",
"dc",
",",
"rect",
",",
"row",
",",
"col",
",",
"isSelected",
")",
":",
"key",
"=",
"row",
",",
"col",
",",
"grid",
".",
"current_table",
"# If cell is merge draw the merging cell if invisibile",
"if",
"grid",
".",
"code_array",
".",
"cell_attributes",
"[",
"key",
"]",
"[",
"\"merge_area\"",
"]",
":",
"key",
"=",
"self",
".",
"get_merging_cell",
"(",
"grid",
",",
"key",
")",
"drawn_rect",
"=",
"self",
".",
"_get_drawn_rect",
"(",
"grid",
",",
"key",
",",
"rect",
")",
"if",
"drawn_rect",
"is",
"None",
":",
"return",
"cell_cache_key",
"=",
"self",
".",
"_get_draw_cache_key",
"(",
"grid",
",",
"key",
",",
"drawn_rect",
",",
"isSelected",
")",
"mdc",
"=",
"wx",
".",
"MemoryDC",
"(",
")",
"if",
"vlc",
"is",
"not",
"None",
"and",
"key",
"in",
"self",
".",
"video_cells",
"and",
"grid",
".",
"code_array",
".",
"cell_attributes",
"[",
"key",
"]",
"[",
"\"panel_cell\"",
"]",
":",
"# Update video position of previously created video panel",
"self",
".",
"video_cells",
"[",
"key",
"]",
".",
"SetClientRect",
"(",
"drawn_rect",
")",
"elif",
"cell_cache_key",
"in",
"self",
".",
"cell_cache",
":",
"mdc",
".",
"SelectObject",
"(",
"self",
".",
"cell_cache",
"[",
"cell_cache_key",
"]",
")",
"else",
":",
"code",
"=",
"grid",
".",
"code_array",
"(",
"key",
")",
"if",
"vlc",
"is",
"not",
"None",
"and",
"code",
"is",
"not",
"None",
"and",
"grid",
".",
"code_array",
".",
"cell_attributes",
"[",
"key",
"]",
"[",
"\"panel_cell\"",
"]",
":",
"try",
":",
"# A panel is to be displayed",
"panel_cls",
"=",
"grid",
".",
"code_array",
"[",
"key",
"]",
"# Assert that we have a subclass of a wxPanel that we",
"# can instantiate",
"assert",
"issubclass",
"(",
"panel_cls",
",",
"wx",
".",
"Panel",
")",
"video_panel",
"=",
"panel_cls",
"(",
"grid",
")",
"video_panel",
".",
"SetClientRect",
"(",
"drawn_rect",
")",
"# Register video cell",
"self",
".",
"video_cells",
"[",
"key",
"]",
"=",
"video_panel",
"return",
"except",
"Exception",
",",
"err",
":",
"# Someting is wrong with the panel to be displayed",
"post_command_event",
"(",
"grid",
".",
"main_window",
",",
"self",
".",
"StatusBarMsg",
",",
"text",
"=",
"unicode",
"(",
"err",
")",
")",
"bmp",
"=",
"self",
".",
"_get_cairo_bmp",
"(",
"mdc",
",",
"key",
",",
"drawn_rect",
",",
"isSelected",
",",
"grid",
".",
"_view_frozen",
")",
"else",
":",
"bmp",
"=",
"self",
".",
"_get_cairo_bmp",
"(",
"mdc",
",",
"key",
",",
"drawn_rect",
",",
"isSelected",
",",
"grid",
".",
"_view_frozen",
")",
"# Put resulting bmp into cache",
"self",
".",
"cell_cache",
"[",
"cell_cache_key",
"]",
"=",
"bmp",
"dc",
".",
"Blit",
"(",
"drawn_rect",
".",
"x",
",",
"drawn_rect",
".",
"y",
",",
"drawn_rect",
".",
"width",
",",
"drawn_rect",
".",
"height",
",",
"mdc",
",",
"0",
",",
"0",
",",
"wx",
".",
"COPY",
")",
"# Draw cursor",
"if",
"grid",
".",
"actions",
".",
"cursor",
"[",
":",
"2",
"]",
"==",
"(",
"row",
",",
"col",
")",
":",
"self",
".",
"update_cursor",
"(",
"dc",
",",
"grid",
",",
"row",
",",
"col",
")"
] |
Draws the cell border and content using pycairo
|
[
"Draws",
"the",
"cell",
"border",
"and",
"content",
"using",
"pycairo"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_grid_renderer.py#L301-L365
|
236,808
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
choose_key
|
def choose_key(gpg_private_keys):
    """Displays gpg key choice and returns key"""
    uid_strings_fp = []
    uid_string_fp2key = {}
    current_key_index = None
    for i, key in enumerate(gpg_private_keys):
        fingerprint = key['fingerprint']
        if fingerprint == config["gpg_key_fingerprint"]:
            current_key_index = i
        for uid_string in key['uids']:
            uid_string_fp = '"' + uid_string + ' (' + fingerprint + ')'
            uid_strings_fp.append(uid_string_fp)
            uid_string_fp2key[uid_string_fp] = key
    msg = _('Choose a GPG key for signing pyspread save files.\n'
            'The GPG key must not have a passphrase set.')
    dlg = wx.SingleChoiceDialog(None, msg, _('Choose key'), uid_strings_fp,
                                wx.CHOICEDLG_STYLE)
    childlist = list(dlg.GetChildren())
    childlist[-3].SetLabel(_("Use chosen key"))
    childlist[-2].SetLabel(_("Create new key"))
    if current_key_index is not None:
        # Set choice to current key
        dlg.SetSelection(current_key_index)
    if dlg.ShowModal() == wx.ID_OK:
        uid_string_fp = dlg.GetStringSelection()
        key = uid_string_fp2key[uid_string_fp]
    else:
        key = None
    dlg.Destroy()
    return key
|
python
|
def choose_key(gpg_private_keys):
    """Displays gpg key choice and returns key"""
    uid_strings_fp = []
    uid_string_fp2key = {}
    current_key_index = None
    for i, key in enumerate(gpg_private_keys):
        fingerprint = key['fingerprint']
        if fingerprint == config["gpg_key_fingerprint"]:
            current_key_index = i
        for uid_string in key['uids']:
            uid_string_fp = '"' + uid_string + ' (' + fingerprint + ')'
            uid_strings_fp.append(uid_string_fp)
            uid_string_fp2key[uid_string_fp] = key
    msg = _('Choose a GPG key for signing pyspread save files.\n'
            'The GPG key must not have a passphrase set.')
    dlg = wx.SingleChoiceDialog(None, msg, _('Choose key'), uid_strings_fp,
                                wx.CHOICEDLG_STYLE)
    childlist = list(dlg.GetChildren())
    childlist[-3].SetLabel(_("Use chosen key"))
    childlist[-2].SetLabel(_("Create new key"))
    if current_key_index is not None:
        # Set choice to current key
        dlg.SetSelection(current_key_index)
    if dlg.ShowModal() == wx.ID_OK:
        uid_string_fp = dlg.GetStringSelection()
        key = uid_string_fp2key[uid_string_fp]
    else:
        key = None
    dlg.Destroy()
    return key
|
[
"def",
"choose_key",
"(",
"gpg_private_keys",
")",
":",
"uid_strings_fp",
"=",
"[",
"]",
"uid_string_fp2key",
"=",
"{",
"}",
"current_key_index",
"=",
"None",
"for",
"i",
",",
"key",
"in",
"enumerate",
"(",
"gpg_private_keys",
")",
":",
"fingerprint",
"=",
"key",
"[",
"'fingerprint'",
"]",
"if",
"fingerprint",
"==",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
":",
"current_key_index",
"=",
"i",
"for",
"uid_string",
"in",
"key",
"[",
"'uids'",
"]",
":",
"uid_string_fp",
"=",
"'\"'",
"+",
"uid_string",
"+",
"' ('",
"+",
"fingerprint",
"+",
"')'",
"uid_strings_fp",
".",
"append",
"(",
"uid_string_fp",
")",
"uid_string_fp2key",
"[",
"uid_string_fp",
"]",
"=",
"key",
"msg",
"=",
"_",
"(",
"'Choose a GPG key for signing pyspread save files.\\n'",
"'The GPG key must not have a passphrase set.'",
")",
"dlg",
"=",
"wx",
".",
"SingleChoiceDialog",
"(",
"None",
",",
"msg",
",",
"_",
"(",
"'Choose key'",
")",
",",
"uid_strings_fp",
",",
"wx",
".",
"CHOICEDLG_STYLE",
")",
"childlist",
"=",
"list",
"(",
"dlg",
".",
"GetChildren",
"(",
")",
")",
"childlist",
"[",
"-",
"3",
"]",
".",
"SetLabel",
"(",
"_",
"(",
"\"Use chosen key\"",
")",
")",
"childlist",
"[",
"-",
"2",
"]",
".",
"SetLabel",
"(",
"_",
"(",
"\"Create new key\"",
")",
")",
"if",
"current_key_index",
"is",
"not",
"None",
":",
"# Set choice to current key",
"dlg",
".",
"SetSelection",
"(",
"current_key_index",
")",
"if",
"dlg",
".",
"ShowModal",
"(",
")",
"==",
"wx",
".",
"ID_OK",
":",
"uid_string_fp",
"=",
"dlg",
".",
"GetStringSelection",
"(",
")",
"key",
"=",
"uid_string_fp2key",
"[",
"uid_string_fp",
"]",
"else",
":",
"key",
"=",
"None",
"dlg",
".",
"Destroy",
"(",
")",
"return",
"key"
] |
Displays gpg key choice and returns key
|
[
"Displays",
"gpg",
"key",
"choice",
"and",
"returns",
"key"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L53-L95
|
236,809
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
_register_key
|
def _register_key(fingerprint, gpg):
    """Registers key in config"""
    for private_key in gpg.list_keys(True):
        try:
            if str(fingerprint) == private_key['fingerprint']:
                config["gpg_key_fingerprint"] = \
                    repr(private_key['fingerprint'])
        except KeyError:
            pass
|
python
|
def _register_key(fingerprint, gpg):
    """Registers key in config"""
    for private_key in gpg.list_keys(True):
        try:
            if str(fingerprint) == private_key['fingerprint']:
                config["gpg_key_fingerprint"] = \
                    repr(private_key['fingerprint'])
        except KeyError:
            pass
|
[
"def",
"_register_key",
"(",
"fingerprint",
",",
"gpg",
")",
":",
"for",
"private_key",
"in",
"gpg",
".",
"list_keys",
"(",
"True",
")",
":",
"try",
":",
"if",
"str",
"(",
"fingerprint",
")",
"==",
"private_key",
"[",
"'fingerprint'",
"]",
":",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
"=",
"repr",
"(",
"private_key",
"[",
"'fingerprint'",
"]",
")",
"except",
"KeyError",
":",
"pass"
] |
Registers key in config
|
[
"Registers",
"key",
"in",
"config"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L98-L107
|
236,810
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
has_no_password
|
def has_no_password(gpg_secret_keyid):
    """Returns True iif gpg_secret_key has a password"""
    if gnupg is None:
        return False
    gpg = gnupg.GPG()
    s = gpg.sign("", keyid=gpg_secret_keyid, passphrase="")
    try:
        return s.status == "signature created"
    except AttributeError:
        # This may happen on Windows
        if hasattr(s, "stderr"):
            return "GOOD_PASSPHRASE" in s.stderr
|
python
|
def has_no_password(gpg_secret_keyid):
    """Returns True iif gpg_secret_key has a password"""
    if gnupg is None:
        return False
    gpg = gnupg.GPG()
    s = gpg.sign("", keyid=gpg_secret_keyid, passphrase="")
    try:
        return s.status == "signature created"
    except AttributeError:
        # This may happen on Windows
        if hasattr(s, "stderr"):
            return "GOOD_PASSPHRASE" in s.stderr
|
[
"def",
"has_no_password",
"(",
"gpg_secret_keyid",
")",
":",
"if",
"gnupg",
"is",
"None",
":",
"return",
"False",
"gpg",
"=",
"gnupg",
".",
"GPG",
"(",
")",
"s",
"=",
"gpg",
".",
"sign",
"(",
"\"\"",
",",
"keyid",
"=",
"gpg_secret_keyid",
",",
"passphrase",
"=",
"\"\"",
")",
"try",
":",
"return",
"s",
".",
"status",
"==",
"\"signature created\"",
"except",
"AttributeError",
":",
"# This may happen on Windows",
"if",
"hasattr",
"(",
"s",
",",
"\"stderr\"",
")",
":",
"return",
"\"GOOD_PASSPHRASE\"",
"in",
"s",
".",
"stderr"
] |
Returns True iif gpg_secret_key has a password
|
[
"Returns",
"True",
"iif",
"gpg_secret_key",
"has",
"a",
"password"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L110-L124
|
236,811
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
genkey
|
def genkey(key_name=None):
    """Creates a new standard GPG key
    Parameters
    ----------
    ui: Bool
    \tIf True, then a new key is created when required without user interaction
    """
    if gnupg is None:
        return
    gpg_key_param_list = [
        ('key_type', 'DSA'),
        ('key_length', '2048'),
        ('subkey_type', 'ELG-E'),
        ('subkey_length', '2048'),
        ('expire_date', '0'),
    ]
    gpg = gnupg.GPG()
    gpg.encoding = 'utf-8'
    # Check if standard key is already present
    pyspread_key_fingerprint = config["gpg_key_fingerprint"]
    gpg_private_keys = [key for key in gpg.list_keys(secret=True)
                        if has_no_password(key["keyid"])]
    gpg_private_fingerprints = \
        [key['fingerprint'] for key in gpg.list_keys(secret=True)
         if has_no_password(key["keyid"])]
    pyspread_key = None
    for private_key, fingerprint in zip(gpg_private_keys,
                                        gpg_private_fingerprints):
        if str(pyspread_key_fingerprint) == fingerprint:
            pyspread_key = private_key
    if gpg_private_keys:
        # If GPG are available, choose one
        pyspread_key = choose_key(gpg_private_keys)
    if pyspread_key:
        # A key has been chosen
        config["gpg_key_fingerprint"] = repr(pyspread_key['fingerprint'])
    else:
        # No key has been chosen --> Create new one
        if key_name is None:
            gpg_key_parameters = get_key_params_from_user(gpg_key_param_list)
            if gpg_key_parameters is None:
                # No name entered
                return
        else:
            gpg_key_param_list.append(
                ('name_real', '{key_name}'.format(key_name=key_name)))
            gpg_key_parameters = dict(gpg_key_param_list)
        input_data = gpg.gen_key_input(**gpg_key_parameters)
        # Generate key
        # ------------
        if key_name is None:
            # Show information dialog
            style = wx.ICON_INFORMATION | wx.DIALOG_NO_PARENT | wx.OK | \
                wx.CANCEL
            pyspread_key_uid = gpg_key_parameters["name_real"]
            short_message = _("New GPG key").format(pyspread_key_uid)
            message = _("After confirming this dialog, a new GPG key ") + \
                _("'{key}' will be generated.").format(key=pyspread_key_uid) +\
                _(" \n \nThis may take some time.\nPlease wait.")
            dlg = wx.MessageDialog(None, message, short_message, style)
            dlg.Centre()
            if dlg.ShowModal() == wx.ID_OK:
                dlg.Destroy()
                gpg_key = gpg.gen_key(input_data)
                _register_key(gpg_key, gpg)
                fingerprint = gpg_key.fingerprint
            else:
                dlg.Destroy()
                return
        else:
            gpg_key = gpg.gen_key(input_data)
            _register_key(gpg_key, gpg)
            fingerprint = gpg_key.fingerprint
        return fingerprint
|
python
|
def genkey(key_name=None):
    """Creates a new standard GPG key
    Parameters
    ----------
    ui: Bool
    \tIf True, then a new key is created when required without user interaction
    """
    if gnupg is None:
        return
    gpg_key_param_list = [
        ('key_type', 'DSA'),
        ('key_length', '2048'),
        ('subkey_type', 'ELG-E'),
        ('subkey_length', '2048'),
        ('expire_date', '0'),
    ]
    gpg = gnupg.GPG()
    gpg.encoding = 'utf-8'
    # Check if standard key is already present
    pyspread_key_fingerprint = config["gpg_key_fingerprint"]
    gpg_private_keys = [key for key in gpg.list_keys(secret=True)
                        if has_no_password(key["keyid"])]
    gpg_private_fingerprints = \
        [key['fingerprint'] for key in gpg.list_keys(secret=True)
         if has_no_password(key["keyid"])]
    pyspread_key = None
    for private_key, fingerprint in zip(gpg_private_keys,
                                        gpg_private_fingerprints):
        if str(pyspread_key_fingerprint) == fingerprint:
            pyspread_key = private_key
    if gpg_private_keys:
        # If GPG are available, choose one
        pyspread_key = choose_key(gpg_private_keys)
    if pyspread_key:
        # A key has been chosen
        config["gpg_key_fingerprint"] = repr(pyspread_key['fingerprint'])
    else:
        # No key has been chosen --> Create new one
        if key_name is None:
            gpg_key_parameters = get_key_params_from_user(gpg_key_param_list)
            if gpg_key_parameters is None:
                # No name entered
                return
        else:
            gpg_key_param_list.append(
                ('name_real', '{key_name}'.format(key_name=key_name)))
            gpg_key_parameters = dict(gpg_key_param_list)
        input_data = gpg.gen_key_input(**gpg_key_parameters)
        # Generate key
        # ------------
        if key_name is None:
            # Show information dialog
            style = wx.ICON_INFORMATION | wx.DIALOG_NO_PARENT | wx.OK | \
                wx.CANCEL
            pyspread_key_uid = gpg_key_parameters["name_real"]
            short_message = _("New GPG key").format(pyspread_key_uid)
            message = _("After confirming this dialog, a new GPG key ") + \
                _("'{key}' will be generated.").format(key=pyspread_key_uid) +\
                _(" \n \nThis may take some time.\nPlease wait.")
            dlg = wx.MessageDialog(None, message, short_message, style)
            dlg.Centre()
            if dlg.ShowModal() == wx.ID_OK:
                dlg.Destroy()
                gpg_key = gpg.gen_key(input_data)
                _register_key(gpg_key, gpg)
                fingerprint = gpg_key.fingerprint
            else:
                dlg.Destroy()
                return
        else:
            gpg_key = gpg.gen_key(input_data)
            _register_key(gpg_key, gpg)
            fingerprint = gpg_key.fingerprint
        return fingerprint
|
[
"def",
"genkey",
"(",
"key_name",
"=",
"None",
")",
":",
"if",
"gnupg",
"is",
"None",
":",
"return",
"gpg_key_param_list",
"=",
"[",
"(",
"'key_type'",
",",
"'DSA'",
")",
",",
"(",
"'key_length'",
",",
"'2048'",
")",
",",
"(",
"'subkey_type'",
",",
"'ELG-E'",
")",
",",
"(",
"'subkey_length'",
",",
"'2048'",
")",
",",
"(",
"'expire_date'",
",",
"'0'",
")",
",",
"]",
"gpg",
"=",
"gnupg",
".",
"GPG",
"(",
")",
"gpg",
".",
"encoding",
"=",
"'utf-8'",
"# Check if standard key is already present",
"pyspread_key_fingerprint",
"=",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
"gpg_private_keys",
"=",
"[",
"key",
"for",
"key",
"in",
"gpg",
".",
"list_keys",
"(",
"secret",
"=",
"True",
")",
"if",
"has_no_password",
"(",
"key",
"[",
"\"keyid\"",
"]",
")",
"]",
"gpg_private_fingerprints",
"=",
"[",
"key",
"[",
"'fingerprint'",
"]",
"for",
"key",
"in",
"gpg",
".",
"list_keys",
"(",
"secret",
"=",
"True",
")",
"if",
"has_no_password",
"(",
"key",
"[",
"\"keyid\"",
"]",
")",
"]",
"pyspread_key",
"=",
"None",
"for",
"private_key",
",",
"fingerprint",
"in",
"zip",
"(",
"gpg_private_keys",
",",
"gpg_private_fingerprints",
")",
":",
"if",
"str",
"(",
"pyspread_key_fingerprint",
")",
"==",
"fingerprint",
":",
"pyspread_key",
"=",
"private_key",
"if",
"gpg_private_keys",
":",
"# If GPG are available, choose one",
"pyspread_key",
"=",
"choose_key",
"(",
"gpg_private_keys",
")",
"if",
"pyspread_key",
":",
"# A key has been chosen",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
"=",
"repr",
"(",
"pyspread_key",
"[",
"'fingerprint'",
"]",
")",
"else",
":",
"# No key has been chosen --> Create new one",
"if",
"key_name",
"is",
"None",
":",
"gpg_key_parameters",
"=",
"get_key_params_from_user",
"(",
"gpg_key_param_list",
")",
"if",
"gpg_key_parameters",
"is",
"None",
":",
"# No name entered",
"return",
"else",
":",
"gpg_key_param_list",
".",
"append",
"(",
"(",
"'name_real'",
",",
"'{key_name}'",
".",
"format",
"(",
"key_name",
"=",
"key_name",
")",
")",
")",
"gpg_key_parameters",
"=",
"dict",
"(",
"gpg_key_param_list",
")",
"input_data",
"=",
"gpg",
".",
"gen_key_input",
"(",
"*",
"*",
"gpg_key_parameters",
")",
"# Generate key",
"# ------------",
"if",
"key_name",
"is",
"None",
":",
"# Show information dialog",
"style",
"=",
"wx",
".",
"ICON_INFORMATION",
"|",
"wx",
".",
"DIALOG_NO_PARENT",
"|",
"wx",
".",
"OK",
"|",
"wx",
".",
"CANCEL",
"pyspread_key_uid",
"=",
"gpg_key_parameters",
"[",
"\"name_real\"",
"]",
"short_message",
"=",
"_",
"(",
"\"New GPG key\"",
")",
".",
"format",
"(",
"pyspread_key_uid",
")",
"message",
"=",
"_",
"(",
"\"After confirming this dialog, a new GPG key \"",
")",
"+",
"_",
"(",
"\"'{key}' will be generated.\"",
")",
".",
"format",
"(",
"key",
"=",
"pyspread_key_uid",
")",
"+",
"_",
"(",
"\" \\n \\nThis may take some time.\\nPlease wait.\"",
")",
"dlg",
"=",
"wx",
".",
"MessageDialog",
"(",
"None",
",",
"message",
",",
"short_message",
",",
"style",
")",
"dlg",
".",
"Centre",
"(",
")",
"if",
"dlg",
".",
"ShowModal",
"(",
")",
"==",
"wx",
".",
"ID_OK",
":",
"dlg",
".",
"Destroy",
"(",
")",
"gpg_key",
"=",
"gpg",
".",
"gen_key",
"(",
"input_data",
")",
"_register_key",
"(",
"gpg_key",
",",
"gpg",
")",
"fingerprint",
"=",
"gpg_key",
".",
"fingerprint",
"else",
":",
"dlg",
".",
"Destroy",
"(",
")",
"return",
"else",
":",
"gpg_key",
"=",
"gpg",
".",
"gen_key",
"(",
"input_data",
")",
"_register_key",
"(",
"gpg_key",
",",
"gpg",
")",
"fingerprint",
"=",
"gpg_key",
".",
"fingerprint",
"return",
"fingerprint"
] |
Creates a new standard GPG key
Parameters
----------
ui: Bool
\tIf True, then a new key is created when required without user interaction
|
[
"Creates",
"a",
"new",
"standard",
"GPG",
"key"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L127-L221
|
236,812
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
fingerprint2keyid
|
def fingerprint2keyid(fingerprint):
    """Returns keyid from fingerprint for private keys"""
    if gnupg is None:
        return
    gpg = gnupg.GPG()
    private_keys = gpg.list_keys(True)
    keyid = None
    for private_key in private_keys:
        if private_key['fingerprint'] == config["gpg_key_fingerprint"]:
            keyid = private_key['keyid']
            break
    return keyid
|
python
|
def fingerprint2keyid(fingerprint):
    """Returns keyid from fingerprint for private keys"""
    if gnupg is None:
        return
    gpg = gnupg.GPG()
    private_keys = gpg.list_keys(True)
    keyid = None
    for private_key in private_keys:
        if private_key['fingerprint'] == config["gpg_key_fingerprint"]:
            keyid = private_key['keyid']
            break
    return keyid
|
[
"def",
"fingerprint2keyid",
"(",
"fingerprint",
")",
":",
"if",
"gnupg",
"is",
"None",
":",
"return",
"gpg",
"=",
"gnupg",
".",
"GPG",
"(",
")",
"private_keys",
"=",
"gpg",
".",
"list_keys",
"(",
"True",
")",
"keyid",
"=",
"None",
"for",
"private_key",
"in",
"private_keys",
":",
"if",
"private_key",
"[",
"'fingerprint'",
"]",
"==",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
":",
"keyid",
"=",
"private_key",
"[",
"'keyid'",
"]",
"break",
"return",
"keyid"
] |
Returns keyid from fingerprint for private keys
|
[
"Returns",
"keyid",
"from",
"fingerprint",
"for",
"private",
"keys"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L224-L239
|
236,813
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
sign
|
def sign(filename):
    """Returns detached signature for file"""
    if gnupg is None:
        return
    gpg = gnupg.GPG()
    with open(filename, "rb") as signfile:
        keyid = fingerprint2keyid(config["gpg_key_fingerprint"])
        if keyid is None:
            msg = "No private key for GPG fingerprint '{}'."
            raise ValueError(msg.format(config["gpg_key_fingerprint"]))
        signed_data = gpg.sign_file(signfile, keyid=keyid, detach=True)
    return signed_data
|
python
|
def sign(filename):
    """Returns detached signature for file"""
    if gnupg is None:
        return
    gpg = gnupg.GPG()
    with open(filename, "rb") as signfile:
        keyid = fingerprint2keyid(config["gpg_key_fingerprint"])
        if keyid is None:
            msg = "No private key for GPG fingerprint '{}'."
            raise ValueError(msg.format(config["gpg_key_fingerprint"]))
        signed_data = gpg.sign_file(signfile, keyid=keyid, detach=True)
    return signed_data
|
[
"def",
"sign",
"(",
"filename",
")",
":",
"if",
"gnupg",
"is",
"None",
":",
"return",
"gpg",
"=",
"gnupg",
".",
"GPG",
"(",
")",
"with",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"as",
"signfile",
":",
"keyid",
"=",
"fingerprint2keyid",
"(",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
")",
"if",
"keyid",
"is",
"None",
":",
"msg",
"=",
"\"No private key for GPG fingerprint '{}'.\"",
"raise",
"ValueError",
"(",
"msg",
".",
"format",
"(",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
")",
")",
"signed_data",
"=",
"gpg",
".",
"sign_file",
"(",
"signfile",
",",
"keyid",
"=",
"keyid",
",",
"detach",
"=",
"True",
")",
"return",
"signed_data"
] |
Returns detached signature for file
|
[
"Returns",
"detached",
"signature",
"for",
"file"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L242-L259
|
236,814
|
manns/pyspread
|
pyspread/src/lib/gpg.py
|
verify
|
def verify(sigfilename, filefilename=None):
    """Verifies a signature, returns True if successful else False."""
    if gnupg is None:
        return False
    gpg = gnupg.GPG()
    with open(sigfilename, "rb") as sigfile:
        verified = gpg.verify_file(sigfile, filefilename)
    pyspread_keyid = fingerprint2keyid(config["gpg_key_fingerprint"])
    if verified.valid and verified.key_id == pyspread_keyid:
        return True
    return False
|
python
|
def verify(sigfilename, filefilename=None):
    """Verifies a signature, returns True if successful else False."""
    if gnupg is None:
        return False
    gpg = gnupg.GPG()
    with open(sigfilename, "rb") as sigfile:
        verified = gpg.verify_file(sigfile, filefilename)
    pyspread_keyid = fingerprint2keyid(config["gpg_key_fingerprint"])
    if verified.valid and verified.key_id == pyspread_keyid:
        return True
    return False
|
[
"def",
"verify",
"(",
"sigfilename",
",",
"filefilename",
"=",
"None",
")",
":",
"if",
"gnupg",
"is",
"None",
":",
"return",
"False",
"gpg",
"=",
"gnupg",
".",
"GPG",
"(",
")",
"with",
"open",
"(",
"sigfilename",
",",
"\"rb\"",
")",
"as",
"sigfile",
":",
"verified",
"=",
"gpg",
".",
"verify_file",
"(",
"sigfile",
",",
"filefilename",
")",
"pyspread_keyid",
"=",
"fingerprint2keyid",
"(",
"config",
"[",
"\"gpg_key_fingerprint\"",
"]",
")",
"if",
"verified",
".",
"valid",
"and",
"verified",
".",
"key_id",
"==",
"pyspread_keyid",
":",
"return",
"True",
"return",
"False"
] |
Verifies a signature, returns True if successful else False.
|
[
"Verifies",
"a",
"signature",
"returns",
"True",
"if",
"successful",
"else",
"False",
"."
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/lib/gpg.py#L262-L278
|
236,815
|
manns/pyspread
|
pyspread/src/model/model.py
|
CellAttributes._len_table_cache
|
def _len_table_cache(self):
    """Returns the length of the table cache"""
    length = 0
    for table in self._table_cache:
        length += len(self._table_cache[table])
    return length
|
python
|
def _len_table_cache(self):
    """Returns the length of the table cache"""
    length = 0
    for table in self._table_cache:
        length += len(self._table_cache[table])
    return length
|
[
"def",
"_len_table_cache",
"(",
"self",
")",
":",
"length",
"=",
"0",
"for",
"table",
"in",
"self",
".",
"_table_cache",
":",
"length",
"+=",
"len",
"(",
"self",
".",
"_table_cache",
"[",
"table",
"]",
")",
"return",
"length"
] |
Returns the length of the table cache
|
[
"Returns",
"the",
"length",
"of",
"the",
"table",
"cache"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L250-L258
|
236,816
|
manns/pyspread
|
pyspread/src/model/model.py
|
CellAttributes._update_table_cache
|
def _update_table_cache(self):
    """Clears and updates the table cache to be in sync with self"""
    self._table_cache.clear()
    for sel, tab, val in self:
        try:
            self._table_cache[tab].append((sel, val))
        except KeyError:
            self._table_cache[tab] = [(sel, val)]
    assert len(self) == self._len_table_cache()
|
python
|
def _update_table_cache(self):
    """Clears and updates the table cache to be in sync with self"""
    self._table_cache.clear()
    for sel, tab, val in self:
        try:
            self._table_cache[tab].append((sel, val))
        except KeyError:
            self._table_cache[tab] = [(sel, val)]
    assert len(self) == self._len_table_cache()
|
[
"def",
"_update_table_cache",
"(",
"self",
")",
":",
"self",
".",
"_table_cache",
".",
"clear",
"(",
")",
"for",
"sel",
",",
"tab",
",",
"val",
"in",
"self",
":",
"try",
":",
"self",
".",
"_table_cache",
"[",
"tab",
"]",
".",
"append",
"(",
"(",
"sel",
",",
"val",
")",
")",
"except",
"KeyError",
":",
"self",
".",
"_table_cache",
"[",
"tab",
"]",
"=",
"[",
"(",
"sel",
",",
"val",
")",
"]",
"assert",
"len",
"(",
"self",
")",
"==",
"self",
".",
"_len_table_cache",
"(",
")"
] |
Clears and updates the table cache to be in sync with self
|
[
"Clears",
"and",
"updates",
"the",
"table",
"cache",
"to",
"be",
"in",
"sync",
"with",
"self"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L260-L270
|
236,817
|
manns/pyspread
|
pyspread/src/model/model.py
|
CellAttributes.get_merging_cell
|
def get_merging_cell(self, key):
    """Returns key of cell that merges the cell key
    or None if cell key not merged
    Parameters
    ----------
    key: 3-tuple of Integer
    \tThe key of the cell that is merged
    """
    row, col, tab = key
    # Is cell merged
    merge_area = self[key]["merge_area"]
    if merge_area:
        return merge_area[0], merge_area[1], tab
|
python
|
def get_merging_cell(self, key):
    """Returns key of cell that merges the cell key
    or None if cell key not merged
    Parameters
    ----------
    key: 3-tuple of Integer
    \tThe key of the cell that is merged
    """
    row, col, tab = key
    # Is cell merged
    merge_area = self[key]["merge_area"]
    if merge_area:
        return merge_area[0], merge_area[1], tab
|
[
"def",
"get_merging_cell",
"(",
"self",
",",
"key",
")",
":",
"row",
",",
"col",
",",
"tab",
"=",
"key",
"# Is cell merged",
"merge_area",
"=",
"self",
"[",
"key",
"]",
"[",
"\"merge_area\"",
"]",
"if",
"merge_area",
":",
"return",
"merge_area",
"[",
"0",
"]",
",",
"merge_area",
"[",
"1",
"]",
",",
"tab"
] |
Returns key of cell that merges the cell key
or None if cell key not merged
Parameters
----------
key: 3-tuple of Integer
\tThe key of the cell that is merged
|
[
"Returns",
"key",
"of",
"cell",
"that",
"merges",
"the",
"cell",
"key"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L272-L290
|
236,818
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray._get_data
|
def _get_data(self):
    """Returns dict of data content.
    Keys
    ----
    shape: 3-tuple of Integer
    \tGrid shape
    grid: Dict of 3-tuples to strings
    \tCell content
    attributes: List of 3-tuples
    \tCell attributes
    row_heights: Dict of 2-tuples to float
    \t(row, tab): row_height
    col_widths: Dict of 2-tuples to float
    \t(col, tab): col_width
    macros: String
    \tMacros from macro list
    """
    data = {}
    data["shape"] = self.shape
    data["grid"] = {}.update(self.dict_grid)
    data["attributes"] = [ca for ca in self.cell_attributes]
    data["row_heights"] = self.row_heights
    data["col_widths"] = self.col_widths
    data["macros"] = self.macros
    return data
|
python
|
def _get_data(self):
    """Returns dict of data content.
    Keys
    ----
    shape: 3-tuple of Integer
    \tGrid shape
    grid: Dict of 3-tuples to strings
    \tCell content
    attributes: List of 3-tuples
    \tCell attributes
    row_heights: Dict of 2-tuples to float
    \t(row, tab): row_height
    col_widths: Dict of 2-tuples to float
    \t(col, tab): col_width
    macros: String
    \tMacros from macro list
    """
    data = {}
    data["shape"] = self.shape
    data["grid"] = {}.update(self.dict_grid)
    data["attributes"] = [ca for ca in self.cell_attributes]
    data["row_heights"] = self.row_heights
    data["col_widths"] = self.col_widths
    data["macros"] = self.macros
    return data
|
[
"def",
"_get_data",
"(",
"self",
")",
":",
"data",
"=",
"{",
"}",
"data",
"[",
"\"shape\"",
"]",
"=",
"self",
".",
"shape",
"data",
"[",
"\"grid\"",
"]",
"=",
"{",
"}",
".",
"update",
"(",
"self",
".",
"dict_grid",
")",
"data",
"[",
"\"attributes\"",
"]",
"=",
"[",
"ca",
"for",
"ca",
"in",
"self",
".",
"cell_attributes",
"]",
"data",
"[",
"\"row_heights\"",
"]",
"=",
"self",
".",
"row_heights",
"data",
"[",
"\"col_widths\"",
"]",
"=",
"self",
".",
"col_widths",
"data",
"[",
"\"macros\"",
"]",
"=",
"self",
".",
"macros",
"return",
"data"
] |
Returns dict of data content.
Keys
----
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell content
attributes: List of 3-tuples
\tCell attributes
row_heights: Dict of 2-tuples to float
\t(row, tab): row_height
col_widths: Dict of 2-tuples to float
\t(col, tab): col_width
macros: String
\tMacros from macro list
|
[
"Returns",
"dict",
"of",
"data",
"content",
"."
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L388-L418
|
236,819
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray._set_data
|
def _set_data(self, **kwargs):
"""Sets data from given parameters
Old values are deleted.
If a parameter is not given, nothing is changed.
Parameters
----------
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell content
attributes: List of 3-tuples
\tCell attributes
row_heights: Dict of 2-tuples to float
\t(row, tab): row_height
col_widths: Dict of 2-tuples to float
\t(col, tab): col_width
macros: String
\tMacros from macro list
"""
if "shape" in kwargs:
self.shape = kwargs["shape"]
if "grid" in kwargs:
self.dict_grid.clear()
self.dict_grid.update(kwargs["grid"])
if "attributes" in kwargs:
self.attributes[:] = kwargs["attributes"]
if "row_heights" in kwargs:
self.row_heights = kwargs["row_heights"]
if "col_widths" in kwargs:
self.col_widths = kwargs["col_widths"]
if "macros" in kwargs:
self.macros = kwargs["macros"]
|
python
|
def _set_data(self, **kwargs):
"""Sets data from given parameters
Old values are deleted.
If a parameter is not given, nothing is changed.
Parameters
----------
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell content
attributes: List of 3-tuples
\tCell attributes
row_heights: Dict of 2-tuples to float
\t(row, tab): row_height
col_widths: Dict of 2-tuples to float
\t(col, tab): col_width
macros: String
\tMacros from macro list
"""
if "shape" in kwargs:
self.shape = kwargs["shape"]
if "grid" in kwargs:
self.dict_grid.clear()
self.dict_grid.update(kwargs["grid"])
if "attributes" in kwargs:
self.attributes[:] = kwargs["attributes"]
if "row_heights" in kwargs:
self.row_heights = kwargs["row_heights"]
if "col_widths" in kwargs:
self.col_widths = kwargs["col_widths"]
if "macros" in kwargs:
self.macros = kwargs["macros"]
|
[
"def",
"_set_data",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"\"shape\"",
"in",
"kwargs",
":",
"self",
".",
"shape",
"=",
"kwargs",
"[",
"\"shape\"",
"]",
"if",
"\"grid\"",
"in",
"kwargs",
":",
"self",
".",
"dict_grid",
".",
"clear",
"(",
")",
"self",
".",
"dict_grid",
".",
"update",
"(",
"kwargs",
"[",
"\"grid\"",
"]",
")",
"if",
"\"attributes\"",
"in",
"kwargs",
":",
"self",
".",
"attributes",
"[",
":",
"]",
"=",
"kwargs",
"[",
"\"attributes\"",
"]",
"if",
"\"row_heights\"",
"in",
"kwargs",
":",
"self",
".",
"row_heights",
"=",
"kwargs",
"[",
"\"row_heights\"",
"]",
"if",
"\"col_widths\"",
"in",
"kwargs",
":",
"self",
".",
"col_widths",
"=",
"kwargs",
"[",
"\"col_widths\"",
"]",
"if",
"\"macros\"",
"in",
"kwargs",
":",
"self",
".",
"macros",
"=",
"kwargs",
"[",
"\"macros\"",
"]"
] |
Sets data from given parameters
Old values are deleted.
If a parameter is not given, nothing is changed.
Parameters
----------
shape: 3-tuple of Integer
\tGrid shape
grid: Dict of 3-tuples to strings
\tCell content
attributes: List of 3-tuples
\tCell attributes
row_heights: Dict of 2-tuples to float
\t(row, tab): row_height
col_widths: Dict of 2-tuples to float
\t(col, tab): col_width
macros: String
\tMacros from macro list
|
[
"Sets",
"data",
"from",
"given",
"parameters"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L420-L461
|
236,820
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.get_row_height
|
def get_row_height(self, row, tab):
"""Returns row height"""
try:
return self.row_heights[(row, tab)]
except KeyError:
return config["default_row_height"]
|
python
|
def get_row_height(self, row, tab):
"""Returns row height"""
try:
return self.row_heights[(row, tab)]
except KeyError:
return config["default_row_height"]
|
[
"def",
"get_row_height",
"(",
"self",
",",
"row",
",",
"tab",
")",
":",
"try",
":",
"return",
"self",
".",
"row_heights",
"[",
"(",
"row",
",",
"tab",
")",
"]",
"except",
"KeyError",
":",
"return",
"config",
"[",
"\"default_row_height\"",
"]"
] |
Returns row height
|
[
"Returns",
"row",
"height"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L465-L472
|
236,821
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.get_col_width
|
def get_col_width(self, col, tab):
"""Returns column width"""
try:
return self.col_widths[(col, tab)]
except KeyError:
return config["default_col_width"]
|
python
|
def get_col_width(self, col, tab):
"""Returns column width"""
try:
return self.col_widths[(col, tab)]
except KeyError:
return config["default_col_width"]
|
[
"def",
"get_col_width",
"(",
"self",
",",
"col",
",",
"tab",
")",
":",
"try",
":",
"return",
"self",
".",
"col_widths",
"[",
"(",
"col",
",",
"tab",
")",
"]",
"except",
"KeyError",
":",
"return",
"config",
"[",
"\"default_col_width\"",
"]"
] |
Returns column width
|
[
"Returns",
"column",
"width"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L474-L481
|
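Editor's note: the two size lookups above (get_row_height and get_col_width) use the same try/except-KeyError-with-default pattern. The short Python 3 sketch below expresses it with dict.get, which is an equivalent technique rather than the code actually used; the sizes and the default value are placeholders for the config entries.

row_heights = {(3, 0): 42.0}          # (row, tab) -> height
default_row_height = 23.0             # stand-in for config["default_row_height"]

def get_row_height(row, tab):
    return row_heights.get((row, tab), default_row_height)

print(get_row_height(3, 0))  # 42.0 (explicit height)
print(get_row_height(7, 0))  # 23.0 (falls back to the default)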
236,822
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray._set_shape
|
def _set_shape(self, shape):
"""Deletes all cells beyond new shape and sets dict_grid shape
Parameters
----------
shape: 3-tuple of Integer
\tTarget shape for grid
"""
# Delete each cell that is beyond new borders
old_shape = self.shape
deleted_cells = {}
if any(new_axis < old_axis
for new_axis, old_axis in zip(shape, old_shape)):
for key in self.dict_grid.keys():
if any(key_ele >= new_axis
for key_ele, new_axis in zip(key, shape)):
deleted_cells[key] = self.pop(key)
# Set dict_grid shape attribute
self.dict_grid.shape = shape
self._adjust_rowcol(0, 0, 0)
self._adjust_cell_attributes(0, 0, 0)
# Undo actions
yield "_set_shape"
self.shape = old_shape
for key in deleted_cells:
self[key] = deleted_cells[key]
|
python
|
def _set_shape(self, shape):
"""Deletes all cells beyond new shape and sets dict_grid shape
Parameters
----------
shape: 3-tuple of Integer
\tTarget shape for grid
"""
# Delete each cell that is beyond new borders
old_shape = self.shape
deleted_cells = {}
if any(new_axis < old_axis
for new_axis, old_axis in zip(shape, old_shape)):
for key in self.dict_grid.keys():
if any(key_ele >= new_axis
for key_ele, new_axis in zip(key, shape)):
deleted_cells[key] = self.pop(key)
# Set dict_grid shape attribute
self.dict_grid.shape = shape
self._adjust_rowcol(0, 0, 0)
self._adjust_cell_attributes(0, 0, 0)
# Undo actions
yield "_set_shape"
self.shape = old_shape
for key in deleted_cells:
self[key] = deleted_cells[key]
|
[
"def",
"_set_shape",
"(",
"self",
",",
"shape",
")",
":",
"# Delete each cell that is beyond new borders",
"old_shape",
"=",
"self",
".",
"shape",
"deleted_cells",
"=",
"{",
"}",
"if",
"any",
"(",
"new_axis",
"<",
"old_axis",
"for",
"new_axis",
",",
"old_axis",
"in",
"zip",
"(",
"shape",
",",
"old_shape",
")",
")",
":",
"for",
"key",
"in",
"self",
".",
"dict_grid",
".",
"keys",
"(",
")",
":",
"if",
"any",
"(",
"key_ele",
">=",
"new_axis",
"for",
"key_ele",
",",
"new_axis",
"in",
"zip",
"(",
"key",
",",
"shape",
")",
")",
":",
"deleted_cells",
"[",
"key",
"]",
"=",
"self",
".",
"pop",
"(",
"key",
")",
"# Set dict_grid shape attribute",
"self",
".",
"dict_grid",
".",
"shape",
"=",
"shape",
"self",
".",
"_adjust_rowcol",
"(",
"0",
",",
"0",
",",
"0",
")",
"self",
".",
"_adjust_cell_attributes",
"(",
"0",
",",
"0",
",",
"0",
")",
"# Undo actions",
"yield",
"\"_set_shape\"",
"self",
".",
"shape",
"=",
"old_shape",
"for",
"key",
"in",
"deleted_cells",
":",
"self",
"[",
"key",
"]",
"=",
"deleted_cells",
"[",
"key",
"]"
] |
Deletes all cells beyond new shape and sets dict_grid shape
Parameters
----------
shape: 3-tuple of Integer
\tTarget shape for grid
|
[
"Deletes",
"all",
"cells",
"beyond",
"new",
"shape",
"and",
"sets",
"dict_grid",
"shape"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L568-L603
|
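Editor's note: to make the shape-shrinking step of _set_shape concrete, here is a minimal Python 3 sketch assuming a sparse grid stored as a dict from (row, col, tab) keys to code strings. The function name and sample data are invented, and the undo bookkeeping of the original is left out.

def shrink_grid(grid, new_shape):
    # split the grid into cells kept inside new_shape and cells deleted beyond it
    deleted = {key: val for key, val in grid.items()
               if any(key_ele >= axis for key_ele, axis in zip(key, new_shape))}
    kept = {key: val for key, val in grid.items() if key not in deleted}
    return kept, deleted

grid = {(0, 0, 0): "'a'", (5, 1, 0): "'b'"}
kept, deleted = shrink_grid(grid, (3, 3, 1))
print(kept)     # {(0, 0, 0): "'a'"}
print(deleted)  # {(5, 1, 0): "'b'"}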
236,823
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.get_last_filled_cell
|
def get_last_filled_cell(self, table=None):
"""Returns key for the bottommost rightmost cell with content
Parameters
----------
table: Integer, defaults to None
\tLimit search to this table
"""
maxrow = 0
maxcol = 0
for row, col, tab in self.dict_grid:
if table is None or tab == table:
maxrow = max(row, maxrow)
maxcol = max(col, maxcol)
return maxrow, maxcol, table
|
python
|
def get_last_filled_cell(self, table=None):
"""Returns key for the bottommost rightmost cell with content
Parameters
----------
table: Integer, defaults to None
\tLimit search to this table
"""
maxrow = 0
maxcol = 0
for row, col, tab in self.dict_grid:
if table is None or tab == table:
maxrow = max(row, maxrow)
maxcol = max(col, maxcol)
return maxrow, maxcol, table
|
[
"def",
"get_last_filled_cell",
"(",
"self",
",",
"table",
"=",
"None",
")",
":",
"maxrow",
"=",
"0",
"maxcol",
"=",
"0",
"for",
"row",
",",
"col",
",",
"tab",
"in",
"self",
".",
"dict_grid",
":",
"if",
"table",
"is",
"None",
"or",
"tab",
"==",
"table",
":",
"maxrow",
"=",
"max",
"(",
"row",
",",
"maxrow",
")",
"maxcol",
"=",
"max",
"(",
"col",
",",
"maxcol",
")",
"return",
"maxrow",
",",
"maxcol",
",",
"table"
] |
Returns key for the bottommost rightmost cell with content
Parameters
----------
table: Integer, defaults to None
\tLimit search to this table
|
[
"Returns",
"key",
"for",
"the",
"bottommost",
"rightmost",
"cell",
"with",
"content"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L607-L625
|
236,824
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.cell_array_generator
|
def cell_array_generator(self, key):
"""Generator traversing cells specified in key
Parameters
----------
key: Iterable of Integer or slice
\tThe key specifies the cell keys of the generator
"""
for i, key_ele in enumerate(key):
# Get first element of key that is a slice
if type(key_ele) is SliceType:
slc_keys = xrange(*key_ele.indices(self.dict_grid.shape[i]))
key_list = list(key)
key_list[i] = None
has_subslice = any(type(ele) is SliceType for ele in key_list)
for slc_key in slc_keys:
key_list[i] = slc_key
if has_subslice:
# If there is a slice left yield generator
yield self.cell_array_generator(key_list)
else:
# No slices? Yield value
yield self[tuple(key_list)]
break
|
python
|
def cell_array_generator(self, key):
"""Generator traversing cells specified in key
Parameters
----------
key: Iterable of Integer or slice
\tThe key specifies the cell keys of the generator
"""
for i, key_ele in enumerate(key):
# Get first element of key that is a slice
if type(key_ele) is SliceType:
slc_keys = xrange(*key_ele.indices(self.dict_grid.shape[i]))
key_list = list(key)
key_list[i] = None
has_subslice = any(type(ele) is SliceType for ele in key_list)
for slc_key in slc_keys:
key_list[i] = slc_key
if has_subslice:
# If there is a slice left yield generator
yield self.cell_array_generator(key_list)
else:
# No slices? Yield value
yield self[tuple(key_list)]
break
|
[
"def",
"cell_array_generator",
"(",
"self",
",",
"key",
")",
":",
"for",
"i",
",",
"key_ele",
"in",
"enumerate",
"(",
"key",
")",
":",
"# Get first element of key that is a slice",
"if",
"type",
"(",
"key_ele",
")",
"is",
"SliceType",
":",
"slc_keys",
"=",
"xrange",
"(",
"*",
"key_ele",
".",
"indices",
"(",
"self",
".",
"dict_grid",
".",
"shape",
"[",
"i",
"]",
")",
")",
"key_list",
"=",
"list",
"(",
"key",
")",
"key_list",
"[",
"i",
"]",
"=",
"None",
"has_subslice",
"=",
"any",
"(",
"type",
"(",
"ele",
")",
"is",
"SliceType",
"for",
"ele",
"in",
"key_list",
")",
"for",
"slc_key",
"in",
"slc_keys",
":",
"key_list",
"[",
"i",
"]",
"=",
"slc_key",
"if",
"has_subslice",
":",
"# If there is a slice left yield generator",
"yield",
"self",
".",
"cell_array_generator",
"(",
"key_list",
")",
"else",
":",
"# No slices? Yield value",
"yield",
"self",
"[",
"tuple",
"(",
"key_list",
")",
"]",
"break"
] |
Generator traversing cells specified in key
Parameters
----------
key: Iterable of Integer or slice
\tThe key specifies the cell keys of the generator
|
[
"Generator",
"traversing",
"cells",
"specified",
"in",
"key"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L720-L752
|
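Editor's note: cell_array_generator above yields nested generators when several axes are sliced. The following Python 3 sketch shows the same slice.indices technique in a flattened variant that yields concrete integer keys instead of nested generators; it is an illustrative rewrite, not the method used by the class.

def expand_slices(key, shape):
    # replace the first slice in key by each index it covers, then recurse
    for i, key_ele in enumerate(key):
        if isinstance(key_ele, slice):
            for idx in range(*key_ele.indices(shape[i])):
                sub = list(key)
                sub[i] = idx
                for full_key in expand_slices(tuple(sub), shape):
                    yield full_key
            return
    yield tuple(key)

print(list(expand_slices((slice(0, 2), slice(0, 2), 0), (10, 10, 3))))
# [(0, 0, 0), (0, 1, 0), (1, 0, 0), (1, 1, 0)]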
236,825
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray._shift_rowcol
|
def _shift_rowcol(self, insertion_point, no_to_insert):
"""Shifts row and column sizes when a table is inserted or deleted"""
# Shift row heights
new_row_heights = {}
del_row_heights = []
for row, tab in self.row_heights:
if tab > insertion_point:
new_row_heights[(row, tab + no_to_insert)] = \
self.row_heights[(row, tab)]
del_row_heights.append((row, tab))
for row, tab in new_row_heights:
self.set_row_height(row, tab, new_row_heights[(row, tab)])
for row, tab in del_row_heights:
if (row, tab) not in new_row_heights:
self.set_row_height(row, tab, None)
# Shift column widths
new_col_widths = {}
del_col_widths = []
for col, tab in self.col_widths:
if tab > insertion_point:
new_col_widths[(col, tab + no_to_insert)] = \
self.col_widths[(col, tab)]
del_col_widths.append((col, tab))
for col, tab in new_col_widths:
self.set_col_width(col, tab, new_col_widths[(col, tab)])
for col, tab in del_col_widths:
if (col, tab) not in new_col_widths:
self.set_col_width(col, tab, None)
|
python
|
def _shift_rowcol(self, insertion_point, no_to_insert):
"""Shifts row and column sizes when a table is inserted or deleted"""
# Shift row heights
new_row_heights = {}
del_row_heights = []
for row, tab in self.row_heights:
if tab > insertion_point:
new_row_heights[(row, tab + no_to_insert)] = \
self.row_heights[(row, tab)]
del_row_heights.append((row, tab))
for row, tab in new_row_heights:
self.set_row_height(row, tab, new_row_heights[(row, tab)])
for row, tab in del_row_heights:
if (row, tab) not in new_row_heights:
self.set_row_height(row, tab, None)
# Shift column widths
new_col_widths = {}
del_col_widths = []
for col, tab in self.col_widths:
if tab > insertion_point:
new_col_widths[(col, tab + no_to_insert)] = \
self.col_widths[(col, tab)]
del_col_widths.append((col, tab))
for col, tab in new_col_widths:
self.set_col_width(col, tab, new_col_widths[(col, tab)])
for col, tab in del_col_widths:
if (col, tab) not in new_col_widths:
self.set_col_width(col, tab, None)
|
[
"def",
"_shift_rowcol",
"(",
"self",
",",
"insertion_point",
",",
"no_to_insert",
")",
":",
"# Shift row heights",
"new_row_heights",
"=",
"{",
"}",
"del_row_heights",
"=",
"[",
"]",
"for",
"row",
",",
"tab",
"in",
"self",
".",
"row_heights",
":",
"if",
"tab",
">",
"insertion_point",
":",
"new_row_heights",
"[",
"(",
"row",
",",
"tab",
"+",
"no_to_insert",
")",
"]",
"=",
"self",
".",
"row_heights",
"[",
"(",
"row",
",",
"tab",
")",
"]",
"del_row_heights",
".",
"append",
"(",
"(",
"row",
",",
"tab",
")",
")",
"for",
"row",
",",
"tab",
"in",
"new_row_heights",
":",
"self",
".",
"set_row_height",
"(",
"row",
",",
"tab",
",",
"new_row_heights",
"[",
"(",
"row",
",",
"tab",
")",
"]",
")",
"for",
"row",
",",
"tab",
"in",
"del_row_heights",
":",
"if",
"(",
"row",
",",
"tab",
")",
"not",
"in",
"new_row_heights",
":",
"self",
".",
"set_row_height",
"(",
"row",
",",
"tab",
",",
"None",
")",
"# Shift column widths",
"new_col_widths",
"=",
"{",
"}",
"del_col_widths",
"=",
"[",
"]",
"for",
"col",
",",
"tab",
"in",
"self",
".",
"col_widths",
":",
"if",
"tab",
">",
"insertion_point",
":",
"new_col_widths",
"[",
"(",
"col",
",",
"tab",
"+",
"no_to_insert",
")",
"]",
"=",
"self",
".",
"col_widths",
"[",
"(",
"col",
",",
"tab",
")",
"]",
"del_col_widths",
".",
"append",
"(",
"(",
"col",
",",
"tab",
")",
")",
"for",
"col",
",",
"tab",
"in",
"new_col_widths",
":",
"self",
".",
"set_col_width",
"(",
"col",
",",
"tab",
",",
"new_col_widths",
"[",
"(",
"col",
",",
"tab",
")",
"]",
")",
"for",
"col",
",",
"tab",
"in",
"del_col_widths",
":",
"if",
"(",
"col",
",",
"tab",
")",
"not",
"in",
"new_col_widths",
":",
"self",
".",
"set_col_width",
"(",
"col",
",",
"tab",
",",
"None",
")"
] |
Shifts row and column sizes when a table is inserted or deleted
|
[
"Shifts",
"row",
"and",
"column",
"sizes",
"when",
"a",
"table",
"is",
"inserted",
"or",
"deleted"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L754-L791
|
236,826
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray._get_adjusted_merge_area
|
def _get_adjusted_merge_area(self, attrs, insertion_point, no_to_insert,
axis):
"""Returns updated merge area
Parameters
----------
attrs: Dict
\tCell attribute dictionary that shall be adjusted
insertion_point: Integer
\tPoint on axis, before which insertion takes place
no_to_insert: Integer >= 0
\tNumber of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
\tSpecifies number of dimension, i.e. 0 == row, 1 == col
"""
assert axis in range(2)
if "merge_area" not in attrs or attrs["merge_area"] is None:
return
top, left, bottom, right = attrs["merge_area"]
selection = Selection([(top, left)], [(bottom, right)], [], [], [])
selection.insert(insertion_point, no_to_insert, axis)
__top, __left = selection.block_tl[0]
__bottom, __right = selection.block_br[0]
# Adjust merge area if it is beyond the grid shape
rows, cols, tabs = self.shape
if __top < 0 and __bottom < 0 or __top >= rows and __bottom >= rows or\
__left < 0 and __right < 0 or __left >= cols and __right >= cols:
return
if __top < 0:
__top = 0
if __top >= rows:
__top = rows - 1
if __bottom < 0:
__bottom = 0
if __bottom >= rows:
__bottom = rows - 1
if __left < 0:
__left = 0
if __left >= cols:
__left = cols - 1
if __right < 0:
__right = 0
if __right >= cols:
__right = cols - 1
return __top, __left, __bottom, __right
|
python
|
def _get_adjusted_merge_area(self, attrs, insertion_point, no_to_insert,
axis):
"""Returns updated merge area
Parameters
----------
attrs: Dict
\tCell attribute dictionary that shall be adjusted
insertion_point: Integer
\tPoint on axis, before which insertion takes place
no_to_insert: Integer >= 0
\tNumber of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
\tSpecifies number of dimension, i.e. 0 == row, 1 == col
"""
assert axis in range(2)
if "merge_area" not in attrs or attrs["merge_area"] is None:
return
top, left, bottom, right = attrs["merge_area"]
selection = Selection([(top, left)], [(bottom, right)], [], [], [])
selection.insert(insertion_point, no_to_insert, axis)
__top, __left = selection.block_tl[0]
__bottom, __right = selection.block_br[0]
# Adjust merge area if it is beyond the grid shape
rows, cols, tabs = self.shape
if __top < 0 and __bottom < 0 or __top >= rows and __bottom >= rows or\
__left < 0 and __right < 0 or __left >= cols and __right >= cols:
return
if __top < 0:
__top = 0
if __top >= rows:
__top = rows - 1
if __bottom < 0:
__bottom = 0
if __bottom >= rows:
__bottom = rows - 1
if __left < 0:
__left = 0
if __left >= cols:
__left = cols - 1
if __right < 0:
__right = 0
if __right >= cols:
__right = cols - 1
return __top, __left, __bottom, __right
|
[
"def",
"_get_adjusted_merge_area",
"(",
"self",
",",
"attrs",
",",
"insertion_point",
",",
"no_to_insert",
",",
"axis",
")",
":",
"assert",
"axis",
"in",
"range",
"(",
"2",
")",
"if",
"\"merge_area\"",
"not",
"in",
"attrs",
"or",
"attrs",
"[",
"\"merge_area\"",
"]",
"is",
"None",
":",
"return",
"top",
",",
"left",
",",
"bottom",
",",
"right",
"=",
"attrs",
"[",
"\"merge_area\"",
"]",
"selection",
"=",
"Selection",
"(",
"[",
"(",
"top",
",",
"left",
")",
"]",
",",
"[",
"(",
"bottom",
",",
"right",
")",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"[",
"]",
")",
"selection",
".",
"insert",
"(",
"insertion_point",
",",
"no_to_insert",
",",
"axis",
")",
"__top",
",",
"__left",
"=",
"selection",
".",
"block_tl",
"[",
"0",
"]",
"__bottom",
",",
"__right",
"=",
"selection",
".",
"block_br",
"[",
"0",
"]",
"# Adjust merge area if it is beyond the grid shape",
"rows",
",",
"cols",
",",
"tabs",
"=",
"self",
".",
"shape",
"if",
"__top",
"<",
"0",
"and",
"__bottom",
"<",
"0",
"or",
"__top",
">=",
"rows",
"and",
"__bottom",
">=",
"rows",
"or",
"__left",
"<",
"0",
"and",
"__right",
"<",
"0",
"or",
"__left",
">=",
"cols",
"and",
"__right",
">=",
"cols",
":",
"return",
"if",
"__top",
"<",
"0",
":",
"__top",
"=",
"0",
"if",
"__top",
">=",
"rows",
":",
"__top",
"=",
"rows",
"-",
"1",
"if",
"__bottom",
"<",
"0",
":",
"__bottom",
"=",
"0",
"if",
"__bottom",
">=",
"rows",
":",
"__bottom",
"=",
"rows",
"-",
"1",
"if",
"__left",
"<",
"0",
":",
"__left",
"=",
"0",
"if",
"__left",
">=",
"cols",
":",
"__left",
"=",
"cols",
"-",
"1",
"if",
"__right",
"<",
"0",
":",
"__right",
"=",
"0",
"if",
"__right",
">=",
"cols",
":",
"__right",
"=",
"cols",
"-",
"1",
"return",
"__top",
",",
"__left",
",",
"__bottom",
",",
"__right"
] |
Returns updated merge area
Parameters
----------
attrs: Dict
\tCell attribute dictionary that shall be adjusted
insertion_point: Integer
\tPoint on axis, before which insertion takes place
no_to_insert: Integer >= 0
\tNumber of rows/cols/tabs that shall be inserted
axis: Integer in range(2)
\tSpecifies number of dimension, i.e. 0 == row, 1 == col
|
[
"Returns",
"updated",
"merge",
"area"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L822-L883
|
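Editor's note: the boundary corrections at the end of _get_adjusted_merge_area amount to clamping each coordinate into the valid row/column range. The Python 3 equivalent below is offered only as a reading aid; clamp is not a helper that exists in the module.

def clamp(value, low, high):
    # keep value inside [low, high]
    return max(low, min(value, high))

rows, cols = 1000, 100
top, left, bottom, right = -3, 5, 1200, 99
top, bottom = clamp(top, 0, rows - 1), clamp(bottom, 0, rows - 1)
left, right = clamp(left, 0, cols - 1), clamp(right, 0, cols - 1)
print(top, left, bottom, right)  # 0 5 999 99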
236,827
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.set_row_height
|
def set_row_height(self, row, tab, height):
"""Sets row height"""
try:
old_height = self.row_heights.pop((row, tab))
except KeyError:
old_height = None
if height is not None:
self.row_heights[(row, tab)] = float(height)
|
python
|
def set_row_height(self, row, tab, height):
"""Sets row height"""
try:
old_height = self.row_heights.pop((row, tab))
except KeyError:
old_height = None
if height is not None:
self.row_heights[(row, tab)] = float(height)
|
[
"def",
"set_row_height",
"(",
"self",
",",
"row",
",",
"tab",
",",
"height",
")",
":",
"try",
":",
"old_height",
"=",
"self",
".",
"row_heights",
".",
"pop",
"(",
"(",
"row",
",",
"tab",
")",
")",
"except",
"KeyError",
":",
"old_height",
"=",
"None",
"if",
"height",
"is",
"not",
"None",
":",
"self",
".",
"row_heights",
"[",
"(",
"row",
",",
"tab",
")",
"]",
"=",
"float",
"(",
"height",
")"
] |
Sets row height
|
[
"Sets",
"row",
"height"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1078-L1088
|
236,828
|
manns/pyspread
|
pyspread/src/model/model.py
|
DataArray.set_col_width
|
def set_col_width(self, col, tab, width):
"""Sets column width"""
try:
old_width = self.col_widths.pop((col, tab))
except KeyError:
old_width = None
if width is not None:
self.col_widths[(col, tab)] = float(width)
|
python
|
def set_col_width(self, col, tab, width):
"""Sets column width"""
try:
old_width = self.col_widths.pop((col, tab))
except KeyError:
old_width = None
if width is not None:
self.col_widths[(col, tab)] = float(width)
|
[
"def",
"set_col_width",
"(",
"self",
",",
"col",
",",
"tab",
",",
"width",
")",
":",
"try",
":",
"old_width",
"=",
"self",
".",
"col_widths",
".",
"pop",
"(",
"(",
"col",
",",
"tab",
")",
")",
"except",
"KeyError",
":",
"old_width",
"=",
"None",
"if",
"width",
"is",
"not",
"None",
":",
"self",
".",
"col_widths",
"[",
"(",
"col",
",",
"tab",
")",
"]",
"=",
"float",
"(",
"width",
")"
] |
Sets column width
|
[
"Sets",
"column",
"width"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1090-L1100
|
236,829
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray._make_nested_list
|
def _make_nested_list(self, gen):
"""Makes nested list from generator for creating numpy.array"""
res = []
for ele in gen:
if ele is None:
res.append(None)
elif not is_string_like(ele) and is_generator_like(ele):
# Nested generator
res.append(self._make_nested_list(ele))
else:
res.append(ele)
return res
|
python
|
def _make_nested_list(self, gen):
"""Makes nested list from generator for creating numpy.array"""
res = []
for ele in gen:
if ele is None:
res.append(None)
elif not is_string_like(ele) and is_generator_like(ele):
# Nested generator
res.append(self._make_nested_list(ele))
else:
res.append(ele)
return res
|
[
"def",
"_make_nested_list",
"(",
"self",
",",
"gen",
")",
":",
"res",
"=",
"[",
"]",
"for",
"ele",
"in",
"gen",
":",
"if",
"ele",
"is",
"None",
":",
"res",
".",
"append",
"(",
"None",
")",
"elif",
"not",
"is_string_like",
"(",
"ele",
")",
"and",
"is_generator_like",
"(",
"ele",
")",
":",
"# Nested generator",
"res",
".",
"append",
"(",
"self",
".",
"_make_nested_list",
"(",
"ele",
")",
")",
"else",
":",
"res",
".",
"append",
"(",
"ele",
")",
"return",
"res"
] |
Makes nested list from generator for creating numpy.array
|
[
"Makes",
"nested",
"list",
"from",
"generator",
"for",
"creating",
"numpy",
".",
"array"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1174-L1190
|
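Editor's note: a runnable Python 3 sketch of the idea behind _make_nested_list follows: materialise arbitrarily nested generators into nested lists so numpy.array can build an object array. The isinstance/hasattr checks stand in for the module's is_string_like and is_generator_like helpers, so the predicate is only an approximation.

import numpy

def to_nested_list(gen):
    res = []
    for ele in gen:
        if ele is not None and not isinstance(ele, str) and hasattr(ele, "__iter__"):
            # nested generator (or other iterable): recurse
            res.append(to_nested_list(ele))
        else:
            res.append(ele)
    return res

nested = ((x * y for y in range(3)) for x in range(2))
print(numpy.array(to_nested_list(nested), dtype="O"))
# [[0 0 0]
#  [0 1 2]]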
236,830
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray._get_assignment_target_end
|
def _get_assignment_target_end(self, ast_module):
"""Returns position of 1st char after assignment traget.
If there is no assignment, -1 is returned
If there are more than one of any ( expressions or assigments)
then a ValueError is raised.
"""
if len(ast_module.body) > 1:
raise ValueError("More than one expression or assignment.")
elif len(ast_module.body) > 0 and \
type(ast_module.body[0]) is ast.Assign:
if len(ast_module.body[0].targets) != 1:
raise ValueError("More than one assignment target.")
else:
return len(ast_module.body[0].targets[0].id)
return -1
|
python
|
def _get_assignment_target_end(self, ast_module):
"""Returns position of 1st char after assignment traget.
If there is no assignment, -1 is returned
If there are more than one of any ( expressions or assigments)
then a ValueError is raised.
"""
if len(ast_module.body) > 1:
raise ValueError("More than one expression or assignment.")
elif len(ast_module.body) > 0 and \
type(ast_module.body[0]) is ast.Assign:
if len(ast_module.body[0].targets) != 1:
raise ValueError("More than one assignment target.")
else:
return len(ast_module.body[0].targets[0].id)
return -1
|
[
"def",
"_get_assignment_target_end",
"(",
"self",
",",
"ast_module",
")",
":",
"if",
"len",
"(",
"ast_module",
".",
"body",
")",
">",
"1",
":",
"raise",
"ValueError",
"(",
"\"More than one expression or assignment.\"",
")",
"elif",
"len",
"(",
"ast_module",
".",
"body",
")",
">",
"0",
"and",
"type",
"(",
"ast_module",
".",
"body",
"[",
"0",
"]",
")",
"is",
"ast",
".",
"Assign",
":",
"if",
"len",
"(",
"ast_module",
".",
"body",
"[",
"0",
"]",
".",
"targets",
")",
"!=",
"1",
":",
"raise",
"ValueError",
"(",
"\"More than one assignment target.\"",
")",
"else",
":",
"return",
"len",
"(",
"ast_module",
".",
"body",
"[",
"0",
"]",
".",
"targets",
"[",
"0",
"]",
".",
"id",
")",
"return",
"-",
"1"
] |
Returns position of 1st char after assignment target.
If there is no assignment, -1 is returned
If there is more than one expression or assignment
then a ValueError is raised.
|
[
"Returns",
"position",
"of",
"1st",
"char",
"after",
"assignment",
"traget",
"."
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1192-L1212
|
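Editor's note: the ast-based check in _get_assignment_target_end can be exercised on its own. The Python 3 sketch below reproduces the same logic, with isinstance replacing the type comparison; like the record above, it only handles a single plain-name target.

import ast

def assignment_target_end(code):
    # length of the single assignment target name, or -1 if there is no assignment
    module = ast.parse(code)
    if len(module.body) > 1:
        raise ValueError("More than one expression or assignment.")
    if module.body and isinstance(module.body[0], ast.Assign):
        targets = module.body[0].targets
        if len(targets) != 1:
            raise ValueError("More than one assignment target.")
        return len(targets[0].id)
    return -1

print(assignment_target_end("result = 6 * 7"))  # 6
print(assignment_target_end("6 * 7"))           # -1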
236,831
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray._get_updated_environment
|
def _get_updated_environment(self, env_dict=None):
"""Returns globals environment with 'magic' variable
Parameters
----------
env_dict: Dict, defaults to {'S': self}
\tDict that maps global variable name to value
"""
if env_dict is None:
env_dict = {'S': self}
env = globals().copy()
env.update(env_dict)
return env
|
python
|
def _get_updated_environment(self, env_dict=None):
"""Returns globals environment with 'magic' variable
Parameters
----------
env_dict: Dict, defaults to {'S': self}
\tDict that maps global variable name to value
"""
if env_dict is None:
env_dict = {'S': self}
env = globals().copy()
env.update(env_dict)
return env
|
[
"def",
"_get_updated_environment",
"(",
"self",
",",
"env_dict",
"=",
"None",
")",
":",
"if",
"env_dict",
"is",
"None",
":",
"env_dict",
"=",
"{",
"'S'",
":",
"self",
"}",
"env",
"=",
"globals",
"(",
")",
".",
"copy",
"(",
")",
"env",
".",
"update",
"(",
"env_dict",
")",
"return",
"env"
] |
Returns globals environment with 'magic' variable
Parameters
----------
env_dict: Dict, defaults to {'S': self}
\tDict that maps global variable name to value
|
[
"Returns",
"globals",
"environment",
"with",
"magic",
"variable"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1214-L1230
|
236,832
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray._eval_cell
|
def _eval_cell(self, key, code):
"""Evaluates one cell and returns its result"""
# Flatten helper function
def nn(val):
"""Returns flat numpy arraz without None values"""
try:
return numpy.array(filter(None, val.flat))
except AttributeError:
# Probably no numpy array
return numpy.array(filter(None, val))
# Set up environment for evaluation
env_dict = {'X': key[0], 'Y': key[1], 'Z': key[2], 'bz2': bz2,
'base64': base64, 'charts': charts, 'nn': nn,
'R': key[0], 'C': key[1], 'T': key[2], 'S': self,
'vlcpanel_factory': vlcpanel_factory}
env = self._get_updated_environment(env_dict=env_dict)
#_old_code = self(key)
# Return cell value if in safe mode
if self.safe_mode:
return code
# If cell is not present return None
if code is None:
return
elif is_generator_like(code):
# We have a generator object
return numpy.array(self._make_nested_list(code), dtype="O")
# If only 1 term in front of the "=" --> global
try:
assignment_target_error = None
module = ast.parse(code)
assignment_target_end = self._get_assignment_target_end(module)
except ValueError, err:
assignment_target_error = ValueError(err)
except AttributeError, err:
# Attribute Error includes RunTimeError
assignment_target_error = AttributeError(err)
except Exception, err:
assignment_target_error = Exception(err)
if assignment_target_error is None and assignment_target_end != -1:
glob_var = code[:assignment_target_end]
expression = code.split("=", 1)[1]
expression = expression.strip()
# Delete result cache because assignment changes results
self.result_cache.clear()
else:
glob_var = None
expression = code
if assignment_target_error is not None:
result = assignment_target_error
else:
try:
import signal
signal.signal(signal.SIGALRM, self.handler)
signal.alarm(config["timeout"])
except:
# No POSIX system
pass
try:
result = eval(expression, env, {})
except AttributeError, err:
# Attribute Error includes RunTimeError
result = AttributeError(err)
except RuntimeError, err:
result = RuntimeError(err)
except Exception, err:
result = Exception(err)
finally:
try:
signal.alarm(0)
except:
# No POSIX system
pass
# Change back cell value for evaluation from other cells
#self.dict_grid[key] = _old_code
if glob_var is not None:
globals().update({glob_var: result})
return result
|
python
|
def _eval_cell(self, key, code):
"""Evaluates one cell and returns its result"""
# Flatten helper function
def nn(val):
"""Returns flat numpy arraz without None values"""
try:
return numpy.array(filter(None, val.flat))
except AttributeError:
# Probably no numpy array
return numpy.array(filter(None, val))
# Set up environment for evaluation
env_dict = {'X': key[0], 'Y': key[1], 'Z': key[2], 'bz2': bz2,
'base64': base64, 'charts': charts, 'nn': nn,
'R': key[0], 'C': key[1], 'T': key[2], 'S': self,
'vlcpanel_factory': vlcpanel_factory}
env = self._get_updated_environment(env_dict=env_dict)
#_old_code = self(key)
# Return cell value if in safe mode
if self.safe_mode:
return code
# If cell is not present return None
if code is None:
return
elif is_generator_like(code):
# We have a generator object
return numpy.array(self._make_nested_list(code), dtype="O")
# If only 1 term in front of the "=" --> global
try:
assignment_target_error = None
module = ast.parse(code)
assignment_target_end = self._get_assignment_target_end(module)
except ValueError, err:
assignment_target_error = ValueError(err)
except AttributeError, err:
# Attribute Error includes RunTimeError
assignment_target_error = AttributeError(err)
except Exception, err:
assignment_target_error = Exception(err)
if assignment_target_error is None and assignment_target_end != -1:
glob_var = code[:assignment_target_end]
expression = code.split("=", 1)[1]
expression = expression.strip()
# Delete result cache because assignment changes results
self.result_cache.clear()
else:
glob_var = None
expression = code
if assignment_target_error is not None:
result = assignment_target_error
else:
try:
import signal
signal.signal(signal.SIGALRM, self.handler)
signal.alarm(config["timeout"])
except:
# No POSIX system
pass
try:
result = eval(expression, env, {})
except AttributeError, err:
# Attribute Error includes RunTimeError
result = AttributeError(err)
except RuntimeError, err:
result = RuntimeError(err)
except Exception, err:
result = Exception(err)
finally:
try:
signal.alarm(0)
except:
# No POSIX system
pass
# Change back cell value for evaluation from other cells
#self.dict_grid[key] = _old_code
if glob_var is not None:
globals().update({glob_var: result})
return result
|
[
"def",
"_eval_cell",
"(",
"self",
",",
"key",
",",
"code",
")",
":",
"# Flatten helper function",
"def",
"nn",
"(",
"val",
")",
":",
"\"\"\"Returns flat numpy arraz without None values\"\"\"",
"try",
":",
"return",
"numpy",
".",
"array",
"(",
"filter",
"(",
"None",
",",
"val",
".",
"flat",
")",
")",
"except",
"AttributeError",
":",
"# Probably no numpy array",
"return",
"numpy",
".",
"array",
"(",
"filter",
"(",
"None",
",",
"val",
")",
")",
"# Set up environment for evaluation",
"env_dict",
"=",
"{",
"'X'",
":",
"key",
"[",
"0",
"]",
",",
"'Y'",
":",
"key",
"[",
"1",
"]",
",",
"'Z'",
":",
"key",
"[",
"2",
"]",
",",
"'bz2'",
":",
"bz2",
",",
"'base64'",
":",
"base64",
",",
"'charts'",
":",
"charts",
",",
"'nn'",
":",
"nn",
",",
"'R'",
":",
"key",
"[",
"0",
"]",
",",
"'C'",
":",
"key",
"[",
"1",
"]",
",",
"'T'",
":",
"key",
"[",
"2",
"]",
",",
"'S'",
":",
"self",
",",
"'vlcpanel_factory'",
":",
"vlcpanel_factory",
"}",
"env",
"=",
"self",
".",
"_get_updated_environment",
"(",
"env_dict",
"=",
"env_dict",
")",
"#_old_code = self(key)",
"# Return cell value if in safe mode",
"if",
"self",
".",
"safe_mode",
":",
"return",
"code",
"# If cell is not present return None",
"if",
"code",
"is",
"None",
":",
"return",
"elif",
"is_generator_like",
"(",
"code",
")",
":",
"# We have a generator object",
"return",
"numpy",
".",
"array",
"(",
"self",
".",
"_make_nested_list",
"(",
"code",
")",
",",
"dtype",
"=",
"\"O\"",
")",
"# If only 1 term in front of the \"=\" --> global",
"try",
":",
"assignment_target_error",
"=",
"None",
"module",
"=",
"ast",
".",
"parse",
"(",
"code",
")",
"assignment_target_end",
"=",
"self",
".",
"_get_assignment_target_end",
"(",
"module",
")",
"except",
"ValueError",
",",
"err",
":",
"assignment_target_error",
"=",
"ValueError",
"(",
"err",
")",
"except",
"AttributeError",
",",
"err",
":",
"# Attribute Error includes RunTimeError",
"assignment_target_error",
"=",
"AttributeError",
"(",
"err",
")",
"except",
"Exception",
",",
"err",
":",
"assignment_target_error",
"=",
"Exception",
"(",
"err",
")",
"if",
"assignment_target_error",
"is",
"None",
"and",
"assignment_target_end",
"!=",
"-",
"1",
":",
"glob_var",
"=",
"code",
"[",
":",
"assignment_target_end",
"]",
"expression",
"=",
"code",
".",
"split",
"(",
"\"=\"",
",",
"1",
")",
"[",
"1",
"]",
"expression",
"=",
"expression",
".",
"strip",
"(",
")",
"# Delete result cache because assignment changes results",
"self",
".",
"result_cache",
".",
"clear",
"(",
")",
"else",
":",
"glob_var",
"=",
"None",
"expression",
"=",
"code",
"if",
"assignment_target_error",
"is",
"not",
"None",
":",
"result",
"=",
"assignment_target_error",
"else",
":",
"try",
":",
"import",
"signal",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGALRM",
",",
"self",
".",
"handler",
")",
"signal",
".",
"alarm",
"(",
"config",
"[",
"\"timeout\"",
"]",
")",
"except",
":",
"# No POSIX system",
"pass",
"try",
":",
"result",
"=",
"eval",
"(",
"expression",
",",
"env",
",",
"{",
"}",
")",
"except",
"AttributeError",
",",
"err",
":",
"# Attribute Error includes RunTimeError",
"result",
"=",
"AttributeError",
"(",
"err",
")",
"except",
"RuntimeError",
",",
"err",
":",
"result",
"=",
"RuntimeError",
"(",
"err",
")",
"except",
"Exception",
",",
"err",
":",
"result",
"=",
"Exception",
"(",
"err",
")",
"finally",
":",
"try",
":",
"signal",
".",
"alarm",
"(",
"0",
")",
"except",
":",
"# No POSIX system",
"pass",
"# Change back cell value for evaluation from other cells",
"#self.dict_grid[key] = _old_code",
"if",
"glob_var",
"is",
"not",
"None",
":",
"globals",
"(",
")",
".",
"update",
"(",
"{",
"glob_var",
":",
"result",
"}",
")",
"return",
"result"
] |
Evaluates one cell and returns its result
|
[
"Evaluates",
"one",
"cell",
"and",
"returns",
"its",
"result"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1232-L1340
|
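Editor's note: the core of _eval_cell is splitting an optional assignment target off the code string and evaluating the remaining expression against an injected environment. The much-reduced Python 3 sketch below shows only that idea; the environment contents are placeholders, and the timeout, caching and safe-mode handling of the original are omitted.

env = {"X": 2, "Y": 3, "Z": 0}            # stand-ins for the cell coordinates
code = "answer = X * Y + Z"

target, expression = (part.strip() for part in code.split("=", 1))
result = eval(expression, {"__builtins__": {}}, env)
print(target, "->", result)               # answer -> 6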
236,833
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray.pop
|
def pop(self, key):
"""Pops dict_grid with undo and redo support
Parameters
----------
key: 3-tuple of Integer
\tCell key that shall be popped
"""
try:
self.result_cache.pop(repr(key))
except KeyError:
pass
return DataArray.pop(self, key)
|
python
|
def pop(self, key):
"""Pops dict_grid with undo and redo support
Parameters
----------
key: 3-tuple of Integer
\tCell key that shall be popped
"""
try:
self.result_cache.pop(repr(key))
except KeyError:
pass
return DataArray.pop(self, key)
|
[
"def",
"pop",
"(",
"self",
",",
"key",
")",
":",
"try",
":",
"self",
".",
"result_cache",
".",
"pop",
"(",
"repr",
"(",
"key",
")",
")",
"except",
"KeyError",
":",
"pass",
"return",
"DataArray",
".",
"pop",
"(",
"self",
",",
"key",
")"
] |
Pops dict_grid with undo and redo support
Parameters
----------
key: 3-tuple of Integer
\tCell key that shall be popped
|
[
"Pops",
"dict_grid",
"with",
"undo",
"and",
"redo",
"support"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1342-L1358
|
236,834
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray.reload_modules
|
def reload_modules(self):
"""Reloads modules that are available in cells"""
import src.lib.charts as charts
from src.gui.grid_panels import vlcpanel_factory
modules = [charts, bz2, base64, re, ast, sys, wx, numpy, datetime]
for module in modules:
reload(module)
|
python
|
def reload_modules(self):
"""Reloads modules that are available in cells"""
import src.lib.charts as charts
from src.gui.grid_panels import vlcpanel_factory
modules = [charts, bz2, base64, re, ast, sys, wx, numpy, datetime]
for module in modules:
reload(module)
|
[
"def",
"reload_modules",
"(",
"self",
")",
":",
"import",
"src",
".",
"lib",
".",
"charts",
"as",
"charts",
"from",
"src",
".",
"gui",
".",
"grid_panels",
"import",
"vlcpanel_factory",
"modules",
"=",
"[",
"charts",
",",
"bz2",
",",
"base64",
",",
"re",
",",
"ast",
",",
"sys",
",",
"wx",
",",
"numpy",
",",
"datetime",
"]",
"for",
"module",
"in",
"modules",
":",
"reload",
"(",
"module",
")"
] |
Reloads modules that are available in cells
|
[
"Reloads",
"modules",
"that",
"are",
"available",
"in",
"cells"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1360-L1368
|
236,835
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray.clear_globals
|
def clear_globals(self):
"""Clears all newly assigned globals"""
base_keys = ['cStringIO', 'IntType', 'KeyValueStore', 'undoable',
'is_generator_like', 'is_string_like', 'bz2', 'base64',
'__package__', 're', 'config', '__doc__', 'SliceType',
'CellAttributes', 'product', 'ast', '__builtins__',
'__file__', 'charts', 'sys', 'is_slice_like', '__name__',
'copy', 'imap', 'wx', 'ifilter', 'Selection', 'DictGrid',
'numpy', 'CodeArray', 'DataArray', 'datetime',
'vlcpanel_factory']
for key in globals().keys():
if key not in base_keys:
globals().pop(key)
|
python
|
def clear_globals(self):
"""Clears all newly assigned globals"""
base_keys = ['cStringIO', 'IntType', 'KeyValueStore', 'undoable',
'is_generator_like', 'is_string_like', 'bz2', 'base64',
'__package__', 're', 'config', '__doc__', 'SliceType',
'CellAttributes', 'product', 'ast', '__builtins__',
'__file__', 'charts', 'sys', 'is_slice_like', '__name__',
'copy', 'imap', 'wx', 'ifilter', 'Selection', 'DictGrid',
'numpy', 'CodeArray', 'DataArray', 'datetime',
'vlcpanel_factory']
for key in globals().keys():
if key not in base_keys:
globals().pop(key)
|
[
"def",
"clear_globals",
"(",
"self",
")",
":",
"base_keys",
"=",
"[",
"'cStringIO'",
",",
"'IntType'",
",",
"'KeyValueStore'",
",",
"'undoable'",
",",
"'is_generator_like'",
",",
"'is_string_like'",
",",
"'bz2'",
",",
"'base64'",
",",
"'__package__'",
",",
"'re'",
",",
"'config'",
",",
"'__doc__'",
",",
"'SliceType'",
",",
"'CellAttributes'",
",",
"'product'",
",",
"'ast'",
",",
"'__builtins__'",
",",
"'__file__'",
",",
"'charts'",
",",
"'sys'",
",",
"'is_slice_like'",
",",
"'__name__'",
",",
"'copy'",
",",
"'imap'",
",",
"'wx'",
",",
"'ifilter'",
",",
"'Selection'",
",",
"'DictGrid'",
",",
"'numpy'",
",",
"'CodeArray'",
",",
"'DataArray'",
",",
"'datetime'",
",",
"'vlcpanel_factory'",
"]",
"for",
"key",
"in",
"globals",
"(",
")",
".",
"keys",
"(",
")",
":",
"if",
"key",
"not",
"in",
"base_keys",
":",
"globals",
"(",
")",
".",
"pop",
"(",
"key",
")"
] |
Clears all newly assigned globals
|
[
"Clears",
"all",
"newly",
"assigned",
"globals"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1370-L1384
|
236,836
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray.execute_macros
|
def execute_macros(self):
"""Executes all macros and returns result string
Executes macros only when not in safe_mode
"""
if self.safe_mode:
return '', "Safe mode activated. Code not executed."
# Windows exec does not like Windows newline
self.macros = self.macros.replace('\r\n', '\n')
# Set up environment for evaluation
globals().update(self._get_updated_environment())
# Create file-like string to capture output
code_out = cStringIO.StringIO()
code_err = cStringIO.StringIO()
err_msg = cStringIO.StringIO()
# Capture output and errors
sys.stdout = code_out
sys.stderr = code_err
try:
import signal
signal.signal(signal.SIGALRM, self.handler)
signal.alarm(config["timeout"])
except:
# No POSIX system
pass
try:
exec(self.macros, globals())
try:
signal.alarm(0)
except:
# No POSIX system
pass
except Exception:
# Print exception
# (Because of how the globals are handled during execution
# we must import modules here)
from traceback import print_exception
from src.lib.exception_handling import get_user_codeframe
exc_info = sys.exc_info()
user_tb = get_user_codeframe(exc_info[2]) or exc_info[2]
print_exception(exc_info[0], exc_info[1], user_tb, None, err_msg)
# Restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
results = code_out.getvalue()
errs = code_err.getvalue() + err_msg.getvalue()
code_out.close()
code_err.close()
# Reset result cache
self.result_cache.clear()
# Reset frozen cache
self.frozen_cache.clear()
return results, errs
|
python
|
def execute_macros(self):
"""Executes all macros and returns result string
Executes macros only when not in safe_mode
"""
if self.safe_mode:
return '', "Safe mode activated. Code not executed."
# Windows exec does not like Windows newline
self.macros = self.macros.replace('\r\n', '\n')
# Set up environment for evaluation
globals().update(self._get_updated_environment())
# Create file-like string to capture output
code_out = cStringIO.StringIO()
code_err = cStringIO.StringIO()
err_msg = cStringIO.StringIO()
# Capture output and errors
sys.stdout = code_out
sys.stderr = code_err
try:
import signal
signal.signal(signal.SIGALRM, self.handler)
signal.alarm(config["timeout"])
except:
# No POSIX system
pass
try:
exec(self.macros, globals())
try:
signal.alarm(0)
except:
# No POSIX system
pass
except Exception:
# Print exception
# (Because of how the globals are handled during execution
# we must import modules here)
from traceback import print_exception
from src.lib.exception_handling import get_user_codeframe
exc_info = sys.exc_info()
user_tb = get_user_codeframe(exc_info[2]) or exc_info[2]
print_exception(exc_info[0], exc_info[1], user_tb, None, err_msg)
# Restore stdout and stderr
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
results = code_out.getvalue()
errs = code_err.getvalue() + err_msg.getvalue()
code_out.close()
code_err.close()
# Reset result cache
self.result_cache.clear()
# Reset frozen cache
self.frozen_cache.clear()
return results, errs
|
[
"def",
"execute_macros",
"(",
"self",
")",
":",
"if",
"self",
".",
"safe_mode",
":",
"return",
"''",
",",
"\"Safe mode activated. Code not executed.\"",
"# Windows exec does not like Windows newline",
"self",
".",
"macros",
"=",
"self",
".",
"macros",
".",
"replace",
"(",
"'\\r\\n'",
",",
"'\\n'",
")",
"# Set up environment for evaluation",
"globals",
"(",
")",
".",
"update",
"(",
"self",
".",
"_get_updated_environment",
"(",
")",
")",
"# Create file-like string to capture output",
"code_out",
"=",
"cStringIO",
".",
"StringIO",
"(",
")",
"code_err",
"=",
"cStringIO",
".",
"StringIO",
"(",
")",
"err_msg",
"=",
"cStringIO",
".",
"StringIO",
"(",
")",
"# Capture output and errors",
"sys",
".",
"stdout",
"=",
"code_out",
"sys",
".",
"stderr",
"=",
"code_err",
"try",
":",
"import",
"signal",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGALRM",
",",
"self",
".",
"handler",
")",
"signal",
".",
"alarm",
"(",
"config",
"[",
"\"timeout\"",
"]",
")",
"except",
":",
"# No POSIX system",
"pass",
"try",
":",
"exec",
"(",
"self",
".",
"macros",
",",
"globals",
"(",
")",
")",
"try",
":",
"signal",
".",
"alarm",
"(",
"0",
")",
"except",
":",
"# No POSIX system",
"pass",
"except",
"Exception",
":",
"# Print exception",
"# (Because of how the globals are handled during execution",
"# we must import modules here)",
"from",
"traceback",
"import",
"print_exception",
"from",
"src",
".",
"lib",
".",
"exception_handling",
"import",
"get_user_codeframe",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"user_tb",
"=",
"get_user_codeframe",
"(",
"exc_info",
"[",
"2",
"]",
")",
"or",
"exc_info",
"[",
"2",
"]",
"print_exception",
"(",
"exc_info",
"[",
"0",
"]",
",",
"exc_info",
"[",
"1",
"]",
",",
"user_tb",
",",
"None",
",",
"err_msg",
")",
"# Restore stdout and stderr",
"sys",
".",
"stdout",
"=",
"sys",
".",
"__stdout__",
"sys",
".",
"stderr",
"=",
"sys",
".",
"__stderr__",
"results",
"=",
"code_out",
".",
"getvalue",
"(",
")",
"errs",
"=",
"code_err",
".",
"getvalue",
"(",
")",
"+",
"err_msg",
".",
"getvalue",
"(",
")",
"code_out",
".",
"close",
"(",
")",
"code_err",
".",
"close",
"(",
")",
"# Reset result cache",
"self",
".",
"result_cache",
".",
"clear",
"(",
")",
"# Reset frozen cache",
"self",
".",
"frozen_cache",
".",
"clear",
"(",
")",
"return",
"results",
",",
"errs"
] |
Executes all macros and returns result string
Executes macros only when not in safe_mode
|
[
"Executes",
"all",
"macros",
"and",
"returns",
"result",
"string"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1391-L1459
|
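Editor's note: execute_macros redirects sys.stdout/sys.stderr into string buffers around an exec call. The Python 3 sketch below shows the same capture technique using io.StringIO with contextlib.redirect_stdout/redirect_stderr, which replace the cStringIO buffers and manual sys reassignment of the Python 2 record; the macro string is a placeholder.

import contextlib
import io

macros = "print('hello from a macro')"
out, err = io.StringIO(), io.StringIO()
with contextlib.redirect_stdout(out), contextlib.redirect_stderr(err):
    exec(macros, {})

print(repr(out.getvalue()))  # 'hello from a macro\n'
print(repr(err.getvalue()))  # ''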
236,837
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray._sorted_keys
|
def _sorted_keys(self, keys, startkey, reverse=False):
"""Generator that yields sorted keys starting with startkey
Parameters
----------
keys: Iterable of tuple/list
\tKey sequence that is sorted
startkey: Tuple/list
\tFirst key to be yielded
reverse: Bool
\tSort direction reversed if True
"""
tuple_key = lambda t: t[::-1]
if reverse:
tuple_cmp = lambda t: t[::-1] > startkey[::-1]
else:
tuple_cmp = lambda t: t[::-1] < startkey[::-1]
searchkeys = sorted(keys, key=tuple_key, reverse=reverse)
searchpos = sum(1 for _ in ifilter(tuple_cmp, searchkeys))
searchkeys = searchkeys[searchpos:] + searchkeys[:searchpos]
for key in searchkeys:
yield key
|
python
|
def _sorted_keys(self, keys, startkey, reverse=False):
"""Generator that yields sorted keys starting with startkey
Parameters
----------
keys: Iterable of tuple/list
\tKey sequence that is sorted
startkey: Tuple/list
\tFirst key to be yielded
reverse: Bool
\tSort direction reversed if True
"""
tuple_key = lambda t: t[::-1]
if reverse:
tuple_cmp = lambda t: t[::-1] > startkey[::-1]
else:
tuple_cmp = lambda t: t[::-1] < startkey[::-1]
searchkeys = sorted(keys, key=tuple_key, reverse=reverse)
searchpos = sum(1 for _ in ifilter(tuple_cmp, searchkeys))
searchkeys = searchkeys[searchpos:] + searchkeys[:searchpos]
for key in searchkeys:
yield key
|
[
"def",
"_sorted_keys",
"(",
"self",
",",
"keys",
",",
"startkey",
",",
"reverse",
"=",
"False",
")",
":",
"tuple_key",
"=",
"lambda",
"t",
":",
"t",
"[",
":",
":",
"-",
"1",
"]",
"if",
"reverse",
":",
"tuple_cmp",
"=",
"lambda",
"t",
":",
"t",
"[",
":",
":",
"-",
"1",
"]",
">",
"startkey",
"[",
":",
":",
"-",
"1",
"]",
"else",
":",
"tuple_cmp",
"=",
"lambda",
"t",
":",
"t",
"[",
":",
":",
"-",
"1",
"]",
"<",
"startkey",
"[",
":",
":",
"-",
"1",
"]",
"searchkeys",
"=",
"sorted",
"(",
"keys",
",",
"key",
"=",
"tuple_key",
",",
"reverse",
"=",
"reverse",
")",
"searchpos",
"=",
"sum",
"(",
"1",
"for",
"_",
"in",
"ifilter",
"(",
"tuple_cmp",
",",
"searchkeys",
")",
")",
"searchkeys",
"=",
"searchkeys",
"[",
"searchpos",
":",
"]",
"+",
"searchkeys",
"[",
":",
"searchpos",
"]",
"for",
"key",
"in",
"searchkeys",
":",
"yield",
"key"
] |
Generator that yields sorted keys starting with startkey
Parameters
----------
keys: Iterable of tuple/list
\tKey sequence that is sorted
startkey: Tuple/list
\tFirst key to be yielded
reverse: Bool
\tSort direction reversed if True
|
[
"Generator",
"that",
"yields",
"sorted",
"keys",
"starting",
"with",
"startkey"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1461-L1488
|
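Editor's note: _sorted_keys sorts keys by their reversed tuples (table, column, row order) and then rotates the list so iteration starts at startkey. A compact Python 3 sketch of the same technique, with list comprehensions in place of itertools.ifilter:

def sorted_keys(keys, startkey, reverse=False):
    ordered = sorted(keys, key=lambda t: t[::-1], reverse=reverse)
    if reverse:
        before = [t for t in ordered if t[::-1] > startkey[::-1]]
    else:
        before = [t for t in ordered if t[::-1] < startkey[::-1]]
    pos = len(before)
    # rotate so that startkey's position comes first
    return ordered[pos:] + ordered[:pos]

keys = [(0, 0, 0), (2, 1, 0), (1, 0, 1)]
print(sorted_keys(keys, (2, 1, 0)))  # [(2, 1, 0), (1, 0, 1), (0, 0, 0)]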
236,838
|
manns/pyspread
|
pyspread/src/model/model.py
|
CodeArray.findnextmatch
|
def findnextmatch(self, startkey, find_string, flags, search_result=True):
""" Returns a tuple with the position of the next match of find_string
Returns None if string not found.
Parameters:
-----------
startkey: Start position of search
find_string:String to be searched for
flags: List of strings, out of
["UP" xor "DOWN", "WHOLE_WORD", "MATCH_CASE", "REG_EXP"]
search_result: Bool, defaults to True
\tIf True then the search includes the result string (slower)
"""
assert "UP" in flags or "DOWN" in flags
assert not ("UP" in flags and "DOWN" in flags)
if search_result:
def is_matching(key, find_string, flags):
code = self(key)
if self.string_match(code, find_string, flags) is not None:
return True
else:
res_str = unicode(self[key])
return self.string_match(res_str, find_string, flags) \
is not None
else:
def is_matching(code, find_string, flags):
code = self(key)
return self.string_match(code, find_string, flags) is not None
# List of keys in sgrid in search order
reverse = "UP" in flags
for key in self._sorted_keys(self.keys(), startkey, reverse=reverse):
try:
if is_matching(key, find_string, flags):
return key
except Exception:
# re errors are cryptic: sre_constants,...
pass
|
python
|
def findnextmatch(self, startkey, find_string, flags, search_result=True):
""" Returns a tuple with the position of the next match of find_string
Returns None if string not found.
Parameters:
-----------
startkey: Start position of search
find_string:String to be searched for
flags: List of strings, out of
["UP" xor "DOWN", "WHOLE_WORD", "MATCH_CASE", "REG_EXP"]
search_result: Bool, defaults to True
\tIf True then the search includes the result string (slower)
"""
assert "UP" in flags or "DOWN" in flags
assert not ("UP" in flags and "DOWN" in flags)
if search_result:
def is_matching(key, find_string, flags):
code = self(key)
if self.string_match(code, find_string, flags) is not None:
return True
else:
res_str = unicode(self[key])
return self.string_match(res_str, find_string, flags) \
is not None
else:
def is_matching(code, find_string, flags):
code = self(key)
return self.string_match(code, find_string, flags) is not None
# List of keys in sgrid in search order
reverse = "UP" in flags
for key in self._sorted_keys(self.keys(), startkey, reverse=reverse):
try:
if is_matching(key, find_string, flags):
return key
except Exception:
# re errors are cryptic: sre_constants,...
pass
|
[
"def",
"findnextmatch",
"(",
"self",
",",
"startkey",
",",
"find_string",
",",
"flags",
",",
"search_result",
"=",
"True",
")",
":",
"assert",
"\"UP\"",
"in",
"flags",
"or",
"\"DOWN\"",
"in",
"flags",
"assert",
"not",
"(",
"\"UP\"",
"in",
"flags",
"and",
"\"DOWN\"",
"in",
"flags",
")",
"if",
"search_result",
":",
"def",
"is_matching",
"(",
"key",
",",
"find_string",
",",
"flags",
")",
":",
"code",
"=",
"self",
"(",
"key",
")",
"if",
"self",
".",
"string_match",
"(",
"code",
",",
"find_string",
",",
"flags",
")",
"is",
"not",
"None",
":",
"return",
"True",
"else",
":",
"res_str",
"=",
"unicode",
"(",
"self",
"[",
"key",
"]",
")",
"return",
"self",
".",
"string_match",
"(",
"res_str",
",",
"find_string",
",",
"flags",
")",
"is",
"not",
"None",
"else",
":",
"def",
"is_matching",
"(",
"code",
",",
"find_string",
",",
"flags",
")",
":",
"code",
"=",
"self",
"(",
"key",
")",
"return",
"self",
".",
"string_match",
"(",
"code",
",",
"find_string",
",",
"flags",
")",
"is",
"not",
"None",
"# List of keys in sgrid in search order",
"reverse",
"=",
"\"UP\"",
"in",
"flags",
"for",
"key",
"in",
"self",
".",
"_sorted_keys",
"(",
"self",
".",
"keys",
"(",
")",
",",
"startkey",
",",
"reverse",
"=",
"reverse",
")",
":",
"try",
":",
"if",
"is_matching",
"(",
"key",
",",
"find_string",
",",
"flags",
")",
":",
"return",
"key",
"except",
"Exception",
":",
"# re errors are cryptical: sre_constants,...",
"pass"
] |
Returns a tuple with the position of the next match of find_string
Returns None if string not found.
Parameters:
-----------
startkey: Start position of search
find_string:String to be searched for
flags: List of strings, out of
["UP" xor "DOWN", "WHOLE_WORD", "MATCH_CASE", "REG_EXP"]
search_result: Bool, defaults to True
\tIf True then the search includes the result string (slower)
|
[
"Returns",
"a",
"tuple",
"with",
"the",
"position",
"of",
"the",
"next",
"match",
"of",
"find_string"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/model/model.py#L1532-L1577
|
236,839
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
IntValidator.Validate
|
def Validate(self, win):
"""Returns True if Value in digits, False otherwise"""
val = self.GetWindow().GetValue()
for x in val:
if x not in string.digits:
return False
return True
|
python
|
def Validate(self, win):
"""Returns True if Value in digits, False otherwise"""
val = self.GetWindow().GetValue()
for x in val:
if x not in string.digits:
return False
return True
|
[
"def",
"Validate",
"(",
"self",
",",
"win",
")",
":",
"val",
"=",
"self",
".",
"GetWindow",
"(",
")",
".",
"GetValue",
"(",
")",
"for",
"x",
"in",
"val",
":",
"if",
"x",
"not",
"in",
"string",
".",
"digits",
":",
"return",
"False",
"return",
"True"
] |
Returns True if Value in digits, False otherwise
|
[
"Returns",
"True",
"if",
"Value",
"in",
"digits",
"False",
"otherwise"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L99-L108
|
236,840
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
IntValidator.OnChar
|
def OnChar(self, event):
"""Eats event if key not in digits"""
key = event.GetKeyCode()
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or \
chr(key) in string.digits:
event.Skip()
|
python
|
def OnChar(self, event):
"""Eats event if key not in digits"""
key = event.GetKeyCode()
if key < wx.WXK_SPACE or key == wx.WXK_DELETE or key > 255 or \
chr(key) in string.digits:
event.Skip()
|
[
"def",
"OnChar",
"(",
"self",
",",
"event",
")",
":",
"key",
"=",
"event",
".",
"GetKeyCode",
"(",
")",
"if",
"key",
"<",
"wx",
".",
"WXK_SPACE",
"or",
"key",
"==",
"wx",
".",
"WXK_DELETE",
"or",
"key",
">",
"255",
"or",
"chr",
"(",
"key",
")",
"in",
"string",
".",
"digits",
":",
"event",
".",
"Skip",
"(",
")"
] |
Eats event if key not in digits
|
[
"Eats",
"event",
"if",
"key",
"not",
"in",
"digits"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L110-L117
|
236,841
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
ChoiceRenderer.Draw
|
def Draw(self, grid, attr, dc, rect, row, col, is_selected):
"""Draws the text and the combobox icon"""
render = wx.RendererNative.Get()
# clear the background
dc.SetBackgroundMode(wx.SOLID)
if is_selected:
dc.SetBrush(wx.Brush(wx.BLUE, wx.SOLID))
dc.SetPen(wx.Pen(wx.BLUE, 1, wx.SOLID))
else:
dc.SetBrush(wx.Brush(wx.WHITE, wx.SOLID))
dc.SetPen(wx.Pen(wx.WHITE, 1, wx.SOLID))
dc.DrawRectangleRect(rect)
cb_lbl = grid.GetCellValue(row, col)
string_x = rect.x + 2
string_y = rect.y + 2
dc.DrawText(cb_lbl, string_x, string_y)
button_x = rect.x + rect.width - self.iconwidth
button_y = rect.y
button_width = self.iconwidth
button_height = rect.height
button_size = button_x, button_y, button_width, button_height
render.DrawComboBoxDropButton(grid, dc, button_size,
wx.CONTROL_CURRENT)
|
python
|
def Draw(self, grid, attr, dc, rect, row, col, is_selected):
"""Draws the text and the combobox icon"""
render = wx.RendererNative.Get()
# clear the background
dc.SetBackgroundMode(wx.SOLID)
if is_selected:
dc.SetBrush(wx.Brush(wx.BLUE, wx.SOLID))
dc.SetPen(wx.Pen(wx.BLUE, 1, wx.SOLID))
else:
dc.SetBrush(wx.Brush(wx.WHITE, wx.SOLID))
dc.SetPen(wx.Pen(wx.WHITE, 1, wx.SOLID))
dc.DrawRectangleRect(rect)
cb_lbl = grid.GetCellValue(row, col)
string_x = rect.x + 2
string_y = rect.y + 2
dc.DrawText(cb_lbl, string_x, string_y)
button_x = rect.x + rect.width - self.iconwidth
button_y = rect.y
button_width = self.iconwidth
button_height = rect.height
button_size = button_x, button_y, button_width, button_height
render.DrawComboBoxDropButton(grid, dc, button_size,
wx.CONTROL_CURRENT)
|
[
"def",
"Draw",
"(",
"self",
",",
"grid",
",",
"attr",
",",
"dc",
",",
"rect",
",",
"row",
",",
"col",
",",
"is_selected",
")",
":",
"render",
"=",
"wx",
".",
"RendererNative",
".",
"Get",
"(",
")",
"# clear the background",
"dc",
".",
"SetBackgroundMode",
"(",
"wx",
".",
"SOLID",
")",
"if",
"is_selected",
":",
"dc",
".",
"SetBrush",
"(",
"wx",
".",
"Brush",
"(",
"wx",
".",
"BLUE",
",",
"wx",
".",
"SOLID",
")",
")",
"dc",
".",
"SetPen",
"(",
"wx",
".",
"Pen",
"(",
"wx",
".",
"BLUE",
",",
"1",
",",
"wx",
".",
"SOLID",
")",
")",
"else",
":",
"dc",
".",
"SetBrush",
"(",
"wx",
".",
"Brush",
"(",
"wx",
".",
"WHITE",
",",
"wx",
".",
"SOLID",
")",
")",
"dc",
".",
"SetPen",
"(",
"wx",
".",
"Pen",
"(",
"wx",
".",
"WHITE",
",",
"1",
",",
"wx",
".",
"SOLID",
")",
")",
"dc",
".",
"DrawRectangleRect",
"(",
"rect",
")",
"cb_lbl",
"=",
"grid",
".",
"GetCellValue",
"(",
"row",
",",
"col",
")",
"string_x",
"=",
"rect",
".",
"x",
"+",
"2",
"string_y",
"=",
"rect",
".",
"y",
"+",
"2",
"dc",
".",
"DrawText",
"(",
"cb_lbl",
",",
"string_x",
",",
"string_y",
")",
"button_x",
"=",
"rect",
".",
"x",
"+",
"rect",
".",
"width",
"-",
"self",
".",
"iconwidth",
"button_y",
"=",
"rect",
".",
"y",
"button_width",
"=",
"self",
".",
"iconwidth",
"button_height",
"=",
"rect",
".",
"height",
"button_size",
"=",
"button_x",
",",
"button_y",
",",
"button_width",
",",
"button_height",
"render",
".",
"DrawComboBoxDropButton",
"(",
"grid",
",",
"dc",
",",
"button_size",
",",
"wx",
".",
"CONTROL_CURRENT",
")"
] |
Draws the text and the combobox icon
|
[
"Draws",
"the",
"text",
"and",
"the",
"combobox",
"icon"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L140-L167
|
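A minimal sketch of how a custom cell renderer like the ChoiceRenderer above is typically attached to a wxPython grid. The grid, row, and col names are hypothetical, and the ChoiceRenderer constructor argument is assumed from the surrounding pyspread code rather than confirmed by it.

# Hypothetical wiring of a custom renderer to a wx.grid cell (illustrative only)
renderer = ChoiceRenderer(grid)            # constructor argument assumed
grid.SetCellRenderer(row, col, renderer)   # standard wx.grid.Grid API
grid.ForceRefresh()                        # redraw so Draw() gets invoked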
236,842
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets._setup_param_widgets
|
def _setup_param_widgets(self):
"""Creates the parameter entry widgets and binds them to methods"""
for parameter in self.csv_params:
pname, ptype, plabel, phelp = parameter
label = wx.StaticText(self.parent, -1, plabel)
widget = self.type2widget[ptype](self.parent)
# Append choicebox items and bind handler
if pname in self.choices:
widget.AppendItems(self.choices[pname])
widget.SetValue = widget.Select
widget.SetSelection(0)
# Bind event handler to widget
if ptype is types.StringType or ptype is types.UnicodeType:
event_type = wx.EVT_TEXT
elif ptype is types.BooleanType:
event_type = wx.EVT_CHECKBOX
else:
event_type = wx.EVT_CHOICE
handler = getattr(self, self.widget_handlers[pname])
self.parent.Bind(event_type, handler, widget)
# Tool tips
label.SetToolTipString(phelp)
widget.SetToolTipString(phelp)
label.__name__ = wx.StaticText.__name__.lower()
widget.__name__ = self.type2widget[ptype].__name__.lower()
self.param_labels.append(label)
self.param_widgets.append(widget)
self.__setattr__("_".join([label.__name__, pname]), label)
self.__setattr__("_".join([widget.__name__, pname]), widget)
|
python
|
def _setup_param_widgets(self):
"""Creates the parameter entry widgets and binds them to methods"""
for parameter in self.csv_params:
pname, ptype, plabel, phelp = parameter
label = wx.StaticText(self.parent, -1, plabel)
widget = self.type2widget[ptype](self.parent)
# Append choicebox items and bind handler
if pname in self.choices:
widget.AppendItems(self.choices[pname])
widget.SetValue = widget.Select
widget.SetSelection(0)
# Bind event handler to widget
if ptype is types.StringType or ptype is types.UnicodeType:
event_type = wx.EVT_TEXT
elif ptype is types.BooleanType:
event_type = wx.EVT_CHECKBOX
else:
event_type = wx.EVT_CHOICE
handler = getattr(self, self.widget_handlers[pname])
self.parent.Bind(event_type, handler, widget)
# Tool tips
label.SetToolTipString(phelp)
widget.SetToolTipString(phelp)
label.__name__ = wx.StaticText.__name__.lower()
widget.__name__ = self.type2widget[ptype].__name__.lower()
self.param_labels.append(label)
self.param_widgets.append(widget)
self.__setattr__("_".join([label.__name__, pname]), label)
self.__setattr__("_".join([widget.__name__, pname]), widget)
|
[
"def",
"_setup_param_widgets",
"(",
"self",
")",
":",
"for",
"parameter",
"in",
"self",
".",
"csv_params",
":",
"pname",
",",
"ptype",
",",
"plabel",
",",
"phelp",
"=",
"parameter",
"label",
"=",
"wx",
".",
"StaticText",
"(",
"self",
".",
"parent",
",",
"-",
"1",
",",
"plabel",
")",
"widget",
"=",
"self",
".",
"type2widget",
"[",
"ptype",
"]",
"(",
"self",
".",
"parent",
")",
"# Append choicebox items and bind handler",
"if",
"pname",
"in",
"self",
".",
"choices",
":",
"widget",
".",
"AppendItems",
"(",
"self",
".",
"choices",
"[",
"pname",
"]",
")",
"widget",
".",
"SetValue",
"=",
"widget",
".",
"Select",
"widget",
".",
"SetSelection",
"(",
"0",
")",
"# Bind event handler to widget",
"if",
"ptype",
"is",
"types",
".",
"StringType",
"or",
"ptype",
"is",
"types",
".",
"UnicodeType",
":",
"event_type",
"=",
"wx",
".",
"EVT_TEXT",
"elif",
"ptype",
"is",
"types",
".",
"BooleanType",
":",
"event_type",
"=",
"wx",
".",
"EVT_CHECKBOX",
"else",
":",
"event_type",
"=",
"wx",
".",
"EVT_CHOICE",
"handler",
"=",
"getattr",
"(",
"self",
",",
"self",
".",
"widget_handlers",
"[",
"pname",
"]",
")",
"self",
".",
"parent",
".",
"Bind",
"(",
"event_type",
",",
"handler",
",",
"widget",
")",
"# Tool tips",
"label",
".",
"SetToolTipString",
"(",
"phelp",
")",
"widget",
".",
"SetToolTipString",
"(",
"phelp",
")",
"label",
".",
"__name__",
"=",
"wx",
".",
"StaticText",
".",
"__name__",
".",
"lower",
"(",
")",
"widget",
".",
"__name__",
"=",
"self",
".",
"type2widget",
"[",
"ptype",
"]",
".",
"__name__",
".",
"lower",
"(",
")",
"self",
".",
"param_labels",
".",
"append",
"(",
"label",
")",
"self",
".",
"param_widgets",
".",
"append",
"(",
"widget",
")",
"self",
".",
"__setattr__",
"(",
"\"_\"",
".",
"join",
"(",
"[",
"label",
".",
"__name__",
",",
"pname",
"]",
")",
",",
"label",
")",
"self",
".",
"__setattr__",
"(",
"\"_\"",
".",
"join",
"(",
"[",
"widget",
".",
"__name__",
",",
"pname",
"]",
")",
",",
"widget",
")"
] |
Creates the parameter entry widgets and binds them to methods
|
[
"Creates",
"the",
"parameter",
"entry",
"widgets",
"and",
"binds",
"them",
"to",
"methods"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L289-L326
|
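The _setup_param_widgets record above relies on a type2widget mapping and a widget_handlers mapping defined elsewhere in the class. The Python 2 sketch below shows what such a type-to-widget-factory mapping could look like; its contents are inferred from the branches in the method, not copied from the source.

import types
import wx

# Assumed shape of the mapping used by _setup_param_widgets (illustrative only)
type2widget = {
    types.StringType: wx.TextCtrl,    # text parameters  -> wx.EVT_TEXT
    types.UnicodeType: wx.TextCtrl,
    types.BooleanType: wx.CheckBox,   # boolean parameters -> wx.EVT_CHECKBOX
    types.TupleType: wx.Choice,       # enumerated parameters -> wx.EVT_CHOICE
}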
236,843
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets._do_layout
|
def _do_layout(self):
"""Sizer hell, returns a sizer that contains all widgets"""
sizer_csvoptions = wx.FlexGridSizer(5, 4, 5, 5)
# Adding parameter widgets to sizer_csvoptions
leftpos = wx.LEFT | wx.ADJUST_MINSIZE
rightpos = wx.RIGHT | wx.EXPAND
current_label_margin = 0 # smaller for left column
other_label_margin = 15
for label, widget in zip(self.param_labels, self.param_widgets):
sizer_csvoptions.Add(label, 0, leftpos, current_label_margin)
sizer_csvoptions.Add(widget, 0, rightpos, current_label_margin)
current_label_margin, other_label_margin = \
other_label_margin, current_label_margin
sizer_csvoptions.AddGrowableCol(1)
sizer_csvoptions.AddGrowableCol(3)
self.sizer_csvoptions = sizer_csvoptions
|
python
|
def _do_layout(self):
"""Sizer hell, returns a sizer that contains all widgets"""
sizer_csvoptions = wx.FlexGridSizer(5, 4, 5, 5)
# Adding parameter widgets to sizer_csvoptions
leftpos = wx.LEFT | wx.ADJUST_MINSIZE
rightpos = wx.RIGHT | wx.EXPAND
current_label_margin = 0 # smaller for left column
other_label_margin = 15
for label, widget in zip(self.param_labels, self.param_widgets):
sizer_csvoptions.Add(label, 0, leftpos, current_label_margin)
sizer_csvoptions.Add(widget, 0, rightpos, current_label_margin)
current_label_margin, other_label_margin = \
other_label_margin, current_label_margin
sizer_csvoptions.AddGrowableCol(1)
sizer_csvoptions.AddGrowableCol(3)
self.sizer_csvoptions = sizer_csvoptions
|
[
"def",
"_do_layout",
"(",
"self",
")",
":",
"sizer_csvoptions",
"=",
"wx",
".",
"FlexGridSizer",
"(",
"5",
",",
"4",
",",
"5",
",",
"5",
")",
"# Adding parameter widgets to sizer_csvoptions",
"leftpos",
"=",
"wx",
".",
"LEFT",
"|",
"wx",
".",
"ADJUST_MINSIZE",
"rightpos",
"=",
"wx",
".",
"RIGHT",
"|",
"wx",
".",
"EXPAND",
"current_label_margin",
"=",
"0",
"# smaller for left column",
"other_label_margin",
"=",
"15",
"for",
"label",
",",
"widget",
"in",
"zip",
"(",
"self",
".",
"param_labels",
",",
"self",
".",
"param_widgets",
")",
":",
"sizer_csvoptions",
".",
"Add",
"(",
"label",
",",
"0",
",",
"leftpos",
",",
"current_label_margin",
")",
"sizer_csvoptions",
".",
"Add",
"(",
"widget",
",",
"0",
",",
"rightpos",
",",
"current_label_margin",
")",
"current_label_margin",
",",
"other_label_margin",
"=",
"other_label_margin",
",",
"current_label_margin",
"sizer_csvoptions",
".",
"AddGrowableCol",
"(",
"1",
")",
"sizer_csvoptions",
".",
"AddGrowableCol",
"(",
"3",
")",
"self",
".",
"sizer_csvoptions",
"=",
"sizer_csvoptions"
] |
Sizer hell, returns a sizer that contains all widgets
|
[
"Sizer",
"hell",
"returns",
"a",
"sizer",
"that",
"contains",
"all",
"widgets"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L328-L350
|
236,844
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets._update_settings
|
def _update_settings(self, dialect):
"""Sets the widget settings to those of the chosen dialect"""
# the first parameter is the dialect itself --> ignore
for parameter in self.csv_params[2:]:
pname, ptype, plabel, phelp = parameter
widget = self._widget_from_p(pname, ptype)
if ptype is types.TupleType:
ptype = types.ObjectType
digest = Digest(acceptable_types=[ptype])
if pname == 'self.has_header':
if self.has_header is not None:
widget.SetValue(digest(self.has_header))
else:
value = getattr(dialect, pname)
widget.SetValue(digest(value))
|
python
|
def _update_settings(self, dialect):
"""Sets the widget settings to those of the chosen dialect"""
# the first parameter is the dialect itself --> ignore
for parameter in self.csv_params[2:]:
pname, ptype, plabel, phelp = parameter
widget = self._widget_from_p(pname, ptype)
if ptype is types.TupleType:
ptype = types.ObjectType
digest = Digest(acceptable_types=[ptype])
if pname == 'self.has_header':
if self.has_header is not None:
widget.SetValue(digest(self.has_header))
else:
value = getattr(dialect, pname)
widget.SetValue(digest(value))
|
[
"def",
"_update_settings",
"(",
"self",
",",
"dialect",
")",
":",
"# the first parameter is the dialect itself --> ignore",
"for",
"parameter",
"in",
"self",
".",
"csv_params",
"[",
"2",
":",
"]",
":",
"pname",
",",
"ptype",
",",
"plabel",
",",
"phelp",
"=",
"parameter",
"widget",
"=",
"self",
".",
"_widget_from_p",
"(",
"pname",
",",
"ptype",
")",
"if",
"ptype",
"is",
"types",
".",
"TupleType",
":",
"ptype",
"=",
"types",
".",
"ObjectType",
"digest",
"=",
"Digest",
"(",
"acceptable_types",
"=",
"[",
"ptype",
"]",
")",
"if",
"pname",
"==",
"'self.has_header'",
":",
"if",
"self",
".",
"has_header",
"is",
"not",
"None",
":",
"widget",
".",
"SetValue",
"(",
"digest",
"(",
"self",
".",
"has_header",
")",
")",
"else",
":",
"value",
"=",
"getattr",
"(",
"dialect",
",",
"pname",
")",
"widget",
".",
"SetValue",
"(",
"digest",
"(",
"value",
")",
")"
] |
Sets the widget settings to those of the chosen dialect
|
[
"Sets",
"the",
"widget",
"settings",
"to",
"those",
"of",
"the",
"chosen",
"dialect"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L352-L371
|
236,845
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets._widget_from_p
|
def _widget_from_p(self, pname, ptype):
"""Returns a widget from its ptype and pname"""
widget_name = self.type2widget[ptype].__name__.lower()
widget_name = "_".join([widget_name, pname])
return getattr(self, widget_name)
|
python
|
def _widget_from_p(self, pname, ptype):
"""Returns a widget from its ptype and pname"""
widget_name = self.type2widget[ptype].__name__.lower()
widget_name = "_".join([widget_name, pname])
return getattr(self, widget_name)
|
[
"def",
"_widget_from_p",
"(",
"self",
",",
"pname",
",",
"ptype",
")",
":",
"widget_name",
"=",
"self",
".",
"type2widget",
"[",
"ptype",
"]",
".",
"__name__",
".",
"lower",
"(",
")",
"widget_name",
"=",
"\"_\"",
".",
"join",
"(",
"[",
"widget_name",
",",
"pname",
"]",
")",
"return",
"getattr",
"(",
"self",
",",
"widget_name",
")"
] |
Returns a widget from its ptype and pname
|
[
"Returns",
"a",
"widget",
"from",
"its",
"ptype",
"and",
"pname"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L373-L378
|
236,846
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets.OnDialectChoice
|
def OnDialectChoice(self, event):
"""Updates all param widgets confirming to the selcted dialect"""
dialect_name = event.GetString()
value = list(self.choices['dialects']).index(dialect_name)
if dialect_name == 'sniffer':
if self.csvfilepath is None:
event.Skip()
return None
dialect, self.has_header = sniff(self.csvfilepath)
elif dialect_name == 'user':
event.Skip()
return None
else:
dialect = csv.get_dialect(dialect_name)
self._update_settings(dialect)
self.choice_dialects.SetValue(value)
|
python
|
def OnDialectChoice(self, event):
"""Updates all param widgets confirming to the selcted dialect"""
dialect_name = event.GetString()
value = list(self.choices['dialects']).index(dialect_name)
if dialect_name == 'sniffer':
if self.csvfilepath is None:
event.Skip()
return None
dialect, self.has_header = sniff(self.csvfilepath)
elif dialect_name == 'user':
event.Skip()
return None
else:
dialect = csv.get_dialect(dialect_name)
self._update_settings(dialect)
self.choice_dialects.SetValue(value)
|
[
"def",
"OnDialectChoice",
"(",
"self",
",",
"event",
")",
":",
"dialect_name",
"=",
"event",
".",
"GetString",
"(",
")",
"value",
"=",
"list",
"(",
"self",
".",
"choices",
"[",
"'dialects'",
"]",
")",
".",
"index",
"(",
"dialect_name",
")",
"if",
"dialect_name",
"==",
"'sniffer'",
":",
"if",
"self",
".",
"csvfilepath",
"is",
"None",
":",
"event",
".",
"Skip",
"(",
")",
"return",
"None",
"dialect",
",",
"self",
".",
"has_header",
"=",
"sniff",
"(",
"self",
".",
"csvfilepath",
")",
"elif",
"dialect_name",
"==",
"'user'",
":",
"event",
".",
"Skip",
"(",
")",
"return",
"None",
"else",
":",
"dialect",
"=",
"csv",
".",
"get_dialect",
"(",
"dialect_name",
")",
"self",
".",
"_update_settings",
"(",
"dialect",
")",
"self",
".",
"choice_dialects",
".",
"SetValue",
"(",
"value",
")"
] |
Updates all param widgets conforming to the selected dialect
|
[
"Updates",
"all",
"param",
"widgets",
"confirming",
"to",
"the",
"selcted",
"dialect"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L386-L405
|
236,847
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets.OnWidget
|
def OnWidget(self, event):
"""Update the dialect widget to 'user'"""
self.choice_dialects.SetValue(len(self.choices['dialects']) - 1)
event.Skip()
|
python
|
def OnWidget(self, event):
"""Update the dialect widget to 'user'"""
self.choice_dialects.SetValue(len(self.choices['dialects']) - 1)
event.Skip()
|
[
"def",
"OnWidget",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"choice_dialects",
".",
"SetValue",
"(",
"len",
"(",
"self",
".",
"choices",
"[",
"'dialects'",
"]",
")",
"-",
"1",
")",
"event",
".",
"Skip",
"(",
")"
] |
Update the dialect widget to 'user'
|
[
"Update",
"the",
"dialect",
"widget",
"to",
"user"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L407-L411
|
236,848
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvParameterWidgets.get_dialect
|
def get_dialect(self):
"""Returns a new dialect that implements the current selection"""
parameters = {}
for parameter in self.csv_params[2:]:
pname, ptype, plabel, phelp = parameter
widget = self._widget_from_p(pname, ptype)
if ptype is types.StringType or ptype is types.UnicodeType:
parameters[pname] = str(widget.GetValue())
elif ptype is types.BooleanType:
parameters[pname] = widget.GetValue()
elif pname == 'quoting':
choice = self.choices['quoting'][widget.GetSelection()]
parameters[pname] = getattr(csv, choice)
else:
raise TypeError(_("{type} unknown.").format(type=ptype))
has_header = parameters.pop("self.has_header")
try:
csv.register_dialect('user', **parameters)
except TypeError, err:
msg = _("The dialect is invalid. \n "
"\nError message:\n{msg}").format(msg=err)
dlg = wx.MessageDialog(self.parent, msg, style=wx.ID_CANCEL)
dlg.ShowModal()
dlg.Destroy()
raise TypeError(err)
return csv.get_dialect('user'), has_header
|
python
|
def get_dialect(self):
"""Returns a new dialect that implements the current selection"""
parameters = {}
for parameter in self.csv_params[2:]:
pname, ptype, plabel, phelp = parameter
widget = self._widget_from_p(pname, ptype)
if ptype is types.StringType or ptype is types.UnicodeType:
parameters[pname] = str(widget.GetValue())
elif ptype is types.BooleanType:
parameters[pname] = widget.GetValue()
elif pname == 'quoting':
choice = self.choices['quoting'][widget.GetSelection()]
parameters[pname] = getattr(csv, choice)
else:
raise TypeError(_("{type} unknown.").format(type=ptype))
has_header = parameters.pop("self.has_header")
try:
csv.register_dialect('user', **parameters)
except TypeError, err:
msg = _("The dialect is invalid. \n "
"\nError message:\n{msg}").format(msg=err)
dlg = wx.MessageDialog(self.parent, msg, style=wx.ID_CANCEL)
dlg.ShowModal()
dlg.Destroy()
raise TypeError(err)
return csv.get_dialect('user'), has_header
|
[
"def",
"get_dialect",
"(",
"self",
")",
":",
"parameters",
"=",
"{",
"}",
"for",
"parameter",
"in",
"self",
".",
"csv_params",
"[",
"2",
":",
"]",
":",
"pname",
",",
"ptype",
",",
"plabel",
",",
"phelp",
"=",
"parameter",
"widget",
"=",
"self",
".",
"_widget_from_p",
"(",
"pname",
",",
"ptype",
")",
"if",
"ptype",
"is",
"types",
".",
"StringType",
"or",
"ptype",
"is",
"types",
".",
"UnicodeType",
":",
"parameters",
"[",
"pname",
"]",
"=",
"str",
"(",
"widget",
".",
"GetValue",
"(",
")",
")",
"elif",
"ptype",
"is",
"types",
".",
"BooleanType",
":",
"parameters",
"[",
"pname",
"]",
"=",
"widget",
".",
"GetValue",
"(",
")",
"elif",
"pname",
"==",
"'quoting'",
":",
"choice",
"=",
"self",
".",
"choices",
"[",
"'quoting'",
"]",
"[",
"widget",
".",
"GetSelection",
"(",
")",
"]",
"parameters",
"[",
"pname",
"]",
"=",
"getattr",
"(",
"csv",
",",
"choice",
")",
"else",
":",
"raise",
"TypeError",
"(",
"_",
"(",
"\"{type} unknown.\"",
")",
".",
"format",
"(",
"type",
"=",
"ptype",
")",
")",
"has_header",
"=",
"parameters",
".",
"pop",
"(",
"\"self.has_header\"",
")",
"try",
":",
"csv",
".",
"register_dialect",
"(",
"'user'",
",",
"*",
"*",
"parameters",
")",
"except",
"TypeError",
",",
"err",
":",
"msg",
"=",
"_",
"(",
"\"The dialect is invalid. \\n \"",
"\"\\nError message:\\n{msg}\"",
")",
".",
"format",
"(",
"msg",
"=",
"err",
")",
"dlg",
"=",
"wx",
".",
"MessageDialog",
"(",
"self",
".",
"parent",
",",
"msg",
",",
"style",
"=",
"wx",
".",
"ID_CANCEL",
")",
"dlg",
".",
"ShowModal",
"(",
")",
"dlg",
".",
"Destroy",
"(",
")",
"raise",
"TypeError",
"(",
"err",
")",
"return",
"csv",
".",
"get_dialect",
"(",
"'user'",
")",
",",
"has_header"
] |
Returns a new dialect that implements the current selection
|
[
"Returns",
"a",
"new",
"dialect",
"that",
"implements",
"the",
"current",
"selection"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L413-L446
|
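The get_dialect record above registers the collected parameters as a 'user' dialect through the standard library csv module. A standalone sketch of that pattern, independent of the dialog code, follows; the parameter values and the file name are examples only.

import csv

# Register a custom dialect under a name, then reuse it by name (example values)
csv.register_dialect('user',
                     delimiter=';',
                     quotechar='"',
                     doublequote=True,
                     skipinitialspace=False,
                     lineterminator='\r\n',
                     quoting=csv.QUOTE_MINIMAL)

dialect = csv.get_dialect('user')

with open('data.csv', 'rb') as csvfile:   # 'rb' because the code above targets Python 2
    for row in csv.reader(csvfile, dialect='user'):
        pass  # each row is a list of strings parsed with the registered dialect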
236,849
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CSVPreviewGrid.OnMouse
|
def OnMouse(self, event):
"""Reduces clicks to enter an edit control"""
self.SetGridCursor(event.Row, event.Col)
self.EnableCellEditControl(True)
event.Skip()
|
python
|
def OnMouse(self, event):
"""Reduces clicks to enter an edit control"""
self.SetGridCursor(event.Row, event.Col)
self.EnableCellEditControl(True)
event.Skip()
|
[
"def",
"OnMouse",
"(",
"self",
",",
"event",
")",
":",
"self",
".",
"SetGridCursor",
"(",
"event",
".",
"Row",
",",
"event",
".",
"Col",
")",
"self",
".",
"EnableCellEditControl",
"(",
"True",
")",
"event",
".",
"Skip",
"(",
")"
] |
Reduces clicks to enter an edit control
|
[
"Reduces",
"clicks",
"to",
"enter",
"an",
"edit",
"control"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L487-L492
|
236,850
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CSVPreviewGrid.OnGridEditorCreated
|
def OnGridEditorCreated(self, event):
"""Used to capture Editor close events"""
editor = event.GetControl()
editor.Bind(wx.EVT_KILL_FOCUS, self.OnGridEditorClosed)
event.Skip()
|
python
|
def OnGridEditorCreated(self, event):
"""Used to capture Editor close events"""
editor = event.GetControl()
editor.Bind(wx.EVT_KILL_FOCUS, self.OnGridEditorClosed)
event.Skip()
|
[
"def",
"OnGridEditorCreated",
"(",
"self",
",",
"event",
")",
":",
"editor",
"=",
"event",
".",
"GetControl",
"(",
")",
"editor",
".",
"Bind",
"(",
"wx",
".",
"EVT_KILL_FOCUS",
",",
"self",
".",
"OnGridEditorClosed",
")",
"event",
".",
"Skip",
"(",
")"
] |
Used to capture Editor close events
|
[
"Used",
"to",
"capture",
"Editor",
"close",
"events"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L499-L505
|
236,851
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CSVPreviewGrid.OnGridEditorClosed
|
def OnGridEditorClosed(self, event):
"""Event handler for end of output type choice"""
try:
dialect, self.has_header = \
self.parent.csvwidgets.get_dialect()
except TypeError:
event.Skip()
return 0
self.fill_cells(dialect, self.has_header, choices=False)
|
python
|
def OnGridEditorClosed(self, event):
"""Event handler for end of output type choice"""
try:
dialect, self.has_header = \
self.parent.csvwidgets.get_dialect()
except TypeError:
event.Skip()
return 0
self.fill_cells(dialect, self.has_header, choices=False)
|
[
"def",
"OnGridEditorClosed",
"(",
"self",
",",
"event",
")",
":",
"try",
":",
"dialect",
",",
"self",
".",
"has_header",
"=",
"self",
".",
"parent",
".",
"csvwidgets",
".",
"get_dialect",
"(",
")",
"except",
"TypeError",
":",
"event",
".",
"Skip",
"(",
")",
"return",
"0",
"self",
".",
"fill_cells",
"(",
"dialect",
",",
"self",
".",
"has_header",
",",
"choices",
"=",
"False",
")"
] |
Event handler for end of output type choice
|
[
"Event",
"handler",
"for",
"end",
"of",
"output",
"type",
"choice"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L507-L517
|
236,852
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CSVPreviewGrid.get_digest_keys
|
def get_digest_keys(self):
"""Returns a list of the type choices"""
digest_keys = []
for col in xrange(self.GetNumberCols()):
digest_key = self.GetCellValue(self.has_header, col)
if digest_key == "":
digest_key = self.digest_types.keys()[0]
digest_keys.append(digest_key)
return digest_keys
|
python
|
def get_digest_keys(self):
"""Returns a list of the type choices"""
digest_keys = []
for col in xrange(self.GetNumberCols()):
digest_key = self.GetCellValue(self.has_header, col)
if digest_key == "":
digest_key = self.digest_types.keys()[0]
digest_keys.append(digest_key)
return digest_keys
|
[
"def",
"get_digest_keys",
"(",
"self",
")",
":",
"digest_keys",
"=",
"[",
"]",
"for",
"col",
"in",
"xrange",
"(",
"self",
".",
"GetNumberCols",
"(",
")",
")",
":",
"digest_key",
"=",
"self",
".",
"GetCellValue",
"(",
"self",
".",
"has_header",
",",
"col",
")",
"if",
"digest_key",
"==",
"\"\"",
":",
"digest_key",
"=",
"self",
".",
"digest_types",
".",
"keys",
"(",
")",
"[",
"0",
"]",
"digest_keys",
".",
"append",
"(",
"digest_key",
")",
"return",
"digest_keys"
] |
Returns a list of the type choices
|
[
"Returns",
"a",
"list",
"of",
"the",
"type",
"choices"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L580-L590
|
236,853
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CsvExportDialog.OnButtonApply
|
def OnButtonApply(self, event):
"""Updates the preview_textctrl"""
try:
dialect, self.has_header = self.csvwidgets.get_dialect()
except TypeError:
event.Skip()
return 0
self.preview_textctrl.fill(data=self.data, dialect=dialect)
event.Skip()
|
python
|
def OnButtonApply(self, event):
"""Updates the preview_textctrl"""
try:
dialect, self.has_header = self.csvwidgets.get_dialect()
except TypeError:
event.Skip()
return 0
self.preview_textctrl.fill(data=self.data, dialect=dialect)
event.Skip()
|
[
"def",
"OnButtonApply",
"(",
"self",
",",
"event",
")",
":",
"try",
":",
"dialect",
",",
"self",
".",
"has_header",
"=",
"self",
".",
"csvwidgets",
".",
"get_dialect",
"(",
")",
"except",
"TypeError",
":",
"event",
".",
"Skip",
"(",
")",
"return",
"0",
"self",
".",
"preview_textctrl",
".",
"fill",
"(",
"data",
"=",
"self",
".",
"data",
",",
"dialect",
"=",
"dialect",
")",
"event",
".",
"Skip",
"(",
")"
] |
Updates the preview_textctrl
|
[
"Updates",
"the",
"preview_textctrl"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L785-L796
|
236,854
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
MacroPanel._set_properties
|
def _set_properties(self):
"""Setup title, size and tooltips"""
self.codetext_ctrl.SetToolTipString(_("Enter python code here."))
self.apply_button.SetToolTipString(_("Apply changes to current macro"))
self.splitter.SetBackgroundStyle(wx.BG_STYLE_COLOUR)
self.result_ctrl.SetMinSize((10, 10))
|
python
|
def _set_properties(self):
"""Setup title, size and tooltips"""
self.codetext_ctrl.SetToolTipString(_("Enter python code here."))
self.apply_button.SetToolTipString(_("Apply changes to current macro"))
self.splitter.SetBackgroundStyle(wx.BG_STYLE_COLOUR)
self.result_ctrl.SetMinSize((10, 10))
|
[
"def",
"_set_properties",
"(",
"self",
")",
":",
"self",
".",
"codetext_ctrl",
".",
"SetToolTipString",
"(",
"_",
"(",
"\"Enter python code here.\"",
")",
")",
"self",
".",
"apply_button",
".",
"SetToolTipString",
"(",
"_",
"(",
"\"Apply changes to current macro\"",
")",
")",
"self",
".",
"splitter",
".",
"SetBackgroundStyle",
"(",
"wx",
".",
"BG_STYLE_COLOUR",
")",
"self",
".",
"result_ctrl",
".",
"SetMinSize",
"(",
"(",
"10",
",",
"10",
")",
")"
] |
Setup title, size and tooltips
|
[
"Setup",
"title",
"size",
"and",
"tooltips"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L868-L874
|
236,855
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
MacroPanel.OnApply
|
def OnApply(self, event):
"""Event handler for Apply button"""
# See if we have valid python
try:
ast.parse(self.macros)
except:
# Grab the traceback and print it for the user
s = StringIO()
e = exc_info()
# usr_tb will more than likely be None because ast throws
# SyntaxErrors as occurring outside of the current
# execution frame
usr_tb = get_user_codeframe(e[2]) or None
print_exception(e[0], e[1], usr_tb, None, s)
post_command_event(self.parent, self.MacroErrorMsg,
err=s.getvalue())
success = False
else:
self.result_ctrl.SetValue('')
post_command_event(self.parent, self.MacroReplaceMsg,
macros=self.macros)
post_command_event(self.parent, self.MacroExecuteMsg)
success = True
event.Skip()
return success
|
python
|
def OnApply(self, event):
"""Event handler for Apply button"""
# See if we have valid python
try:
ast.parse(self.macros)
except:
# Grab the traceback and print it for the user
s = StringIO()
e = exc_info()
# usr_tb will more than likely be None because ast throws
# SyntaxErrors as occurring outside of the current
# execution frame
usr_tb = get_user_codeframe(e[2]) or None
print_exception(e[0], e[1], usr_tb, None, s)
post_command_event(self.parent, self.MacroErrorMsg,
err=s.getvalue())
success = False
else:
self.result_ctrl.SetValue('')
post_command_event(self.parent, self.MacroReplaceMsg,
macros=self.macros)
post_command_event(self.parent, self.MacroExecuteMsg)
success = True
event.Skip()
return success
|
[
"def",
"OnApply",
"(",
"self",
",",
"event",
")",
":",
"# See if we have valid python",
"try",
":",
"ast",
".",
"parse",
"(",
"self",
".",
"macros",
")",
"except",
":",
"# Grab the traceback and print it for the user",
"s",
"=",
"StringIO",
"(",
")",
"e",
"=",
"exc_info",
"(",
")",
"# usr_tb will more than likely be none because ast throws",
"# SytnaxErrorsas occurring outside of the current",
"# execution frame",
"usr_tb",
"=",
"get_user_codeframe",
"(",
"e",
"[",
"2",
"]",
")",
"or",
"None",
"print_exception",
"(",
"e",
"[",
"0",
"]",
",",
"e",
"[",
"1",
"]",
",",
"usr_tb",
",",
"None",
",",
"s",
")",
"post_command_event",
"(",
"self",
".",
"parent",
",",
"self",
".",
"MacroErrorMsg",
",",
"err",
"=",
"s",
".",
"getvalue",
"(",
")",
")",
"success",
"=",
"False",
"else",
":",
"self",
".",
"result_ctrl",
".",
"SetValue",
"(",
"''",
")",
"post_command_event",
"(",
"self",
".",
"parent",
",",
"self",
".",
"MacroReplaceMsg",
",",
"macros",
"=",
"self",
".",
"macros",
")",
"post_command_event",
"(",
"self",
".",
"parent",
",",
"self",
".",
"MacroExecuteMsg",
")",
"success",
"=",
"True",
"event",
".",
"Skip",
"(",
")",
"return",
"success"
] |
Event handler for Apply button
|
[
"Event",
"handler",
"for",
"Apply",
"button"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L881-L907
|
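The OnApply record above validates macro source with ast.parse before posting it for execution. A minimal, self-contained sketch of the same check, stripped of the wx event plumbing, might look like this; the function name and return convention are my own, not pyspread's.

import ast
import traceback

def check_python_source(source):
    """Return None if source parses, otherwise the formatted SyntaxError text."""
    try:
        ast.parse(source)
    except SyntaxError:
        return traceback.format_exc()
    return None

error_text = check_python_source("def broken(:\n    pass")
if error_text is not None:
    pass  # show error_text to the user instead of executing the macro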
236,856
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
MacroPanel.update_result_ctrl
|
def update_result_ctrl(self, event):
"""Update event result following execution by main window"""
# Check to see if macro window still exists
if not self:
return
printLen = 0
self.result_ctrl.SetValue('')
if hasattr(event, 'msg'):
# Output of script (from print statements, for example)
self.result_ctrl.AppendText(event.msg)
printLen = len(event.msg)
if hasattr(event, 'err'):
# Error messages
errLen = len(event.err)
errStyle = wx.TextAttr(wx.RED)
self.result_ctrl.AppendText(event.err)
self.result_ctrl.SetStyle(printLen, printLen+errLen, errStyle)
if not hasattr(event, 'err') or event.err == '':
# No error passed. Close dialog if user requested it.
if self._ok_pressed:
self.Destroy()
self._ok_pressed = False
|
python
|
def update_result_ctrl(self, event):
"""Update event result following execution by main window"""
# Check to see if macro window still exists
if not self:
return
printLen = 0
self.result_ctrl.SetValue('')
if hasattr(event, 'msg'):
# Output of script (from print statements, for example)
self.result_ctrl.AppendText(event.msg)
printLen = len(event.msg)
if hasattr(event, 'err'):
# Error messages
errLen = len(event.err)
errStyle = wx.TextAttr(wx.RED)
self.result_ctrl.AppendText(event.err)
self.result_ctrl.SetStyle(printLen, printLen+errLen, errStyle)
if not hasattr(event, 'err') or event.err == '':
# No error passed. Close dialog if user requested it.
if self._ok_pressed:
self.Destroy()
self._ok_pressed = False
|
[
"def",
"update_result_ctrl",
"(",
"self",
",",
"event",
")",
":",
"# Check to see if macro window still exists",
"if",
"not",
"self",
":",
"return",
"printLen",
"=",
"0",
"self",
".",
"result_ctrl",
".",
"SetValue",
"(",
"''",
")",
"if",
"hasattr",
"(",
"event",
",",
"'msg'",
")",
":",
"# Output of script (from print statements, for example)",
"self",
".",
"result_ctrl",
".",
"AppendText",
"(",
"event",
".",
"msg",
")",
"printLen",
"=",
"len",
"(",
"event",
".",
"msg",
")",
"if",
"hasattr",
"(",
"event",
",",
"'err'",
")",
":",
"# Error messages",
"errLen",
"=",
"len",
"(",
"event",
".",
"err",
")",
"errStyle",
"=",
"wx",
".",
"TextAttr",
"(",
"wx",
".",
"RED",
")",
"self",
".",
"result_ctrl",
".",
"AppendText",
"(",
"event",
".",
"err",
")",
"self",
".",
"result_ctrl",
".",
"SetStyle",
"(",
"printLen",
",",
"printLen",
"+",
"errLen",
",",
"errStyle",
")",
"if",
"not",
"hasattr",
"(",
"event",
",",
"'err'",
")",
"or",
"event",
".",
"err",
"==",
"''",
":",
"# No error passed. Close dialog if user requested it.",
"if",
"self",
".",
"_ok_pressed",
":",
"self",
".",
"Destroy",
"(",
")",
"self",
".",
"_ok_pressed",
"=",
"False"
] |
Update event result following execution by main window
|
[
"Update",
"event",
"result",
"following",
"execution",
"by",
"main",
"window"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L909-L933
|
236,857
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
DimensionsEntryDialog._ondim
|
def _ondim(self, dimension, valuestring):
"""Converts valuestring to int and assigns result to self.dim
If there is an error (such as an empty valuestring) or if
the value is < 1, the value 1 is assigned to self.dim
Parameters
----------
dimension: int
\tDimension that is to be updated. Must be in [1:4]
valuestring: string
\t A string that can be converted to an int
"""
try:
self.dimensions[dimension] = int(valuestring)
except ValueError:
self.dimensions[dimension] = 1
self.textctrls[dimension].SetValue(str(1))
if self.dimensions[dimension] < 1:
self.dimensions[dimension] = 1
self.textctrls[dimension].SetValue(str(1))
|
python
|
def _ondim(self, dimension, valuestring):
"""Converts valuestring to int and assigns result to self.dim
If there is an error (such as an empty valuestring) or if
the value is < 1, the value 1 is assigned to self.dim
Parameters
----------
dimension: int
\tDimension that is to be updated. Must be in [1:4]
valuestring: string
\t A string that can be converted to an int
"""
try:
self.dimensions[dimension] = int(valuestring)
except ValueError:
self.dimensions[dimension] = 1
self.textctrls[dimension].SetValue(str(1))
if self.dimensions[dimension] < 1:
self.dimensions[dimension] = 1
self.textctrls[dimension].SetValue(str(1))
|
[
"def",
"_ondim",
"(",
"self",
",",
"dimension",
",",
"valuestring",
")",
":",
"try",
":",
"self",
".",
"dimensions",
"[",
"dimension",
"]",
"=",
"int",
"(",
"valuestring",
")",
"except",
"ValueError",
":",
"self",
".",
"dimensions",
"[",
"dimension",
"]",
"=",
"1",
"self",
".",
"textctrls",
"[",
"dimension",
"]",
".",
"SetValue",
"(",
"str",
"(",
"1",
")",
")",
"if",
"self",
".",
"dimensions",
"[",
"dimension",
"]",
"<",
"1",
":",
"self",
".",
"dimensions",
"[",
"dimension",
"]",
"=",
"1",
"self",
".",
"textctrls",
"[",
"dimension",
"]",
".",
"SetValue",
"(",
"str",
"(",
"1",
")",
")"
] |
Converts valuestring to int and assigns result to self.dim
If there is an error (such as an empty valuestring) or if
the value is < 1, the value 1 is assigned to self.dim
Parameters
----------
dimension: int
\tDimension that is to be updated. Must be in [1:4]
valuestring: string
\t A string that can be converted to an int
|
[
"Converts",
"valuestring",
"to",
"int",
"and",
"assigns",
"result",
"to",
"self",
".",
"dim"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L1001-L1025
|
236,858
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
CellEntryDialog.OnOk
|
def OnOk(self, event):
"""Posts a command event that makes the grid show the entered cell"""
# Get key values from textctrls
key_strings = [self.row_textctrl.GetValue(),
self.col_textctrl.GetValue(),
self.tab_textctrl.GetValue()]
key = []
for key_string in key_strings:
try:
key.append(int(key_string))
except ValueError:
key.append(0)
# Post event
post_command_event(self.parent, self.GotoCellMsg, key=tuple(key))
|
python
|
def OnOk(self, event):
"""Posts a command event that makes the grid show the entered cell"""
# Get key values from textctrls
key_strings = [self.row_textctrl.GetValue(),
self.col_textctrl.GetValue(),
self.tab_textctrl.GetValue()]
key = []
for key_string in key_strings:
try:
key.append(int(key_string))
except ValueError:
key.append(0)
# Post event
post_command_event(self.parent, self.GotoCellMsg, key=tuple(key))
|
[
"def",
"OnOk",
"(",
"self",
",",
"event",
")",
":",
"# Get key values from textctrls",
"key_strings",
"=",
"[",
"self",
".",
"row_textctrl",
".",
"GetValue",
"(",
")",
",",
"self",
".",
"col_textctrl",
".",
"GetValue",
"(",
")",
",",
"self",
".",
"tab_textctrl",
".",
"GetValue",
"(",
")",
"]",
"key",
"=",
"[",
"]",
"for",
"key_string",
"in",
"key_strings",
":",
"try",
":",
"key",
".",
"append",
"(",
"int",
"(",
"key_string",
")",
")",
"except",
"ValueError",
":",
"key",
".",
"append",
"(",
"0",
")",
"# Post event",
"post_command_event",
"(",
"self",
".",
"parent",
",",
"self",
".",
"GotoCellMsg",
",",
"key",
"=",
"tuple",
"(",
"key",
")",
")"
] |
Posts a command event that makes the grid show the entered cell
|
[
"Posts",
"a",
"command",
"event",
"that",
"makes",
"the",
"grid",
"show",
"the",
"entered",
"cell"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L1106-L1125
|
236,859
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
AboutDialog._set_properties
|
def _set_properties(self):
"""Setup title and label"""
self.SetTitle(_("About pyspread"))
label = _("pyspread {version}\nCopyright Martin Manns")
label = label.format(version=VERSION)
self.about_label.SetLabel(label)
|
python
|
def _set_properties(self):
"""Setup title and label"""
self.SetTitle(_("About pyspread"))
label = _("pyspread {version}\nCopyright Martin Manns")
label = label.format(version=VERSION)
self.about_label.SetLabel(label)
|
[
"def",
"_set_properties",
"(",
"self",
")",
":",
"self",
".",
"SetTitle",
"(",
"_",
"(",
"\"About pyspread\"",
")",
")",
"label",
"=",
"_",
"(",
"\"pyspread {version}\\nCopyright Martin Manns\"",
")",
"label",
"=",
"label",
".",
"format",
"(",
"version",
"=",
"VERSION",
")",
"self",
".",
"about_label",
".",
"SetLabel",
"(",
"label",
")"
] |
Setup title and label
|
[
"Setup",
"title",
"and",
"label"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L1163-L1171
|
236,860
|
manns/pyspread
|
pyspread/src/gui/_dialogs.py
|
PasteAsDialog.get_max_dim
|
def get_max_dim(self, obj):
"""Returns maximum dimensionality over which obj is iterable <= 2"""
try:
iter(obj)
except TypeError:
return 0
try:
for o in obj:
iter(o)
break
except TypeError:
return 1
return 2
|
python
|
def get_max_dim(self, obj):
"""Returns maximum dimensionality over which obj is iterable <= 2"""
try:
iter(obj)
except TypeError:
return 0
try:
for o in obj:
iter(o)
break
except TypeError:
return 1
return 2
|
[
"def",
"get_max_dim",
"(",
"self",
",",
"obj",
")",
":",
"try",
":",
"iter",
"(",
"obj",
")",
"except",
"TypeError",
":",
"return",
"0",
"try",
":",
"for",
"o",
"in",
"obj",
":",
"iter",
"(",
"o",
")",
"break",
"except",
"TypeError",
":",
"return",
"1",
"return",
"2"
] |
Returns maximum dimensionality over which obj is iterable <= 2
|
[
"Returns",
"maximum",
"dimensionality",
"over",
"which",
"obj",
"is",
"iterable",
"<",
"=",
"2"
] |
0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0
|
https://github.com/manns/pyspread/blob/0e2fd44c2e0f06605efc3058c20a43a8c1f9e7e0/pyspread/src/gui/_dialogs.py#L1473-L1490
|
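The get_max_dim record above probes how deeply an object can be iterated, capped at 2. A free-standing version of the same check is sketched below, with the expected results noted as assertions; it keeps the function name but defines it as a plain function purely for illustration.

def get_max_dim(obj):
    """Return the maximum iterable dimensionality of obj, capped at 2."""
    try:
        iter(obj)
    except TypeError:
        return 0          # scalars such as 42 are not iterable
    try:
        for o in obj:
            iter(o)
            break
    except TypeError:
        return 1          # flat sequences such as [1, 2, 3]
    return 2              # nested sequences such as [[1, 2], [3, 4]]

assert get_max_dim(42) == 0
assert get_max_dim([1, 2, 3]) == 1
assert get_max_dim([[1, 2], [3, 4]]) == 2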
236,861
|
dgraph-io/pydgraph
|
pydgraph/client_stub.py
|
DgraphClientStub.alter
|
def alter(self, operation, timeout=None, metadata=None, credentials=None):
"""Runs alter operation."""
return self.stub.Alter(operation, timeout=timeout, metadata=metadata,
credentials=credentials)
|
python
|
def alter(self, operation, timeout=None, metadata=None, credentials=None):
"""Runs alter operation."""
return self.stub.Alter(operation, timeout=timeout, metadata=metadata,
credentials=credentials)
|
[
"def",
"alter",
"(",
"self",
",",
"operation",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"self",
".",
"stub",
".",
"Alter",
"(",
"operation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")"
] |
Runs alter operation.
|
[
"Runs",
"alter",
"operation",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client_stub.py#L43-L46
|
236,862
|
dgraph-io/pydgraph
|
pydgraph/client_stub.py
|
DgraphClientStub.query
|
def query(self, req, timeout=None, metadata=None, credentials=None):
"""Runs query operation."""
return self.stub.Query(req, timeout=timeout, metadata=metadata,
credentials=credentials)
|
python
|
def query(self, req, timeout=None, metadata=None, credentials=None):
"""Runs query operation."""
return self.stub.Query(req, timeout=timeout, metadata=metadata,
credentials=credentials)
|
[
"def",
"query",
"(",
"self",
",",
"req",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"self",
".",
"stub",
".",
"Query",
"(",
"req",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")"
] |
Runs query operation.
|
[
"Runs",
"query",
"operation",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client_stub.py#L48-L51
|
236,863
|
dgraph-io/pydgraph
|
pydgraph/client_stub.py
|
DgraphClientStub.mutate
|
def mutate(self, mutation, timeout=None, metadata=None, credentials=None):
"""Runs mutate operation."""
return self.stub.Mutate(mutation, timeout=timeout, metadata=metadata,
credentials=credentials)
|
python
|
def mutate(self, mutation, timeout=None, metadata=None, credentials=None):
"""Runs mutate operation."""
return self.stub.Mutate(mutation, timeout=timeout, metadata=metadata,
credentials=credentials)
|
[
"def",
"mutate",
"(",
"self",
",",
"mutation",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"self",
".",
"stub",
".",
"Mutate",
"(",
"mutation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")"
] |
Runs mutate operation.
|
[
"Runs",
"mutate",
"operation",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client_stub.py#L53-L56
|
236,864
|
dgraph-io/pydgraph
|
pydgraph/client_stub.py
|
DgraphClientStub.commit_or_abort
|
def commit_or_abort(self, ctx, timeout=None, metadata=None,
credentials=None):
"""Runs commit or abort operation."""
return self.stub.CommitOrAbort(ctx, timeout=timeout, metadata=metadata,
credentials=credentials)
|
python
|
def commit_or_abort(self, ctx, timeout=None, metadata=None,
credentials=None):
"""Runs commit or abort operation."""
return self.stub.CommitOrAbort(ctx, timeout=timeout, metadata=metadata,
credentials=credentials)
|
[
"def",
"commit_or_abort",
"(",
"self",
",",
"ctx",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"self",
".",
"stub",
".",
"CommitOrAbort",
"(",
"ctx",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")"
] |
Runs commit or abort operation.
|
[
"Runs",
"commit",
"or",
"abort",
"operation",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client_stub.py#L58-L62
|
236,865
|
dgraph-io/pydgraph
|
pydgraph/client_stub.py
|
DgraphClientStub.check_version
|
def check_version(self, check, timeout=None, metadata=None,
credentials=None):
"""Returns the version of the Dgraph instance."""
return self.stub.CheckVersion(check, timeout=timeout,
metadata=metadata,
credentials=credentials)
|
python
|
def check_version(self, check, timeout=None, metadata=None,
credentials=None):
"""Returns the version of the Dgraph instance."""
return self.stub.CheckVersion(check, timeout=timeout,
metadata=metadata,
credentials=credentials)
|
[
"def",
"check_version",
"(",
"self",
",",
"check",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"return",
"self",
".",
"stub",
".",
"CheckVersion",
"(",
"check",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")"
] |
Returns the version of the Dgraph instance.
|
[
"Returns",
"the",
"version",
"of",
"the",
"Dgraph",
"instance",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client_stub.py#L64-L69
|
236,866
|
dgraph-io/pydgraph
|
pydgraph/client.py
|
DgraphClient.alter
|
def alter(self, operation, timeout=None, metadata=None, credentials=None):
"""Runs a modification via this client."""
new_metadata = self.add_login_metadata(metadata)
try:
return self.any_client().alter(operation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self.retry_login()
new_metadata = self.add_login_metadata(metadata)
return self.any_client().alter(operation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
|
python
|
def alter(self, operation, timeout=None, metadata=None, credentials=None):
"""Runs a modification via this client."""
new_metadata = self.add_login_metadata(metadata)
try:
return self.any_client().alter(operation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self.retry_login()
new_metadata = self.add_login_metadata(metadata)
return self.any_client().alter(operation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
|
[
"def",
"alter",
"(",
"self",
",",
"operation",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"new_metadata",
"=",
"self",
".",
"add_login_metadata",
"(",
"metadata",
")",
"try",
":",
"return",
"self",
".",
"any_client",
"(",
")",
".",
"alter",
"(",
"operation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"util",
".",
"is_jwt_expired",
"(",
"error",
")",
":",
"self",
".",
"retry_login",
"(",
")",
"new_metadata",
"=",
"self",
".",
"add_login_metadata",
"(",
"metadata",
")",
"return",
"self",
".",
"any_client",
"(",
")",
".",
"alter",
"(",
"operation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"else",
":",
"raise",
"error"
] |
Runs a modification via this client.
|
[
"Runs",
"a",
"modification",
"via",
"this",
"client",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client.py#L71-L87
|
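A hedged usage sketch for the DgraphClient.alter record above, following pydgraph's documented client setup; the endpoint address and schema string are placeholder values.

import pydgraph

# Placeholder endpoint; adjust to the running Dgraph alpha instance
stub = pydgraph.DgraphClientStub('localhost:9080')
client = pydgraph.DgraphClient(stub)

# Alter the schema via the client wrapper shown above (example schema string)
operation = pydgraph.Operation(schema='name: string @index(exact) .')
client.alter(operation)

stub.close()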
236,867
|
dgraph-io/pydgraph
|
pydgraph/client.py
|
DgraphClient.txn
|
def txn(self, read_only=False, best_effort=False):
"""Creates a transaction."""
return txn.Txn(self, read_only=read_only, best_effort=best_effort)
|
python
|
def txn(self, read_only=False, best_effort=False):
"""Creates a transaction."""
return txn.Txn(self, read_only=read_only, best_effort=best_effort)
|
[
"def",
"txn",
"(",
"self",
",",
"read_only",
"=",
"False",
",",
"best_effort",
"=",
"False",
")",
":",
"return",
"txn",
".",
"Txn",
"(",
"self",
",",
"read_only",
"=",
"read_only",
",",
"best_effort",
"=",
"best_effort",
")"
] |
Creates a transaction.
|
[
"Creates",
"a",
"transaction",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/client.py#L89-L91
|
236,868
|
dgraph-io/pydgraph
|
pydgraph/txn.py
|
Txn.query
|
def query(self, query, variables=None, timeout=None, metadata=None,
credentials=None):
"""Adds a query operation to the transaction."""
new_metadata = self._dg.add_login_metadata(metadata)
req = self._common_query(query, variables=variables)
try:
res = self._dc.query(req, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
res = self._dc.query(req, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
self.merge_context(res.txn)
return res
|
python
|
def query(self, query, variables=None, timeout=None, metadata=None,
credentials=None):
"""Adds a query operation to the transaction."""
new_metadata = self._dg.add_login_metadata(metadata)
req = self._common_query(query, variables=variables)
try:
res = self._dc.query(req, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
res = self._dc.query(req, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
self.merge_context(res.txn)
return res
|
[
"def",
"query",
"(",
"self",
",",
"query",
",",
"variables",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"req",
"=",
"self",
".",
"_common_query",
"(",
"query",
",",
"variables",
"=",
"variables",
")",
"try",
":",
"res",
"=",
"self",
".",
"_dc",
".",
"query",
"(",
"req",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"util",
".",
"is_jwt_expired",
"(",
"error",
")",
":",
"self",
".",
"_dg",
".",
"retry_login",
"(",
")",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"res",
"=",
"self",
".",
"_dc",
".",
"query",
"(",
"req",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"else",
":",
"raise",
"error",
"self",
".",
"merge_context",
"(",
"res",
".",
"txn",
")",
"return",
"res"
] |
Adds a query operation to the transaction.
|
[
"Adds",
"a",
"query",
"operation",
"to",
"the",
"transaction",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/txn.py#L65-L85
|
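A sketch of how the Txn.query record above is typically driven, based on pydgraph's documented read-only transaction pattern; the endpoint, query string, and variable value are examples.

import json
import pydgraph

client = pydgraph.DgraphClient(pydgraph.DgraphClientStub('localhost:9080'))

query = """query all($a: string) {
    all(func: eq(name, $a)) {
        uid
        name
    }
}"""

txn = client.txn(read_only=True)
try:
    res = txn.query(query, variables={'$a': 'Alice'})
    people = json.loads(res.json)   # res.json carries the response payload
finally:
    txn.discard()                   # always clean up the transaction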
236,869
|
dgraph-io/pydgraph
|
pydgraph/txn.py
|
Txn.mutate
|
def mutate(self, mutation=None, set_obj=None, del_obj=None, set_nquads=None,
del_nquads=None, commit_now=None, ignore_index_conflict=None,
timeout=None, metadata=None, credentials=None):
"""Adds a mutate operation to the transaction."""
mutation = self._common_mutate(
mutation=mutation, set_obj=set_obj, del_obj=del_obj,
set_nquads=set_nquads, del_nquads=del_nquads,
commit_now=commit_now, ignore_index_conflict=ignore_index_conflict)
new_metadata = self._dg.add_login_metadata(metadata)
mutate_error = None
try:
assigned = self._dc.mutate(mutation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
try:
assigned = self._dc.mutate(mutation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
mutate_error = error
else:
mutate_error = error
if mutate_error is not None:
try:
self.discard(timeout=timeout, metadata=metadata,
credentials=credentials)
except:
# Ignore error - user should see the original error.
pass
self._common_except_mutate(mutate_error)
if mutation.commit_now:
self._finished = True
self.merge_context(assigned.context)
return assigned
|
python
|
def mutate(self, mutation=None, set_obj=None, del_obj=None, set_nquads=None,
del_nquads=None, commit_now=None, ignore_index_conflict=None,
timeout=None, metadata=None, credentials=None):
"""Adds a mutate operation to the transaction."""
mutation = self._common_mutate(
mutation=mutation, set_obj=set_obj, del_obj=del_obj,
set_nquads=set_nquads, del_nquads=del_nquads,
commit_now=commit_now, ignore_index_conflict=ignore_index_conflict)
new_metadata = self._dg.add_login_metadata(metadata)
mutate_error = None
try:
assigned = self._dc.mutate(mutation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
try:
assigned = self._dc.mutate(mutation, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
mutate_error = error
else:
mutate_error = error
if mutate_error is not None:
try:
self.discard(timeout=timeout, metadata=metadata,
credentials=credentials)
except:
# Ignore error - user should see the original error.
pass
self._common_except_mutate(mutate_error)
if mutation.commit_now:
self._finished = True
self.merge_context(assigned.context)
return assigned
|
[
"def",
"mutate",
"(",
"self",
",",
"mutation",
"=",
"None",
",",
"set_obj",
"=",
"None",
",",
"del_obj",
"=",
"None",
",",
"set_nquads",
"=",
"None",
",",
"del_nquads",
"=",
"None",
",",
"commit_now",
"=",
"None",
",",
"ignore_index_conflict",
"=",
"None",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"mutation",
"=",
"self",
".",
"_common_mutate",
"(",
"mutation",
"=",
"mutation",
",",
"set_obj",
"=",
"set_obj",
",",
"del_obj",
"=",
"del_obj",
",",
"set_nquads",
"=",
"set_nquads",
",",
"del_nquads",
"=",
"del_nquads",
",",
"commit_now",
"=",
"commit_now",
",",
"ignore_index_conflict",
"=",
"ignore_index_conflict",
")",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"mutate_error",
"=",
"None",
"try",
":",
"assigned",
"=",
"self",
".",
"_dc",
".",
"mutate",
"(",
"mutation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"util",
".",
"is_jwt_expired",
"(",
"error",
")",
":",
"self",
".",
"_dg",
".",
"retry_login",
"(",
")",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"try",
":",
"assigned",
"=",
"self",
".",
"_dc",
".",
"mutate",
"(",
"mutation",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"mutate_error",
"=",
"error",
"else",
":",
"mutate_error",
"=",
"error",
"if",
"mutate_error",
"is",
"not",
"None",
":",
"try",
":",
"self",
".",
"discard",
"(",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
":",
"# Ignore error - user should see the original error.",
"pass",
"self",
".",
"_common_except_mutate",
"(",
"mutate_error",
")",
"if",
"mutation",
".",
"commit_now",
":",
"self",
".",
"_finished",
"=",
"True",
"self",
".",
"merge_context",
"(",
"assigned",
".",
"context",
")",
"return",
"assigned"
] |
Adds a mutate operation to the transaction.
|
[
"Adds",
"a",
"mutate",
"operation",
"to",
"the",
"transaction",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/txn.py#L104-L147
|
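A matching sketch for the mutate wrapper above. The stored object is an assumption; the commit_now comment is grounded in the row itself, which marks the transaction finished when mutation.commit_now is set.

# Illustrative sketch only; the object being stored is an assumption.
import pydgraph

stub = pydgraph.DgraphClientStub('localhost:9080')
client = pydgraph.DgraphClient(stub)

txn = client.txn()
try:
    # set_obj is serialized to JSON by the client; commit_now folds the commit
    # into the same round trip, so mutate() marks the transaction finished.
    txn.mutate(set_obj={'name': 'Alice', 'age': 26}, commit_now=True)
finally:
    txn.discard()  # no-op once the mutation has already committed
stub.close()
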
236,870
|
dgraph-io/pydgraph
|
pydgraph/txn.py
|
Txn.commit
|
def commit(self, timeout=None, metadata=None, credentials=None):
"""Commits the transaction."""
if not self._common_commit():
return
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
return self._common_except_commit(error)
self._common_except_commit(error)
|
python
|
def commit(self, timeout=None, metadata=None, credentials=None):
"""Commits the transaction."""
if not self._common_commit():
return
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
return self._common_except_commit(error)
self._common_except_commit(error)
|
[
"def",
"commit",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"_common_commit",
"(",
")",
":",
"return",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"try",
":",
"self",
".",
"_dc",
".",
"commit_or_abort",
"(",
"self",
".",
"_ctx",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"util",
".",
"is_jwt_expired",
"(",
"error",
")",
":",
"self",
".",
"_dg",
".",
"retry_login",
"(",
")",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"try",
":",
"self",
".",
"_dc",
".",
"commit_or_abort",
"(",
"self",
".",
"_ctx",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"return",
"self",
".",
"_common_except_commit",
"(",
"error",
")",
"self",
".",
"_common_except_commit",
"(",
"error",
")"
] |
Commits the transaction.
|
[
"Commits",
"the",
"transaction",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/txn.py#L188-L209
|
236,871
|
dgraph-io/pydgraph
|
pydgraph/txn.py
|
Txn.discard
|
def discard(self, timeout=None, metadata=None, credentials=None):
"""Discards the transaction."""
if not self._common_discard():
return
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
|
python
|
def discard(self, timeout=None, metadata=None, credentials=None):
"""Discards the transaction."""
if not self._common_discard():
return
new_metadata = self._dg.add_login_metadata(metadata)
try:
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
except Exception as error:
if util.is_jwt_expired(error):
self._dg.retry_login()
new_metadata = self._dg.add_login_metadata(metadata)
self._dc.commit_or_abort(self._ctx, timeout=timeout,
metadata=new_metadata,
credentials=credentials)
else:
raise error
|
[
"def",
"discard",
"(",
"self",
",",
"timeout",
"=",
"None",
",",
"metadata",
"=",
"None",
",",
"credentials",
"=",
"None",
")",
":",
"if",
"not",
"self",
".",
"_common_discard",
"(",
")",
":",
"return",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"try",
":",
"self",
".",
"_dc",
".",
"commit_or_abort",
"(",
"self",
".",
"_ctx",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"except",
"Exception",
"as",
"error",
":",
"if",
"util",
".",
"is_jwt_expired",
"(",
"error",
")",
":",
"self",
".",
"_dg",
".",
"retry_login",
"(",
")",
"new_metadata",
"=",
"self",
".",
"_dg",
".",
"add_login_metadata",
"(",
"metadata",
")",
"self",
".",
"_dc",
".",
"commit_or_abort",
"(",
"self",
".",
"_ctx",
",",
"timeout",
"=",
"timeout",
",",
"metadata",
"=",
"new_metadata",
",",
"credentials",
"=",
"credentials",
")",
"else",
":",
"raise",
"error"
] |
Discards the transaction.
|
[
"Discards",
"the",
"transaction",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/txn.py#L232-L250
|
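The commit and discard wrappers above complete the transaction lifecycle; below is a sketch of the usual try/commit/finally/discard pattern under the same assumptions as the earlier pydgraph sketches. pydgraph.AbortedError is the exception upstream examples catch for commit conflicts; treating it as retryable here is an assumption about the caller's needs.

# Illustrative sketch only; predicate names and retry policy are assumptions.
import pydgraph

stub = pydgraph.DgraphClientStub('localhost:9080')
client = pydgraph.DgraphClient(stub)

txn = client.txn()
try:
    txn.mutate(set_obj={'name': 'Bob'})
    txn.commit()              # commit_or_abort with the accumulated context
except pydgraph.AbortedError:
    pass                      # a conflicting transaction won; safe to retry
finally:
    txn.discard()             # no-op if commit() already finished the txn
stub.close()
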
236,872
|
dgraph-io/pydgraph
|
pydgraph/txn.py
|
Txn.merge_context
|
def merge_context(self, src=None):
"""Merges context from this instance with src."""
if src is None:
# This condition will be true only if the server doesn't return a
# txn context after a query or mutation.
return
if self._ctx.start_ts == 0:
self._ctx.start_ts = src.start_ts
elif self._ctx.start_ts != src.start_ts:
# This condition should never be true.
raise Exception('StartTs mismatch')
self._ctx.keys.extend(src.keys)
self._ctx.preds.extend(src.preds)
|
python
|
def merge_context(self, src=None):
"""Merges context from this instance with src."""
if src is None:
# This condition will be true only if the server doesn't return a
# txn context after a query or mutation.
return
if self._ctx.start_ts == 0:
self._ctx.start_ts = src.start_ts
elif self._ctx.start_ts != src.start_ts:
# This condition should never be true.
raise Exception('StartTs mismatch')
self._ctx.keys.extend(src.keys)
self._ctx.preds.extend(src.preds)
|
[
"def",
"merge_context",
"(",
"self",
",",
"src",
"=",
"None",
")",
":",
"if",
"src",
"is",
"None",
":",
"# This condition will be true only if the server doesn't return a",
"# txn context after a query or mutation.",
"return",
"if",
"self",
".",
"_ctx",
".",
"start_ts",
"==",
"0",
":",
"self",
".",
"_ctx",
".",
"start_ts",
"=",
"src",
".",
"start_ts",
"elif",
"self",
".",
"_ctx",
".",
"start_ts",
"!=",
"src",
".",
"start_ts",
":",
"# This condition should never be true.",
"raise",
"Exception",
"(",
"'StartTs mismatch'",
")",
"self",
".",
"_ctx",
".",
"keys",
".",
"extend",
"(",
"src",
".",
"keys",
")",
"self",
".",
"_ctx",
".",
"preds",
".",
"extend",
"(",
"src",
".",
"preds",
")"
] |
Merges context from this instance with src.
|
[
"Merges",
"context",
"from",
"this",
"instance",
"with",
"src",
"."
] |
0fe85f6593cb2148475750bc8555a6fdf509054b
|
https://github.com/dgraph-io/pydgraph/blob/0fe85f6593cb2148475750bc8555a6fdf509054b/pydgraph/txn.py#L263-L277
|
236,873
|
anx-ckreuzberger/django-rest-passwordreset
|
django_rest_passwordreset/tokens.py
|
RandomStringTokenGenerator.generate_token
|
def generate_token(self, *args, **kwargs):
""" generates a pseudo random code using os.urandom and binascii.hexlify """
# determine the length based on min_length and max_length
length = random.randint(self.min_length, self.max_length)
# generate the token using os.urandom and hexlify
return binascii.hexlify(
os.urandom(self.max_length)
).decode()[0:length]
|
python
|
def generate_token(self, *args, **kwargs):
""" generates a pseudo random code using os.urandom and binascii.hexlify """
# determine the length based on min_length and max_length
length = random.randint(self.min_length, self.max_length)
# generate the token using os.urandom and hexlify
return binascii.hexlify(
os.urandom(self.max_length)
).decode()[0:length]
|
[
"def",
"generate_token",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"# determine the length based on min_length and max_length",
"length",
"=",
"random",
".",
"randint",
"(",
"self",
".",
"min_length",
",",
"self",
".",
"max_length",
")",
"# generate the token using os.urandom and hexlify",
"return",
"binascii",
".",
"hexlify",
"(",
"os",
".",
"urandom",
"(",
"self",
".",
"max_length",
")",
")",
".",
"decode",
"(",
")",
"[",
"0",
":",
"length",
"]"
] |
generates a pseudo random code using os.urandom and binascii.hexlify
|
[
"generates",
"a",
"pseudo",
"random",
"code",
"using",
"os",
".",
"urandom",
"and",
"binascii",
".",
"hexlify"
] |
7118d430d4b21f78a23530c88a33390c9b6a4f95
|
https://github.com/anx-ckreuzberger/django-rest-passwordreset/blob/7118d430d4b21f78a23530c88a33390c9b6a4f95/django_rest_passwordreset/tokens.py#L61-L69
|
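A stand-alone restatement of the generate_token logic above so it can be run outside the class. The default min_length/max_length values are assumptions, not taken from the row; note that only the length is drawn from the non-cryptographic random module, while the token bytes come from os.urandom.

# Stand-alone restatement of the row above for experimentation;
# the min_length/max_length defaults are arbitrary assumptions.
import binascii
import os
import random


def generate_token(min_length=10, max_length=50):
    # only the length is chosen with random.randint; the token material
    # itself comes from os.urandom and is then hex-encoded and truncated
    length = random.randint(min_length, max_length)
    return binascii.hexlify(os.urandom(max_length)).decode()[0:length]


print(generate_token())
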
236,874
|
pinax/pinax-blog
|
pinax/blog/parsers/creole_parser.py
|
HtmlEmitter.get_text
|
def get_text(self, node):
"""Try to emit whatever text is in the node."""
try:
return node.children[0].content or ""
except (AttributeError, IndexError):
return node.content or ""
|
python
|
def get_text(self, node):
"""Try to emit whatever text is in the node."""
try:
return node.children[0].content or ""
except (AttributeError, IndexError):
return node.content or ""
|
[
"def",
"get_text",
"(",
"self",
",",
"node",
")",
":",
"try",
":",
"return",
"node",
".",
"children",
"[",
"0",
"]",
".",
"content",
"or",
"\"\"",
"except",
"(",
"AttributeError",
",",
"IndexError",
")",
":",
"return",
"node",
".",
"content",
"or",
"\"\""
] |
Try to emit whatever text is in the node.
|
[
"Try",
"to",
"emit",
"whatever",
"text",
"is",
"in",
"the",
"node",
"."
] |
be1d64946381b47d197b258a488d5de56aacccce
|
https://github.com/pinax/pinax-blog/blob/be1d64946381b47d197b258a488d5de56aacccce/pinax/blog/parsers/creole_parser.py#L35-L40
|
236,875
|
pinax/pinax-blog
|
pinax/blog/parsers/creole_parser.py
|
HtmlEmitter.emit_children
|
def emit_children(self, node):
"""Emit all the children of a node."""
return "".join([self.emit_node(child) for child in node.children])
|
python
|
def emit_children(self, node):
"""Emit all the children of a node."""
return "".join([self.emit_node(child) for child in node.children])
|
[
"def",
"emit_children",
"(",
"self",
",",
"node",
")",
":",
"return",
"\"\"",
".",
"join",
"(",
"[",
"self",
".",
"emit_node",
"(",
"child",
")",
"for",
"child",
"in",
"node",
".",
"children",
"]",
")"
] |
Emit all the children of a node.
|
[
"Emit",
"all",
"the",
"children",
"of",
"a",
"node",
"."
] |
be1d64946381b47d197b258a488d5de56aacccce
|
https://github.com/pinax/pinax-blog/blob/be1d64946381b47d197b258a488d5de56aacccce/pinax/blog/parsers/creole_parser.py#L138-L140
|
236,876
|
pinax/pinax-blog
|
pinax/blog/parsers/creole_parser.py
|
HtmlEmitter.emit_node
|
def emit_node(self, node):
"""Emit a single node."""
emit = getattr(self, "%s_emit" % node.kind, self.default_emit)
return emit(node)
|
python
|
def emit_node(self, node):
"""Emit a single node."""
emit = getattr(self, "%s_emit" % node.kind, self.default_emit)
return emit(node)
|
[
"def",
"emit_node",
"(",
"self",
",",
"node",
")",
":",
"emit",
"=",
"getattr",
"(",
"self",
",",
"\"%s_emit\"",
"%",
"node",
".",
"kind",
",",
"self",
".",
"default_emit",
")",
"return",
"emit",
"(",
"node",
")"
] |
Emit a single node.
|
[
"Emit",
"a",
"single",
"node",
"."
] |
be1d64946381b47d197b258a488d5de56aacccce
|
https://github.com/pinax/pinax-blog/blob/be1d64946381b47d197b258a488d5de56aacccce/pinax/blog/parsers/creole_parser.py#L142-L145
|
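emit_node above dispatches on node.kind through getattr with a default handler. The sketch below isolates that pattern; the Node class and the node kinds are invented for illustration and are not the creole parser's types.

# Minimal illustration of the getattr-based dispatch used by emit_node;
# Node and the node kinds here are invented for this example.
class Node:
    def __init__(self, kind, content=""):
        self.kind = kind
        self.content = content


class Emitter:
    def default_emit(self, node):
        return node.content

    def bold_emit(self, node):
        return "<strong>%s</strong>" % node.content

    def emit_node(self, node):
        emit = getattr(self, "%s_emit" % node.kind, self.default_emit)
        return emit(node)


emitter = Emitter()
print(emitter.emit_node(Node("bold", "hi")))      # <strong>hi</strong>
print(emitter.emit_node(Node("text", "plain")))   # falls back to default_emit
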
236,877
|
pinax/pinax-blog
|
pinax/blog/views.py
|
ajax_preview
|
def ajax_preview(request, **kwargs):
"""
Currently only supports markdown
"""
data = {
"html": render_to_string("pinax/blog/_preview.html", {
"content": parse(request.POST.get("markup"))
})
}
return JsonResponse(data)
|
python
|
def ajax_preview(request, **kwargs):
"""
Currently only supports markdown
"""
data = {
"html": render_to_string("pinax/blog/_preview.html", {
"content": parse(request.POST.get("markup"))
})
}
return JsonResponse(data)
|
[
"def",
"ajax_preview",
"(",
"request",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"\"html\"",
":",
"render_to_string",
"(",
"\"pinax/blog/_preview.html\"",
",",
"{",
"\"content\"",
":",
"parse",
"(",
"request",
".",
"POST",
".",
"get",
"(",
"\"markup\"",
")",
")",
"}",
")",
"}",
"return",
"JsonResponse",
"(",
"data",
")"
] |
Currently only supports markdown
|
[
"Currently",
"only",
"supports",
"markdown"
] |
be1d64946381b47d197b258a488d5de56aacccce
|
https://github.com/pinax/pinax-blog/blob/be1d64946381b47d197b258a488d5de56aacccce/pinax/blog/views.py#L285-L294
|
236,878
|
closeio/tasktiger
|
tasktiger/redis_semaphore.py
|
Semaphore.set_system_lock
|
def set_system_lock(cls, redis, name, timeout):
"""
Set system lock for the semaphore.
Sets a system lock that will expire in timeout seconds. This
overrides all other locks. Existing locks cannot be renewed
and no new locks will be permitted until the system lock
expires.
Arguments:
redis: Redis client
name: Name of lock. Used as ZSET key.
timeout: Timeout in seconds for system lock
"""
pipeline = redis.pipeline()
pipeline.zadd(name, SYSTEM_LOCK_ID, time.time() + timeout)
pipeline.expire(name, timeout + 10) # timeout plus buffer for troubleshooting
pipeline.execute()
|
python
|
def set_system_lock(cls, redis, name, timeout):
"""
Set system lock for the semaphore.
Sets a system lock that will expire in timeout seconds. This
overrides all other locks. Existing locks cannot be renewed
and no new locks will be permitted until the system lock
expires.
Arguments:
redis: Redis client
name: Name of lock. Used as ZSET key.
timeout: Timeout in seconds for system lock
"""
pipeline = redis.pipeline()
pipeline.zadd(name, SYSTEM_LOCK_ID, time.time() + timeout)
pipeline.expire(name, timeout + 10) # timeout plus buffer for troubleshooting
pipeline.execute()
|
[
"def",
"set_system_lock",
"(",
"cls",
",",
"redis",
",",
"name",
",",
"timeout",
")",
":",
"pipeline",
"=",
"redis",
".",
"pipeline",
"(",
")",
"pipeline",
".",
"zadd",
"(",
"name",
",",
"SYSTEM_LOCK_ID",
",",
"time",
".",
"time",
"(",
")",
"+",
"timeout",
")",
"pipeline",
".",
"expire",
"(",
"name",
",",
"timeout",
"+",
"10",
")",
"# timeout plus buffer for troubleshooting",
"pipeline",
".",
"execute",
"(",
")"
] |
Set system lock for the semaphore.
Sets a system lock that will expire in timeout seconds. This
overrides all other locks. Existing locks cannot be renewed
and no new locks will be permitted until the system lock
expires.
Arguments:
redis: Redis client
name: Name of lock. Used as ZSET key.
timeout: Timeout in seconds for system lock
|
[
"Set",
"system",
"lock",
"for",
"the",
"semaphore",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/redis_semaphore.py#L51-L69
|
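One portability note for set_system_lock above: zadd(name, member, score) is the redis-py 2.x calling convention, and redis-py 3.x switched to a mapping of member to score. The sketch below shows the same operation under a redis-py >= 3.0 assumption; the SYSTEM_LOCK_ID value, key name, and local Redis server are placeholders, not the tasktiger constants.

# Same operation expressed with the redis-py >= 3.0 zadd signature, which
# takes a mapping of member -> score; assumes a Redis server on localhost.
import time

import redis

SYSTEM_LOCK_ID = 'SYSTEM_LOCK'  # placeholder; the real constant is defined in tasktiger


def set_system_lock(conn, name, timeout):
    pipeline = conn.pipeline()
    pipeline.zadd(name, {SYSTEM_LOCK_ID: time.time() + timeout})
    pipeline.expire(name, timeout + 10)  # same troubleshooting buffer as the original
    pipeline.execute()


set_system_lock(redis.Redis(), 'tasks:system-lock', 60)
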
236,879
|
closeio/tasktiger
|
tasktiger/redis_semaphore.py
|
Semaphore.acquire
|
def acquire(self):
"""
Obtain a semaphore lock.
Returns: Tuple that contains True/False if the lock was acquired and number of
locks in semaphore.
"""
acquired, locks = self._semaphore(keys=[self.name],
args=[self.lock_id, self.max_locks,
self.timeout, time.time()])
# Convert Lua boolean returns to Python booleans
acquired = True if acquired == 1 else False
return acquired, locks
|
python
|
def acquire(self):
"""
Obtain a semaphore lock.
Returns: Tuple that contains True/False if the lock was acquired and number of
locks in semaphore.
"""
acquired, locks = self._semaphore(keys=[self.name],
args=[self.lock_id, self.max_locks,
self.timeout, time.time()])
# Convert Lua boolean returns to Python booleans
acquired = True if acquired == 1 else False
return acquired, locks
|
[
"def",
"acquire",
"(",
"self",
")",
":",
"acquired",
",",
"locks",
"=",
"self",
".",
"_semaphore",
"(",
"keys",
"=",
"[",
"self",
".",
"name",
"]",
",",
"args",
"=",
"[",
"self",
".",
"lock_id",
",",
"self",
".",
"max_locks",
",",
"self",
".",
"timeout",
",",
"time",
".",
"time",
"(",
")",
"]",
")",
"# Convert Lua boolean returns to Python booleans",
"acquired",
"=",
"True",
"if",
"acquired",
"==",
"1",
"else",
"False",
"return",
"acquired",
",",
"locks"
] |
Obtain a semaphore lock.
Returns: Tuple that contains True/False if the lock was acquired and number of
locks in semaphore.
|
[
"Obtain",
"a",
"semaphore",
"lock",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/redis_semaphore.py#L76-L91
|
236,880
|
closeio/tasktiger
|
tasktiger/redis_lock.py
|
NewStyleLock.renew
|
def renew(self, new_timeout):
"""
Sets a new timeout for an already acquired lock.
``new_timeout`` can be specified as an integer or a float, both
representing the number of seconds.
"""
if self.local.token is None:
raise LockError("Cannot extend an unlocked lock")
if self.timeout is None:
raise LockError("Cannot extend a lock with no timeout")
return self.do_renew(new_timeout)
|
python
|
def renew(self, new_timeout):
"""
Sets a new timeout for an already acquired lock.
``new_timeout`` can be specified as an integer or a float, both
representing the number of seconds.
"""
if self.local.token is None:
raise LockError("Cannot extend an unlocked lock")
if self.timeout is None:
raise LockError("Cannot extend a lock with no timeout")
return self.do_renew(new_timeout)
|
[
"def",
"renew",
"(",
"self",
",",
"new_timeout",
")",
":",
"if",
"self",
".",
"local",
".",
"token",
"is",
"None",
":",
"raise",
"LockError",
"(",
"\"Cannot extend an unlocked lock\"",
")",
"if",
"self",
".",
"timeout",
"is",
"None",
":",
"raise",
"LockError",
"(",
"\"Cannot extend a lock with no timeout\"",
")",
"return",
"self",
".",
"do_renew",
"(",
"new_timeout",
")"
] |
Sets a new timeout for an already acquired lock.
``new_timeout`` can be specified as an integer or a float, both
representing the number of seconds.
|
[
"Sets",
"a",
"new",
"timeout",
"for",
"an",
"already",
"acquired",
"lock",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/redis_lock.py#L122-L133
|
236,881
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.task
|
def task(self, _fn=None, queue=None, hard_timeout=None, unique=None,
lock=None, lock_key=None, retry=None, retry_on=None,
retry_method=None, schedule=None, batch=False,
max_queue_size=None):
"""
Function decorator that defines the behavior of the function when it is
used as a task. To use the default behavior, tasks don't need to be
decorated.
See README.rst for an explanation of the options.
"""
def _delay(func):
def _delay_inner(*args, **kwargs):
return self.delay(func, args=args, kwargs=kwargs)
return _delay_inner
# Periodic tasks are unique.
if schedule is not None:
unique = True
def _wrap(func):
if hard_timeout is not None:
func._task_hard_timeout = hard_timeout
if queue is not None:
func._task_queue = queue
if unique is not None:
func._task_unique = unique
if lock is not None:
func._task_lock = lock
if lock_key is not None:
func._task_lock_key = lock_key
if retry is not None:
func._task_retry = retry
if retry_on is not None:
func._task_retry_on = retry_on
if retry_method is not None:
func._task_retry_method = retry_method
if batch is not None:
func._task_batch = batch
if schedule is not None:
func._task_schedule = schedule
if max_queue_size is not None:
func._task_max_queue_size = max_queue_size
func.delay = _delay(func)
if schedule is not None:
serialized_func = serialize_func_name(func)
assert serialized_func not in self.periodic_task_funcs, \
"attempted duplicate registration of periodic task"
self.periodic_task_funcs[serialized_func] = func
return func
return _wrap if _fn is None else _wrap(_fn)
|
python
|
def task(self, _fn=None, queue=None, hard_timeout=None, unique=None,
lock=None, lock_key=None, retry=None, retry_on=None,
retry_method=None, schedule=None, batch=False,
max_queue_size=None):
"""
Function decorator that defines the behavior of the function when it is
used as a task. To use the default behavior, tasks don't need to be
decorated.
See README.rst for an explanation of the options.
"""
def _delay(func):
def _delay_inner(*args, **kwargs):
return self.delay(func, args=args, kwargs=kwargs)
return _delay_inner
# Periodic tasks are unique.
if schedule is not None:
unique = True
def _wrap(func):
if hard_timeout is not None:
func._task_hard_timeout = hard_timeout
if queue is not None:
func._task_queue = queue
if unique is not None:
func._task_unique = unique
if lock is not None:
func._task_lock = lock
if lock_key is not None:
func._task_lock_key = lock_key
if retry is not None:
func._task_retry = retry
if retry_on is not None:
func._task_retry_on = retry_on
if retry_method is not None:
func._task_retry_method = retry_method
if batch is not None:
func._task_batch = batch
if schedule is not None:
func._task_schedule = schedule
if max_queue_size is not None:
func._task_max_queue_size = max_queue_size
func.delay = _delay(func)
if schedule is not None:
serialized_func = serialize_func_name(func)
assert serialized_func not in self.periodic_task_funcs, \
"attempted duplicate registration of periodic task"
self.periodic_task_funcs[serialized_func] = func
return func
return _wrap if _fn is None else _wrap(_fn)
|
[
"def",
"task",
"(",
"self",
",",
"_fn",
"=",
"None",
",",
"queue",
"=",
"None",
",",
"hard_timeout",
"=",
"None",
",",
"unique",
"=",
"None",
",",
"lock",
"=",
"None",
",",
"lock_key",
"=",
"None",
",",
"retry",
"=",
"None",
",",
"retry_on",
"=",
"None",
",",
"retry_method",
"=",
"None",
",",
"schedule",
"=",
"None",
",",
"batch",
"=",
"False",
",",
"max_queue_size",
"=",
"None",
")",
":",
"def",
"_delay",
"(",
"func",
")",
":",
"def",
"_delay_inner",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"return",
"self",
".",
"delay",
"(",
"func",
",",
"args",
"=",
"args",
",",
"kwargs",
"=",
"kwargs",
")",
"return",
"_delay_inner",
"# Periodic tasks are unique.",
"if",
"schedule",
"is",
"not",
"None",
":",
"unique",
"=",
"True",
"def",
"_wrap",
"(",
"func",
")",
":",
"if",
"hard_timeout",
"is",
"not",
"None",
":",
"func",
".",
"_task_hard_timeout",
"=",
"hard_timeout",
"if",
"queue",
"is",
"not",
"None",
":",
"func",
".",
"_task_queue",
"=",
"queue",
"if",
"unique",
"is",
"not",
"None",
":",
"func",
".",
"_task_unique",
"=",
"unique",
"if",
"lock",
"is",
"not",
"None",
":",
"func",
".",
"_task_lock",
"=",
"lock",
"if",
"lock_key",
"is",
"not",
"None",
":",
"func",
".",
"_task_lock_key",
"=",
"lock_key",
"if",
"retry",
"is",
"not",
"None",
":",
"func",
".",
"_task_retry",
"=",
"retry",
"if",
"retry_on",
"is",
"not",
"None",
":",
"func",
".",
"_task_retry_on",
"=",
"retry_on",
"if",
"retry_method",
"is",
"not",
"None",
":",
"func",
".",
"_task_retry_method",
"=",
"retry_method",
"if",
"batch",
"is",
"not",
"None",
":",
"func",
".",
"_task_batch",
"=",
"batch",
"if",
"schedule",
"is",
"not",
"None",
":",
"func",
".",
"_task_schedule",
"=",
"schedule",
"if",
"max_queue_size",
"is",
"not",
"None",
":",
"func",
".",
"_task_max_queue_size",
"=",
"max_queue_size",
"func",
".",
"delay",
"=",
"_delay",
"(",
"func",
")",
"if",
"schedule",
"is",
"not",
"None",
":",
"serialized_func",
"=",
"serialize_func_name",
"(",
"func",
")",
"assert",
"serialized_func",
"not",
"in",
"self",
".",
"periodic_task_funcs",
",",
"\"attempted duplicate registration of periodic task\"",
"self",
".",
"periodic_task_funcs",
"[",
"serialized_func",
"]",
"=",
"func",
"return",
"func",
"return",
"_wrap",
"if",
"_fn",
"is",
"None",
"else",
"_wrap",
"(",
"_fn",
")"
] |
Function decorator that defines the behavior of the function when it is
used as a task. To use the default behavior, tasks don't need to be
decorated.
See README.rst for an explanation of the options.
|
[
"Function",
"decorator",
"that",
"defines",
"the",
"behavior",
"of",
"the",
"function",
"when",
"it",
"is",
"used",
"as",
"a",
"task",
".",
"To",
"use",
"the",
"default",
"behavior",
"tasks",
"don",
"t",
"need",
"to",
"be",
"decorated",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L242-L297
|
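A usage sketch for the task decorator above, loosely following the project's README conventions; the Redis connection settings, queue name, and task body are assumptions.

# Sketch of decorator usage, assuming a local Redis instance.
from redis import Redis
from tasktiger import TaskTiger

tiger = TaskTiger(connection=Redis(decode_responses=True))


@tiger.task(queue='email', unique=True, hard_timeout=60)
def send_welcome_email(address):
    print('sending to', address)


# The decorator attaches a .delay() helper, so either form queues the task:
send_welcome_email.delay('user@example.com')
tiger.delay(send_welcome_email, args=('user@example.com',))
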
236,882
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.run_worker
|
def run_worker(self, queues=None, module=None, exclude_queues=None,
max_workers_per_queue=None, store_tracebacks=None):
"""
Main worker entry point method.
The arguments are explained in the module-level run_worker() method's
click options.
"""
try:
module_names = module or ''
for module_name in module_names.split(','):
module_name = module_name.strip()
if module_name:
importlib.import_module(module_name)
self.log.debug('imported module', module_name=module_name)
worker = Worker(self,
queues.split(',') if queues else None,
exclude_queues.split(',') if exclude_queues else None,
max_workers_per_queue=max_workers_per_queue,
store_tracebacks=store_tracebacks)
worker.run()
except Exception:
self.log.exception('Unhandled exception')
raise
|
python
|
def run_worker(self, queues=None, module=None, exclude_queues=None,
max_workers_per_queue=None, store_tracebacks=None):
"""
Main worker entry point method.
The arguments are explained in the module-level run_worker() method's
click options.
"""
try:
module_names = module or ''
for module_name in module_names.split(','):
module_name = module_name.strip()
if module_name:
importlib.import_module(module_name)
self.log.debug('imported module', module_name=module_name)
worker = Worker(self,
queues.split(',') if queues else None,
exclude_queues.split(',') if exclude_queues else None,
max_workers_per_queue=max_workers_per_queue,
store_tracebacks=store_tracebacks)
worker.run()
except Exception:
self.log.exception('Unhandled exception')
raise
|
[
"def",
"run_worker",
"(",
"self",
",",
"queues",
"=",
"None",
",",
"module",
"=",
"None",
",",
"exclude_queues",
"=",
"None",
",",
"max_workers_per_queue",
"=",
"None",
",",
"store_tracebacks",
"=",
"None",
")",
":",
"try",
":",
"module_names",
"=",
"module",
"or",
"''",
"for",
"module_name",
"in",
"module_names",
".",
"split",
"(",
"','",
")",
":",
"module_name",
"=",
"module_name",
".",
"strip",
"(",
")",
"if",
"module_name",
":",
"importlib",
".",
"import_module",
"(",
"module_name",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'imported module'",
",",
"module_name",
"=",
"module_name",
")",
"worker",
"=",
"Worker",
"(",
"self",
",",
"queues",
".",
"split",
"(",
"','",
")",
"if",
"queues",
"else",
"None",
",",
"exclude_queues",
".",
"split",
"(",
"','",
")",
"if",
"exclude_queues",
"else",
"None",
",",
"max_workers_per_queue",
"=",
"max_workers_per_queue",
",",
"store_tracebacks",
"=",
"store_tracebacks",
")",
"worker",
".",
"run",
"(",
")",
"except",
"Exception",
":",
"self",
".",
"log",
".",
"exception",
"(",
"'Unhandled exception'",
")",
"raise"
] |
Main worker entry point method.
The arguments are explained in the module-level run_worker() method's
click options.
|
[
"Main",
"worker",
"entry",
"point",
"method",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L306-L331
|
236,883
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.delay
|
def delay(self, func, args=None, kwargs=None, queue=None,
hard_timeout=None, unique=None, lock=None, lock_key=None,
when=None, retry=None, retry_on=None, retry_method=None,
max_queue_size=None):
"""
Queues a task. See README.rst for an explanation of the options.
"""
task = Task(self, func, args=args, kwargs=kwargs, queue=queue,
hard_timeout=hard_timeout, unique=unique,
lock=lock, lock_key=lock_key,
retry=retry, retry_on=retry_on, retry_method=retry_method)
task.delay(when=when, max_queue_size=max_queue_size)
return task
|
python
|
def delay(self, func, args=None, kwargs=None, queue=None,
hard_timeout=None, unique=None, lock=None, lock_key=None,
when=None, retry=None, retry_on=None, retry_method=None,
max_queue_size=None):
"""
Queues a task. See README.rst for an explanation of the options.
"""
task = Task(self, func, args=args, kwargs=kwargs, queue=queue,
hard_timeout=hard_timeout, unique=unique,
lock=lock, lock_key=lock_key,
retry=retry, retry_on=retry_on, retry_method=retry_method)
task.delay(when=when, max_queue_size=max_queue_size)
return task
|
[
"def",
"delay",
"(",
"self",
",",
"func",
",",
"args",
"=",
"None",
",",
"kwargs",
"=",
"None",
",",
"queue",
"=",
"None",
",",
"hard_timeout",
"=",
"None",
",",
"unique",
"=",
"None",
",",
"lock",
"=",
"None",
",",
"lock_key",
"=",
"None",
",",
"when",
"=",
"None",
",",
"retry",
"=",
"None",
",",
"retry_on",
"=",
"None",
",",
"retry_method",
"=",
"None",
",",
"max_queue_size",
"=",
"None",
")",
":",
"task",
"=",
"Task",
"(",
"self",
",",
"func",
",",
"args",
"=",
"args",
",",
"kwargs",
"=",
"kwargs",
",",
"queue",
"=",
"queue",
",",
"hard_timeout",
"=",
"hard_timeout",
",",
"unique",
"=",
"unique",
",",
"lock",
"=",
"lock",
",",
"lock_key",
"=",
"lock_key",
",",
"retry",
"=",
"retry",
",",
"retry_on",
"=",
"retry_on",
",",
"retry_method",
"=",
"retry_method",
")",
"task",
".",
"delay",
"(",
"when",
"=",
"when",
",",
"max_queue_size",
"=",
"max_queue_size",
")",
"return",
"task"
] |
Queues a task. See README.rst for an explanation of the options.
|
[
"Queues",
"a",
"task",
".",
"See",
"README",
".",
"rst",
"for",
"an",
"explanation",
"of",
"the",
"options",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L333-L348
|
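The delay wrapper above forwards a `when` argument, described in the README as a datetime or timedelta. A short sketch follows, reusing the `tiger` instance and task from the previous sketch (both assumed), with an arbitrary five-minute delay.

# Sketch: schedule a task five minutes from now; assumes the `tiger`
# instance and send_welcome_email task from the earlier sketch.
import datetime

tiger.delay(
    send_welcome_email,
    args=('user@example.com',),
    when=datetime.timedelta(minutes=5),  # datetime or timedelta per the README
    queue='email',
)
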
236,884
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.get_queue_sizes
|
def get_queue_sizes(self, queue):
"""
Get the queue's number of tasks in each state.
Returns dict with queue size for the QUEUED, SCHEDULED, and ACTIVE
states. Does not include size of error queue.
"""
states = [QUEUED, SCHEDULED, ACTIVE]
pipeline = self.connection.pipeline()
for state in states:
pipeline.zcard(self._key(state, queue))
results = pipeline.execute()
return dict(zip(states, results))
|
python
|
def get_queue_sizes(self, queue):
"""
Get the queue's number of tasks in each state.
Returns dict with queue size for the QUEUED, SCHEDULED, and ACTIVE
states. Does not include size of error queue.
"""
states = [QUEUED, SCHEDULED, ACTIVE]
pipeline = self.connection.pipeline()
for state in states:
pipeline.zcard(self._key(state, queue))
results = pipeline.execute()
return dict(zip(states, results))
|
[
"def",
"get_queue_sizes",
"(",
"self",
",",
"queue",
")",
":",
"states",
"=",
"[",
"QUEUED",
",",
"SCHEDULED",
",",
"ACTIVE",
"]",
"pipeline",
"=",
"self",
".",
"connection",
".",
"pipeline",
"(",
")",
"for",
"state",
"in",
"states",
":",
"pipeline",
".",
"zcard",
"(",
"self",
".",
"_key",
"(",
"state",
",",
"queue",
")",
")",
"results",
"=",
"pipeline",
".",
"execute",
"(",
")",
"return",
"dict",
"(",
"zip",
"(",
"states",
",",
"results",
")",
")"
] |
Get the queue's number of tasks in each state.
Returns dict with queue size for the QUEUED, SCHEDULED, and ACTIVE
states. Does not include size of error queue.
|
[
"Get",
"the",
"queue",
"s",
"number",
"of",
"tasks",
"in",
"each",
"state",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L350-L363
|
236,885
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.get_queue_system_lock
|
def get_queue_system_lock(self, queue):
"""
Get system lock timeout
Returns time system lock expires or None if lock does not exist
"""
key = self._key(LOCK_REDIS_KEY, queue)
return Semaphore.get_system_lock(self.connection, key)
|
python
|
def get_queue_system_lock(self, queue):
"""
Get system lock timeout
Returns time system lock expires or None if lock does not exist
"""
key = self._key(LOCK_REDIS_KEY, queue)
return Semaphore.get_system_lock(self.connection, key)
|
[
"def",
"get_queue_system_lock",
"(",
"self",
",",
"queue",
")",
":",
"key",
"=",
"self",
".",
"_key",
"(",
"LOCK_REDIS_KEY",
",",
"queue",
")",
"return",
"Semaphore",
".",
"get_system_lock",
"(",
"self",
".",
"connection",
",",
"key",
")"
] |
Get system lock timeout
Returns time system lock expires or None if lock does not exist
|
[
"Get",
"system",
"lock",
"timeout"
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L370-L378
|
236,886
|
closeio/tasktiger
|
tasktiger/__init__.py
|
TaskTiger.set_queue_system_lock
|
def set_queue_system_lock(self, queue, timeout):
"""
Set system lock on a queue.
Max workers for this queue must be used for this to have any effect.
This will keep workers from processing tasks for this queue until
the timeout has expired. Active tasks will continue processing their
current task.
timeout is number of seconds to hold the lock
"""
key = self._key(LOCK_REDIS_KEY, queue)
Semaphore.set_system_lock(self.connection, key, timeout)
|
python
|
def set_queue_system_lock(self, queue, timeout):
"""
Set system lock on a queue.
Max workers for this queue must be used for this to have any effect.
This will keep workers from processing tasks for this queue until
the timeout has expired. Active tasks will continue processing their
current task.
timeout is number of seconds to hold the lock
"""
key = self._key(LOCK_REDIS_KEY, queue)
Semaphore.set_system_lock(self.connection, key, timeout)
|
[
"def",
"set_queue_system_lock",
"(",
"self",
",",
"queue",
",",
"timeout",
")",
":",
"key",
"=",
"self",
".",
"_key",
"(",
"LOCK_REDIS_KEY",
",",
"queue",
")",
"Semaphore",
".",
"set_system_lock",
"(",
"self",
".",
"connection",
",",
"key",
",",
"timeout",
")"
] |
Set system lock on a queue.
Max workers for this queue must be used for this to have any effect.
This will keep workers from processing tasks for this queue until
the timeout has expired. Active tasks will continue processing their
current task.
timeout is number of seconds to hold the lock
|
[
"Set",
"system",
"lock",
"on",
"a",
"queue",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/__init__.py#L380-L394
|
236,887
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._install_signal_handlers
|
def _install_signal_handlers(self):
"""
Sets up signal handlers for safely stopping the worker.
"""
def request_stop(signum, frame):
self._stop_requested = True
self.log.info('stop requested, waiting for task to finish')
signal.signal(signal.SIGINT, request_stop)
signal.signal(signal.SIGTERM, request_stop)
|
python
|
def _install_signal_handlers(self):
"""
Sets up signal handlers for safely stopping the worker.
"""
def request_stop(signum, frame):
self._stop_requested = True
self.log.info('stop requested, waiting for task to finish')
signal.signal(signal.SIGINT, request_stop)
signal.signal(signal.SIGTERM, request_stop)
|
[
"def",
"_install_signal_handlers",
"(",
"self",
")",
":",
"def",
"request_stop",
"(",
"signum",
",",
"frame",
")",
":",
"self",
".",
"_stop_requested",
"=",
"True",
"self",
".",
"log",
".",
"info",
"(",
"'stop requested, waiting for task to finish'",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"request_stop",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"request_stop",
")"
] |
Sets up signal handlers for safely stopping the worker.
|
[
"Sets",
"up",
"signal",
"handlers",
"for",
"safely",
"stopping",
"the",
"worker",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L105-L113
|
236,888
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._uninstall_signal_handlers
|
def _uninstall_signal_handlers(self):
"""
Restores default signal handlers.
"""
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
python
|
def _uninstall_signal_handlers(self):
"""
Restores default signal handlers.
"""
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
|
[
"def",
"_uninstall_signal_handlers",
"(",
"self",
")",
":",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"signal",
".",
"SIG_DFL",
")",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGTERM",
",",
"signal",
".",
"SIG_DFL",
")"
] |
Restores default signal handlers.
|
[
"Restores",
"default",
"signal",
"handlers",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L115-L120
|
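The two signal-handler rows above install and later restore SIGINT/SIGTERM handling; the sketch below shows the same install/restore pattern stand-alone, with a plain module-level flag instead of the worker object.

# Generic install/restore sketch of the graceful-stop pattern above.
import signal
import time

stop_requested = False


def request_stop(signum, frame):
    global stop_requested
    stop_requested = True
    print('stop requested, finishing current iteration')


# install the handlers
signal.signal(signal.SIGINT, request_stop)
signal.signal(signal.SIGTERM, request_stop)

# press Ctrl+C (SIGINT) or send SIGTERM to leave the loop cleanly
while not stop_requested:
    time.sleep(0.1)  # stand-in for processing one task

# restore the defaults once the loop exits
signal.signal(signal.SIGINT, signal.SIG_DFL)
signal.signal(signal.SIGTERM, signal.SIG_DFL)
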
236,889
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._filter_queues
|
def _filter_queues(self, queues):
"""
Applies the queue filter to the given list of queues and returns the
queues that match. Note that a queue name matches any subqueues
starting with the name, followed by a date. For example, "foo" will
match both "foo" and "foo.bar".
"""
def match(queue):
"""
Returns whether the given queue should be included by checking each
part of the queue name.
"""
for part in reversed_dotted_parts(queue):
if part in self.exclude_queues:
return False
if part in self.only_queues:
return True
return not self.only_queues
return [q for q in queues if match(q)]
|
python
|
def _filter_queues(self, queues):
"""
Applies the queue filter to the given list of queues and returns the
queues that match. Note that a queue name matches any subqueues
starting with the name, followed by a date. For example, "foo" will
match both "foo" and "foo.bar".
"""
def match(queue):
"""
Returns whether the given queue should be included by checking each
part of the queue name.
"""
for part in reversed_dotted_parts(queue):
if part in self.exclude_queues:
return False
if part in self.only_queues:
return True
return not self.only_queues
return [q for q in queues if match(q)]
|
[
"def",
"_filter_queues",
"(",
"self",
",",
"queues",
")",
":",
"def",
"match",
"(",
"queue",
")",
":",
"\"\"\"\n Returns whether the given queue should be included by checking each\n part of the queue name.\n \"\"\"",
"for",
"part",
"in",
"reversed_dotted_parts",
"(",
"queue",
")",
":",
"if",
"part",
"in",
"self",
".",
"exclude_queues",
":",
"return",
"False",
"if",
"part",
"in",
"self",
".",
"only_queues",
":",
"return",
"True",
"return",
"not",
"self",
".",
"only_queues",
"return",
"[",
"q",
"for",
"q",
"in",
"queues",
"if",
"match",
"(",
"q",
")",
"]"
] |
Applies the queue filter to the given list of queues and returns the
queues that match. Note that a queue name matches any subqueues
starting with the name, followed by a date. For example, "foo" will
match both "foo" and "foo.bar".
|
[
"Applies",
"the",
"queue",
"filter",
"to",
"the",
"given",
"list",
"of",
"queues",
"and",
"returns",
"the",
"queues",
"that",
"match",
".",
"Note",
"that",
"a",
"queue",
"name",
"matches",
"any",
"subqueues",
"starting",
"with",
"the",
"name",
"followed",
"by",
"a",
"date",
".",
"For",
"example",
"foo",
"will",
"match",
"both",
"foo",
"and",
"foo",
".",
"bar",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L122-L142
|
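_filter_queues above depends on reversed_dotted_parts from tasktiger's internals. The sketch below re-implements the dotted-prefix idea from scratch so the matching rule can be exercised in isolation; the helper is my approximation of a most-specific-prefix-first iterator, not the library function.

# Self-contained approximation of the queue filtering rule; the helper below
# mimics what reversed_dotted_parts appears to do and is not the tasktiger code.
def reversed_dotted_parts(queue):
    parts = queue.split('.')
    for i in range(len(parts), 0, -1):   # most specific prefix first
        yield '.'.join(parts[:i])


def filter_queues(queues, only=(), exclude=()):
    def match(queue):
        for part in reversed_dotted_parts(queue):
            if part in exclude:
                return False
            if part in only:
                return True
        return not only
    return [q for q in queues if match(q)]


print(filter_queues(['foo', 'foo.bar', 'baz'], only={'foo'}))         # ['foo', 'foo.bar']
print(filter_queues(['foo', 'foo.bar', 'baz'], exclude={'foo.bar'}))  # ['foo', 'baz']
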
236,890
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._worker_queue_scheduled_tasks
|
def _worker_queue_scheduled_tasks(self):
"""
Helper method that takes due tasks from the SCHEDULED queue and puts
them in the QUEUED queue for execution. This should be called
periodically.
"""
queues = set(self._filter_queues(self.connection.smembers(
self._key(SCHEDULED))))
now = time.time()
for queue in queues:
# Move due items from the SCHEDULED queue to the QUEUED queue. If
# items were moved, remove the queue from the scheduled set if it
# is empty, and add it to the queued set so the task gets picked
# up. If any unique tasks are already queued, don't update their
# queue time (because the new queue time would be later).
result = self.scripts.zpoppush(
self._key(SCHEDULED, queue),
self._key(QUEUED, queue),
self.config['SCHEDULED_TASK_BATCH_SIZE'],
now,
now,
if_exists=('noupdate',),
on_success=('update_sets', queue,
self._key(SCHEDULED), self._key(QUEUED)),
)
self.log.debug('scheduled tasks', queue=queue, qty=len(result))
# XXX: ideally this would be in the same pipeline, but we only want
# to announce if there was a result.
if result:
self.connection.publish(self._key('activity'), queue)
self._did_work = True
|
python
|
def _worker_queue_scheduled_tasks(self):
"""
Helper method that takes due tasks from the SCHEDULED queue and puts
them in the QUEUED queue for execution. This should be called
periodically.
"""
queues = set(self._filter_queues(self.connection.smembers(
self._key(SCHEDULED))))
now = time.time()
for queue in queues:
# Move due items from the SCHEDULED queue to the QUEUED queue. If
# items were moved, remove the queue from the scheduled set if it
# is empty, and add it to the queued set so the task gets picked
# up. If any unique tasks are already queued, don't update their
# queue time (because the new queue time would be later).
result = self.scripts.zpoppush(
self._key(SCHEDULED, queue),
self._key(QUEUED, queue),
self.config['SCHEDULED_TASK_BATCH_SIZE'],
now,
now,
if_exists=('noupdate',),
on_success=('update_sets', queue,
self._key(SCHEDULED), self._key(QUEUED)),
)
self.log.debug('scheduled tasks', queue=queue, qty=len(result))
# XXX: ideally this would be in the same pipeline, but we only want
# to announce if there was a result.
if result:
self.connection.publish(self._key('activity'), queue)
self._did_work = True
|
[
"def",
"_worker_queue_scheduled_tasks",
"(",
"self",
")",
":",
"queues",
"=",
"set",
"(",
"self",
".",
"_filter_queues",
"(",
"self",
".",
"connection",
".",
"smembers",
"(",
"self",
".",
"_key",
"(",
"SCHEDULED",
")",
")",
")",
")",
"now",
"=",
"time",
".",
"time",
"(",
")",
"for",
"queue",
"in",
"queues",
":",
"# Move due items from the SCHEDULED queue to the QUEUED queue. If",
"# items were moved, remove the queue from the scheduled set if it",
"# is empty, and add it to the queued set so the task gets picked",
"# up. If any unique tasks are already queued, don't update their",
"# queue time (because the new queue time would be later).",
"result",
"=",
"self",
".",
"scripts",
".",
"zpoppush",
"(",
"self",
".",
"_key",
"(",
"SCHEDULED",
",",
"queue",
")",
",",
"self",
".",
"_key",
"(",
"QUEUED",
",",
"queue",
")",
",",
"self",
".",
"config",
"[",
"'SCHEDULED_TASK_BATCH_SIZE'",
"]",
",",
"now",
",",
"now",
",",
"if_exists",
"=",
"(",
"'noupdate'",
",",
")",
",",
"on_success",
"=",
"(",
"'update_sets'",
",",
"queue",
",",
"self",
".",
"_key",
"(",
"SCHEDULED",
")",
",",
"self",
".",
"_key",
"(",
"QUEUED",
")",
")",
",",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'scheduled tasks'",
",",
"queue",
"=",
"queue",
",",
"qty",
"=",
"len",
"(",
"result",
")",
")",
"# XXX: ideally this would be in the same pipeline, but we only want",
"# to announce if there was a result.",
"if",
"result",
":",
"self",
".",
"connection",
".",
"publish",
"(",
"self",
".",
"_key",
"(",
"'activity'",
")",
",",
"queue",
")",
"self",
".",
"_did_work",
"=",
"True"
] |
Helper method that takes due tasks from the SCHEDULED queue and puts
them in the QUEUED queue for execution. This should be called
periodically.
|
[
"Helper",
"method",
"that",
"takes",
"due",
"tasks",
"from",
"the",
"SCHEDULED",
"queue",
"and",
"puts",
"them",
"in",
"the",
"QUEUED",
"queue",
"for",
"execution",
".",
"This",
"should",
"be",
"called",
"periodically",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L144-L175
|
236,891
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._wait_for_new_tasks
|
def _wait_for_new_tasks(self, timeout=0, batch_timeout=0):
"""
Check activity channel and wait as necessary.
This method is also used to slow down the main processing loop to reduce
the effects of rapidly sending Redis commands. This method will exit
for any of these conditions:
1. _did_work is True, suggests there could be more work pending
2. Found new queue and after batch timeout. Note batch timeout
can be zero so it will exit immediately.
3. Timeout seconds have passed, this is the maximum time to stay in
this method
"""
new_queue_found = False
start_time = batch_exit = time.time()
while True:
# Check to see if batch_exit has been updated
if batch_exit > start_time:
pubsub_sleep = batch_exit - time.time()
else:
pubsub_sleep = start_time + timeout - time.time()
message = self._pubsub.get_message(timeout=0 if pubsub_sleep < 0 or
self._did_work
else pubsub_sleep)
# Pull remaining messages off of channel
while message:
if message['type'] == 'message':
new_queue_found, batch_exit = self._process_queue_message(
message['data'], new_queue_found, batch_exit,
start_time, timeout, batch_timeout
)
message = self._pubsub.get_message()
if self._did_work:
break # Exit immediately if we did work during the last
# execution loop because there might be more work to do
elif time.time() >= batch_exit and new_queue_found:
break # After finding a new queue we can wait until the
# batch timeout expires
elif time.time() - start_time > timeout:
break
|
python
|
def _wait_for_new_tasks(self, timeout=0, batch_timeout=0):
"""
Check activity channel and wait as necessary.
This method is also used to slow down the main processing loop to reduce
the effects of rapidly sending Redis commands. This method will exit
for any of these conditions:
1. _did_work is True, suggests there could be more work pending
2. Found new queue and after batch timeout. Note batch timeout
can be zero so it will exit immediately.
3. Timeout seconds have passed, this is the maximum time to stay in
this method
"""
new_queue_found = False
start_time = batch_exit = time.time()
while True:
# Check to see if batch_exit has been updated
if batch_exit > start_time:
pubsub_sleep = batch_exit - time.time()
else:
pubsub_sleep = start_time + timeout - time.time()
message = self._pubsub.get_message(timeout=0 if pubsub_sleep < 0 or
self._did_work
else pubsub_sleep)
# Pull remaining messages off of channel
while message:
if message['type'] == 'message':
new_queue_found, batch_exit = self._process_queue_message(
message['data'], new_queue_found, batch_exit,
start_time, timeout, batch_timeout
)
message = self._pubsub.get_message()
if self._did_work:
break # Exit immediately if we did work during the last
# execution loop because there might be more work to do
elif time.time() >= batch_exit and new_queue_found:
break # After finding a new queue we can wait until the
# batch timeout expires
elif time.time() - start_time > timeout:
break
|
[
"def",
"_wait_for_new_tasks",
"(",
"self",
",",
"timeout",
"=",
"0",
",",
"batch_timeout",
"=",
"0",
")",
":",
"new_queue_found",
"=",
"False",
"start_time",
"=",
"batch_exit",
"=",
"time",
".",
"time",
"(",
")",
"while",
"True",
":",
"# Check to see if batch_exit has been updated",
"if",
"batch_exit",
">",
"start_time",
":",
"pubsub_sleep",
"=",
"batch_exit",
"-",
"time",
".",
"time",
"(",
")",
"else",
":",
"pubsub_sleep",
"=",
"start_time",
"+",
"timeout",
"-",
"time",
".",
"time",
"(",
")",
"message",
"=",
"self",
".",
"_pubsub",
".",
"get_message",
"(",
"timeout",
"=",
"0",
"if",
"pubsub_sleep",
"<",
"0",
"or",
"self",
".",
"_did_work",
"else",
"pubsub_sleep",
")",
"# Pull remaining messages off of channel",
"while",
"message",
":",
"if",
"message",
"[",
"'type'",
"]",
"==",
"'message'",
":",
"new_queue_found",
",",
"batch_exit",
"=",
"self",
".",
"_process_queue_message",
"(",
"message",
"[",
"'data'",
"]",
",",
"new_queue_found",
",",
"batch_exit",
",",
"start_time",
",",
"timeout",
",",
"batch_timeout",
")",
"message",
"=",
"self",
".",
"_pubsub",
".",
"get_message",
"(",
")",
"if",
"self",
".",
"_did_work",
":",
"break",
"# Exit immediately if we did work during the last",
"# execution loop because there might be more work to do",
"elif",
"time",
".",
"time",
"(",
")",
">=",
"batch_exit",
"and",
"new_queue_found",
":",
"break",
"# After finding a new queue we can wait until the",
"# batch timeout expires",
"elif",
"time",
".",
"time",
"(",
")",
"-",
"start_time",
">",
"timeout",
":",
"break"
] |
Check activity channel and wait as necessary.
This method is also used to slow down the main processing loop to reduce
the effects of rapidly sending Redis commands. This method will exit
for any of these conditions:
1. _did_work is True, suggests there could be more work pending
2. Found new queue and after batch timeout. Note batch timeout
can be zero so it will exit immediately.
3. Timeout seconds have passed, this is the maximum time to stay in
this method
|
[
"Check",
"activity",
"channel",
"and",
"wait",
"as",
"necessary",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L177-L220
|
236,892
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._execute_forked
|
def _execute_forked(self, tasks, log):
"""
Executes the tasks in the forked process. Multiple tasks can be passed
for batch processing. However, they must all use the same function and
will share the execution entry.
"""
success = False
execution = {}
assert len(tasks)
task_func = tasks[0].serialized_func
assert all([task_func == task.serialized_func for task in tasks[1:]])
execution['time_started'] = time.time()
exc = None
exc_info = None
try:
func = tasks[0].func
is_batch_func = getattr(func, '_task_batch', False)
g['current_task_is_batch'] = is_batch_func
if is_batch_func:
# Batch process if the task supports it.
params = [{
'args': task.args,
'kwargs': task.kwargs,
} for task in tasks]
task_timeouts = [task.hard_timeout for task in tasks if task.hard_timeout is not None]
hard_timeout = ((max(task_timeouts) if task_timeouts else None)
or
getattr(func, '_task_hard_timeout', None) or
self.config['DEFAULT_HARD_TIMEOUT'])
g['current_tasks'] = tasks
with UnixSignalDeathPenalty(hard_timeout):
func(params)
else:
# Process sequentially.
for task in tasks:
hard_timeout = (task.hard_timeout or
getattr(func, '_task_hard_timeout', None) or
self.config['DEFAULT_HARD_TIMEOUT'])
g['current_tasks'] = [task]
with UnixSignalDeathPenalty(hard_timeout):
func(*task.args, **task.kwargs)
except RetryException as exc:
execution['retry'] = True
if exc.method:
execution['retry_method'] = serialize_retry_method(exc.method)
execution['log_error'] = exc.log_error
execution['exception_name'] = serialize_func_name(exc.__class__)
exc_info = exc.exc_info or sys.exc_info()
except (JobTimeoutException, Exception) as exc:
execution['exception_name'] = serialize_func_name(exc.__class__)
exc_info = sys.exc_info()
else:
success = True
if not success:
execution['time_failed'] = time.time()
if self.store_tracebacks:
# Currently we only log failed task executions to Redis.
execution['traceback'] = \
''.join(traceback.format_exception(*exc_info))
execution['success'] = success
execution['host'] = socket.gethostname()
serialized_execution = json.dumps(execution)
for task in tasks:
self.connection.rpush(self._key('task', task.id, 'executions'),
serialized_execution)
return success
|
python
|
def _execute_forked(self, tasks, log):
"""
Executes the tasks in the forked process. Multiple tasks can be passed
for batch processing. However, they must all use the same function and
will share the execution entry.
"""
success = False
execution = {}
assert len(tasks)
task_func = tasks[0].serialized_func
assert all([task_func == task.serialized_func for task in tasks[1:]])
execution['time_started'] = time.time()
exc = None
exc_info = None
try:
func = tasks[0].func
is_batch_func = getattr(func, '_task_batch', False)
g['current_task_is_batch'] = is_batch_func
if is_batch_func:
# Batch process if the task supports it.
params = [{
'args': task.args,
'kwargs': task.kwargs,
} for task in tasks]
task_timeouts = [task.hard_timeout for task in tasks if task.hard_timeout is not None]
hard_timeout = ((max(task_timeouts) if task_timeouts else None)
or
getattr(func, '_task_hard_timeout', None) or
self.config['DEFAULT_HARD_TIMEOUT'])
g['current_tasks'] = tasks
with UnixSignalDeathPenalty(hard_timeout):
func(params)
else:
# Process sequentially.
for task in tasks:
hard_timeout = (task.hard_timeout or
getattr(func, '_task_hard_timeout', None) or
self.config['DEFAULT_HARD_TIMEOUT'])
g['current_tasks'] = [task]
with UnixSignalDeathPenalty(hard_timeout):
func(*task.args, **task.kwargs)
except RetryException as exc:
execution['retry'] = True
if exc.method:
execution['retry_method'] = serialize_retry_method(exc.method)
execution['log_error'] = exc.log_error
execution['exception_name'] = serialize_func_name(exc.__class__)
exc_info = exc.exc_info or sys.exc_info()
except (JobTimeoutException, Exception) as exc:
execution['exception_name'] = serialize_func_name(exc.__class__)
exc_info = sys.exc_info()
else:
success = True
if not success:
execution['time_failed'] = time.time()
if self.store_tracebacks:
# Currently we only log failed task executions to Redis.
execution['traceback'] = \
''.join(traceback.format_exception(*exc_info))
execution['success'] = success
execution['host'] = socket.gethostname()
serialized_execution = json.dumps(execution)
for task in tasks:
self.connection.rpush(self._key('task', task.id, 'executions'),
serialized_execution)
return success
|
[
"def",
"_execute_forked",
"(",
"self",
",",
"tasks",
",",
"log",
")",
":",
"success",
"=",
"False",
"execution",
"=",
"{",
"}",
"assert",
"len",
"(",
"tasks",
")",
"task_func",
"=",
"tasks",
"[",
"0",
"]",
".",
"serialized_func",
"assert",
"all",
"(",
"[",
"task_func",
"==",
"task",
".",
"serialized_func",
"for",
"task",
"in",
"tasks",
"[",
"1",
":",
"]",
"]",
")",
"execution",
"[",
"'time_started'",
"]",
"=",
"time",
".",
"time",
"(",
")",
"exc",
"=",
"None",
"exc_info",
"=",
"None",
"try",
":",
"func",
"=",
"tasks",
"[",
"0",
"]",
".",
"func",
"is_batch_func",
"=",
"getattr",
"(",
"func",
",",
"'_task_batch'",
",",
"False",
")",
"g",
"[",
"'current_task_is_batch'",
"]",
"=",
"is_batch_func",
"if",
"is_batch_func",
":",
"# Batch process if the task supports it.",
"params",
"=",
"[",
"{",
"'args'",
":",
"task",
".",
"args",
",",
"'kwargs'",
":",
"task",
".",
"kwargs",
",",
"}",
"for",
"task",
"in",
"tasks",
"]",
"task_timeouts",
"=",
"[",
"task",
".",
"hard_timeout",
"for",
"task",
"in",
"tasks",
"if",
"task",
".",
"hard_timeout",
"is",
"not",
"None",
"]",
"hard_timeout",
"=",
"(",
"(",
"max",
"(",
"task_timeouts",
")",
"if",
"task_timeouts",
"else",
"None",
")",
"or",
"getattr",
"(",
"func",
",",
"'_task_hard_timeout'",
",",
"None",
")",
"or",
"self",
".",
"config",
"[",
"'DEFAULT_HARD_TIMEOUT'",
"]",
")",
"g",
"[",
"'current_tasks'",
"]",
"=",
"tasks",
"with",
"UnixSignalDeathPenalty",
"(",
"hard_timeout",
")",
":",
"func",
"(",
"params",
")",
"else",
":",
"# Process sequentially.",
"for",
"task",
"in",
"tasks",
":",
"hard_timeout",
"=",
"(",
"task",
".",
"hard_timeout",
"or",
"getattr",
"(",
"func",
",",
"'_task_hard_timeout'",
",",
"None",
")",
"or",
"self",
".",
"config",
"[",
"'DEFAULT_HARD_TIMEOUT'",
"]",
")",
"g",
"[",
"'current_tasks'",
"]",
"=",
"[",
"task",
"]",
"with",
"UnixSignalDeathPenalty",
"(",
"hard_timeout",
")",
":",
"func",
"(",
"*",
"task",
".",
"args",
",",
"*",
"*",
"task",
".",
"kwargs",
")",
"except",
"RetryException",
"as",
"exc",
":",
"execution",
"[",
"'retry'",
"]",
"=",
"True",
"if",
"exc",
".",
"method",
":",
"execution",
"[",
"'retry_method'",
"]",
"=",
"serialize_retry_method",
"(",
"exc",
".",
"method",
")",
"execution",
"[",
"'log_error'",
"]",
"=",
"exc",
".",
"log_error",
"execution",
"[",
"'exception_name'",
"]",
"=",
"serialize_func_name",
"(",
"exc",
".",
"__class__",
")",
"exc_info",
"=",
"exc",
".",
"exc_info",
"or",
"sys",
".",
"exc_info",
"(",
")",
"except",
"(",
"JobTimeoutException",
",",
"Exception",
")",
"as",
"exc",
":",
"execution",
"[",
"'exception_name'",
"]",
"=",
"serialize_func_name",
"(",
"exc",
".",
"__class__",
")",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"else",
":",
"success",
"=",
"True",
"if",
"not",
"success",
":",
"execution",
"[",
"'time_failed'",
"]",
"=",
"time",
".",
"time",
"(",
")",
"if",
"self",
".",
"store_tracebacks",
":",
"# Currently we only log failed task executions to Redis.",
"execution",
"[",
"'traceback'",
"]",
"=",
"''",
".",
"join",
"(",
"traceback",
".",
"format_exception",
"(",
"*",
"exc_info",
")",
")",
"execution",
"[",
"'success'",
"]",
"=",
"success",
"execution",
"[",
"'host'",
"]",
"=",
"socket",
".",
"gethostname",
"(",
")",
"serialized_execution",
"=",
"json",
".",
"dumps",
"(",
"execution",
")",
"for",
"task",
"in",
"tasks",
":",
"self",
".",
"connection",
".",
"rpush",
"(",
"self",
".",
"_key",
"(",
"'task'",
",",
"task",
".",
"id",
",",
"'executions'",
")",
",",
"serialized_execution",
")",
"return",
"success"
] |
Executes the tasks in the forked process. Multiple tasks can be passed
for batch processing. However, they must all use the same function and
will share the execution entry.
|
[
"Executes",
"the",
"tasks",
"in",
"the",
"forked",
"process",
".",
"Multiple",
"tasks",
"can",
"be",
"passed",
"for",
"batch",
"processing",
".",
"However",
"they",
"must",
"all",
"use",
"the",
"same",
"function",
"and",
"will",
"share",
"the",
"execution",
"entry",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L292-L370
|
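The `_execute_forked` record above hinges on how a single hard timeout is chosen for a batch: the largest per-task `hard_timeout`, falling back to the function's `_task_hard_timeout` attribute, then the configured default. Below is a minimal standalone sketch of that selection rule; the `FakeTask` class, `my_task`, and the 300-second default are illustrative assumptions, not TaskTiger's API.

# Standalone sketch of the timeout selection used in _execute_forked.
# The Task stand-in and the 300s default are assumptions for illustration.
DEFAULT_HARD_TIMEOUT = 300  # assumed default, in seconds


class FakeTask:
    def __init__(self, hard_timeout=None):
        self.hard_timeout = hard_timeout


def pick_hard_timeout(tasks, func):
    # Prefer the largest per-task timeout, then the function attribute,
    # then the worker-wide default.
    task_timeouts = [t.hard_timeout for t in tasks if t.hard_timeout is not None]
    return ((max(task_timeouts) if task_timeouts else None)
            or getattr(func, '_task_hard_timeout', None)
            or DEFAULT_HARD_TIMEOUT)


def my_task(params):
    pass


my_task._task_hard_timeout = 60

print(pick_hard_timeout([FakeTask(), FakeTask(120)], my_task))  # -> 120
print(pick_hard_timeout([FakeTask()], my_task))                 # -> 60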
236,893
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._get_queue_batch_size
|
def _get_queue_batch_size(self, queue):
"""Get queue batch size."""
# Fetch one item unless this is a batch queue.
# XXX: It would be more efficient to loop in reverse order and break.
batch_queues = self.config['BATCH_QUEUES']
batch_size = 1
for part in dotted_parts(queue):
if part in batch_queues:
batch_size = batch_queues[part]
return batch_size
|
python
|
def _get_queue_batch_size(self, queue):
"""Get queue batch size."""
# Fetch one item unless this is a batch queue.
# XXX: It would be more efficient to loop in reverse order and break.
batch_queues = self.config['BATCH_QUEUES']
batch_size = 1
for part in dotted_parts(queue):
if part in batch_queues:
batch_size = batch_queues[part]
return batch_size
|
[
"def",
"_get_queue_batch_size",
"(",
"self",
",",
"queue",
")",
":",
"# Fetch one item unless this is a batch queue.",
"# XXX: It would be more efficient to loop in reverse order and break.",
"batch_queues",
"=",
"self",
".",
"config",
"[",
"'BATCH_QUEUES'",
"]",
"batch_size",
"=",
"1",
"for",
"part",
"in",
"dotted_parts",
"(",
"queue",
")",
":",
"if",
"part",
"in",
"batch_queues",
":",
"batch_size",
"=",
"batch_queues",
"[",
"part",
"]",
"return",
"batch_size"
] |
Get queue batch size.
|
[
"Get",
"queue",
"batch",
"size",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L372-L383
|
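The `_get_queue_batch_size` record matches a queue name such as `emails.notify.user` against the `BATCH_QUEUES` setting at any dotted prefix. A self-contained sketch of that lookup follows; the `dotted_parts` helper shown here is a guess at the real utility's behavior (yield each dotted prefix in order) and may differ in detail.

def dotted_parts(queue):
    # Yield every dotted prefix of a queue name: 'a.b.c' -> 'a', 'a.b', 'a.b.c'.
    parts = queue.split('.')
    for i in range(1, len(parts) + 1):
        yield '.'.join(parts[:i])


def get_queue_batch_size(queue, batch_queues):
    # The last matching prefix wins, mirroring the loop in the record above.
    batch_size = 1
    for part in dotted_parts(queue):
        if part in batch_queues:
            batch_size = batch_queues[part]
    return batch_size


print(get_queue_batch_size('emails.notify.user', {'emails': 10}))        # -> 10
print(get_queue_batch_size('emails.notify.user', {'emails.notify': 5}))  # -> 5
print(get_queue_batch_size('reports.daily', {'emails': 10}))             # -> 1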
236,894
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._get_queue_lock
|
def _get_queue_lock(self, queue, log):
"""Get queue lock for max worker queues.
For max worker queues it returns a Lock if acquired and whether
it failed to acquire the lock.
"""
max_workers = self.max_workers_per_queue
# Check if this is single worker queue
for part in dotted_parts(queue):
if part in self.single_worker_queues:
log.debug('single worker queue')
max_workers = 1
break
# Max worker queues require us to get a queue lock before
# moving tasks
if max_workers:
queue_lock = Semaphore(self.connection,
self._key(LOCK_REDIS_KEY, queue),
self.id, max_locks=max_workers,
timeout=self.config['ACTIVE_TASK_UPDATE_TIMEOUT'])
acquired, locks = queue_lock.acquire()
if not acquired:
return None, True
log.debug('acquired queue lock', locks=locks)
else:
queue_lock = None
return queue_lock, False
|
python
|
def _get_queue_lock(self, queue, log):
"""Get queue lock for max worker queues.
For max worker queues it returns a Lock if acquired and whether
it failed to acquire the lock.
"""
max_workers = self.max_workers_per_queue
# Check if this is single worker queue
for part in dotted_parts(queue):
if part in self.single_worker_queues:
log.debug('single worker queue')
max_workers = 1
break
# Max worker queues require us to get a queue lock before
# moving tasks
if max_workers:
queue_lock = Semaphore(self.connection,
self._key(LOCK_REDIS_KEY, queue),
self.id, max_locks=max_workers,
timeout=self.config['ACTIVE_TASK_UPDATE_TIMEOUT'])
acquired, locks = queue_lock.acquire()
if not acquired:
return None, True
log.debug('acquired queue lock', locks=locks)
else:
queue_lock = None
return queue_lock, False
|
[
"def",
"_get_queue_lock",
"(",
"self",
",",
"queue",
",",
"log",
")",
":",
"max_workers",
"=",
"self",
".",
"max_workers_per_queue",
"# Check if this is single worker queue",
"for",
"part",
"in",
"dotted_parts",
"(",
"queue",
")",
":",
"if",
"part",
"in",
"self",
".",
"single_worker_queues",
":",
"log",
".",
"debug",
"(",
"'single worker queue'",
")",
"max_workers",
"=",
"1",
"break",
"# Max worker queues require us to get a queue lock before",
"# moving tasks",
"if",
"max_workers",
":",
"queue_lock",
"=",
"Semaphore",
"(",
"self",
".",
"connection",
",",
"self",
".",
"_key",
"(",
"LOCK_REDIS_KEY",
",",
"queue",
")",
",",
"self",
".",
"id",
",",
"max_locks",
"=",
"max_workers",
",",
"timeout",
"=",
"self",
".",
"config",
"[",
"'ACTIVE_TASK_UPDATE_TIMEOUT'",
"]",
")",
"acquired",
",",
"locks",
"=",
"queue_lock",
".",
"acquire",
"(",
")",
"if",
"not",
"acquired",
":",
"return",
"None",
",",
"True",
"log",
".",
"debug",
"(",
"'acquired queue lock'",
",",
"locks",
"=",
"locks",
")",
"else",
":",
"queue_lock",
"=",
"None",
"return",
"queue_lock",
",",
"False"
] |
Get queue lock for max worker queues.
For max worker queues it returns a Lock if acquired and whether
it failed to acquire the lock.
|
[
"Get",
"queue",
"lock",
"for",
"max",
"worker",
"queues",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L385-L414
|
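The `_get_queue_lock` record returns a `(queue_lock, failed_to_acquire)` pair so callers can distinguish "no limit configured" from "limit reached". The sketch below keeps only that contract, with an in-memory counter standing in for the Redis-backed `Semaphore`; the `FakeSemaphore` class is an assumption for illustration only.

class FakeSemaphore:
    # In-memory stand-in for the Redis-backed Semaphore used in the record above.
    _held = {}

    def __init__(self, key, max_locks):
        self.key, self.max_locks = key, max_locks

    def acquire(self):
        count = FakeSemaphore._held.get(self.key, 0)
        if count >= self.max_locks:
            return False, count
        FakeSemaphore._held[self.key] = count + 1
        return True, count + 1


def get_queue_lock(queue, max_workers):
    # Mirrors the (queue_lock, failed_to_acquire) return contract.
    if not max_workers:
        return None, False            # no limit configured: nothing to lock
    lock = FakeSemaphore('lock:' + queue, max_locks=max_workers)
    acquired, _ = lock.acquire()
    if not acquired:
        return None, True             # limit reached: caller should skip the queue
    return lock, False


print(get_queue_lock('reports', max_workers=None))  # -> (None, False)
print(get_queue_lock('reports', max_workers=1))     # -> (<lock>, False)
print(get_queue_lock('reports', max_workers=1))     # -> (None, True)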
236,895
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._heartbeat
|
def _heartbeat(self, queue, task_ids):
"""
Updates the heartbeat for the given task IDs to prevent them from
timing out and being requeued.
"""
now = time.time()
self.connection.zadd(self._key(ACTIVE, queue),
**{task_id: now for task_id in task_ids})
|
python
|
def _heartbeat(self, queue, task_ids):
"""
Updates the heartbeat for the given task IDs to prevent them from
timing out and being requeued.
"""
now = time.time()
self.connection.zadd(self._key(ACTIVE, queue),
**{task_id: now for task_id in task_ids})
|
[
"def",
"_heartbeat",
"(",
"self",
",",
"queue",
",",
"task_ids",
")",
":",
"now",
"=",
"time",
".",
"time",
"(",
")",
"self",
".",
"connection",
".",
"zadd",
"(",
"self",
".",
"_key",
"(",
"ACTIVE",
",",
"queue",
")",
",",
"*",
"*",
"{",
"task_id",
":",
"now",
"for",
"task_id",
"in",
"task_ids",
"}",
")"
] |
Updates the heartbeat for the given task IDs to prevent them from
timing out and being requeued.
|
[
"Updates",
"the",
"heartbeat",
"for",
"the",
"given",
"task",
"IDs",
"to",
"prevent",
"them",
"from",
"timing",
"out",
"and",
"being",
"requeued",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L416-L423
|
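The `_heartbeat` record uses the redis-py 2.x keyword form of `ZADD`. With redis-py 3 and later the same refresh is written with an explicit mapping; a hedged sketch of the equivalent call is below, where the `t:active:<queue>` key layout and the localhost connection are assumptions, not taken from the library.

import time

import redis  # redis-py 3.x or later, assumes a Redis server on localhost

conn = redis.Redis(host='localhost', port=6379)


def heartbeat(queue, task_ids, prefix='t'):
    # Refresh the score of each active task ID to "now" so the tasks are not
    # considered stale and requeued by another worker.
    now = time.time()
    conn.zadd('%s:active:%s' % (prefix, queue),
              {task_id: now for task_id in task_ids})


heartbeat('emails', ['task-1', 'task-2'])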
236,896
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._execute
|
def _execute(self, queue, tasks, log, locks, queue_lock, all_task_ids):
"""
Executes the given tasks. Returns a boolean indicating whether
the tasks were executed successfully.
"""
# The tasks must use the same function.
assert len(tasks)
task_func = tasks[0].serialized_func
assert all([task_func == task.serialized_func for task in tasks[1:]])
# Before executing periodic tasks, queue them for the next period.
if task_func in self.tiger.periodic_task_funcs:
tasks[0]._queue_for_next_period()
with g_fork_lock:
child_pid = os.fork()
if child_pid == 0:
# Child process
log = log.bind(child_pid=os.getpid())
# Disconnect the Redis connection inherited from the main process.
# Note that this doesn't disconnect the socket in the main process.
self.connection.connection_pool.disconnect()
random.seed()
# Ignore Ctrl+C in the child so we don't abort the job -- the main
# process already takes care of a graceful shutdown.
signal.signal(signal.SIGINT, signal.SIG_IGN)
with WorkerContextManagerStack(self.config['CHILD_CONTEXT_MANAGERS']):
success = self._execute_forked(tasks, log)
# Wait for any threads that might be running in the child, just
# like sys.exit() would. Note we don't call sys.exit() directly
# because it would perform additional cleanup (e.g. calling atexit
# handlers twice). See also: https://bugs.python.org/issue18966
threading._shutdown()
os._exit(int(not success))
else:
# Main process
log = log.bind(child_pid=child_pid)
for task in tasks:
log.info('processing', func=task_func, task_id=task.id,
params={'args': task.args, 'kwargs': task.kwargs})
# Attach a signal handler to SIGCHLD (sent when the child process
# exits) so we can capture it.
signal.signal(signal.SIGCHLD, sigchld_handler)
# Since newer Python versions retry interrupted system calls we can't
# rely on the fact that select() is interrupted with EINTR. Instead,
# we'll set up a wake-up file descriptor below.
# Create a new pipe and apply the non-blocking flag (required for
# set_wakeup_fd).
pipe_r, pipe_w = os.pipe()
flags = fcntl.fcntl(pipe_w, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(pipe_w, fcntl.F_SETFL, flags)
# A byte will be written to pipe_w if a signal occurs (and can be
# read from pipe_r).
old_wakeup_fd = signal.set_wakeup_fd(pipe_w)
def check_child_exit():
"""
Do a non-blocking check to see if the child process exited.
Returns None if the process is still running, or the exit code
value of the child process.
"""
try:
pid, return_code = os.waitpid(child_pid, os.WNOHANG)
if pid != 0: # The child process is done.
return return_code
except OSError as e:
# Of course EINTR can happen if the child process exits
# while we're checking whether it exited. In this case it
# should be safe to retry.
if e.errno == errno.EINTR:
return check_child_exit()
else:
raise
# Wait for the child to exit and perform a periodic heartbeat.
# We check for the child twice in this loop so that we avoid
# unnecessary waiting if the child exited just before entering
# the while loop or while renewing heartbeat/locks.
while True:
return_code = check_child_exit()
if return_code is not None:
break
# Wait until the timeout or a signal / child exit occurs.
try:
select.select([pipe_r], [], [],
self.config['ACTIVE_TASK_UPDATE_TIMER'])
except select.error as e:
if e.args[0] != errno.EINTR:
raise
return_code = check_child_exit()
if return_code is not None:
break
try:
self._heartbeat(queue, all_task_ids)
for lock in locks:
lock.renew(self.config['ACTIVE_TASK_UPDATE_TIMEOUT'])
if queue_lock:
acquired, current_locks = queue_lock.renew()
if not acquired:
log.debug('queue lock renew failure')
except OSError as e:
# EINTR happens if the task completed. Since we're just
# renewing locks/heartbeat it's okay if we get interrupted.
if e.errno != errno.EINTR:
raise
# Restore signals / clean up
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
signal.set_wakeup_fd(old_wakeup_fd)
os.close(pipe_r)
os.close(pipe_w)
success = (return_code == 0)
return success
|
python
|
def _execute(self, queue, tasks, log, locks, queue_lock, all_task_ids):
"""
Executes the given tasks. Returns a boolean indicating whether
the tasks were executed successfully.
"""
# The tasks must use the same function.
assert len(tasks)
task_func = tasks[0].serialized_func
assert all([task_func == task.serialized_func for task in tasks[1:]])
# Before executing periodic tasks, queue them for the next period.
if task_func in self.tiger.periodic_task_funcs:
tasks[0]._queue_for_next_period()
with g_fork_lock:
child_pid = os.fork()
if child_pid == 0:
# Child process
log = log.bind(child_pid=os.getpid())
# Disconnect the Redis connection inherited from the main process.
# Note that this doesn't disconnect the socket in the main process.
self.connection.connection_pool.disconnect()
random.seed()
# Ignore Ctrl+C in the child so we don't abort the job -- the main
# process already takes care of a graceful shutdown.
signal.signal(signal.SIGINT, signal.SIG_IGN)
with WorkerContextManagerStack(self.config['CHILD_CONTEXT_MANAGERS']):
success = self._execute_forked(tasks, log)
# Wait for any threads that might be running in the child, just
# like sys.exit() would. Note we don't call sys.exit() directly
# because it would perform additional cleanup (e.g. calling atexit
# handlers twice). See also: https://bugs.python.org/issue18966
threading._shutdown()
os._exit(int(not success))
else:
# Main process
log = log.bind(child_pid=child_pid)
for task in tasks:
log.info('processing', func=task_func, task_id=task.id,
params={'args': task.args, 'kwargs': task.kwargs})
# Attach a signal handler to SIGCHLD (sent when the child process
# exits) so we can capture it.
signal.signal(signal.SIGCHLD, sigchld_handler)
# Since newer Python versions retry interrupted system calls we can't
# rely on the fact that select() is interrupted with EINTR. Instead,
# we'll set up a wake-up file descriptor below.
# Create a new pipe and apply the non-blocking flag (required for
# set_wakeup_fd).
pipe_r, pipe_w = os.pipe()
flags = fcntl.fcntl(pipe_w, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(pipe_w, fcntl.F_SETFL, flags)
# A byte will be written to pipe_w if a signal occurs (and can be
# read from pipe_r).
old_wakeup_fd = signal.set_wakeup_fd(pipe_w)
def check_child_exit():
"""
Do a non-blocking check to see if the child process exited.
Returns None if the process is still running, or the exit code
value of the child process.
"""
try:
pid, return_code = os.waitpid(child_pid, os.WNOHANG)
if pid != 0: # The child process is done.
return return_code
except OSError as e:
# Of course EINTR can happen if the child process exits
# while we're checking whether it exited. In this case it
# should be safe to retry.
if e.errno == errno.EINTR:
return check_child_exit()
else:
raise
# Wait for the child to exit and perform a periodic heartbeat.
# We check for the child twice in this loop so that we avoid
# unnecessary waiting if the child exited just before entering
# the while loop or while renewing heartbeat/locks.
while True:
return_code = check_child_exit()
if return_code is not None:
break
# Wait until the timeout or a signal / child exit occurs.
try:
select.select([pipe_r], [], [],
self.config['ACTIVE_TASK_UPDATE_TIMER'])
except select.error as e:
if e.args[0] != errno.EINTR:
raise
return_code = check_child_exit()
if return_code is not None:
break
try:
self._heartbeat(queue, all_task_ids)
for lock in locks:
lock.renew(self.config['ACTIVE_TASK_UPDATE_TIMEOUT'])
if queue_lock:
acquired, current_locks = queue_lock.renew()
if not acquired:
log.debug('queue lock renew failure')
except OSError as e:
# EINTR happens if the task completed. Since we're just
# renewing locks/heartbeat it's okay if we get interrupted.
if e.errno != errno.EINTR:
raise
# Restore signals / clean up
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
signal.set_wakeup_fd(old_wakeup_fd)
os.close(pipe_r)
os.close(pipe_w)
success = (return_code == 0)
return success
|
[
"def",
"_execute",
"(",
"self",
",",
"queue",
",",
"tasks",
",",
"log",
",",
"locks",
",",
"queue_lock",
",",
"all_task_ids",
")",
":",
"# The tasks must use the same function.",
"assert",
"len",
"(",
"tasks",
")",
"task_func",
"=",
"tasks",
"[",
"0",
"]",
".",
"serialized_func",
"assert",
"all",
"(",
"[",
"task_func",
"==",
"task",
".",
"serialized_func",
"for",
"task",
"in",
"tasks",
"[",
"1",
":",
"]",
"]",
")",
"# Before executing periodic tasks, queue them for the next period.",
"if",
"task_func",
"in",
"self",
".",
"tiger",
".",
"periodic_task_funcs",
":",
"tasks",
"[",
"0",
"]",
".",
"_queue_for_next_period",
"(",
")",
"with",
"g_fork_lock",
":",
"child_pid",
"=",
"os",
".",
"fork",
"(",
")",
"if",
"child_pid",
"==",
"0",
":",
"# Child process",
"log",
"=",
"log",
".",
"bind",
"(",
"child_pid",
"=",
"os",
".",
"getpid",
"(",
")",
")",
"# Disconnect the Redis connection inherited from the main process.",
"# Note that this doesn't disconnect the socket in the main process.",
"self",
".",
"connection",
".",
"connection_pool",
".",
"disconnect",
"(",
")",
"random",
".",
"seed",
"(",
")",
"# Ignore Ctrl+C in the child so we don't abort the job -- the main",
"# process already takes care of a graceful shutdown.",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"signal",
".",
"SIG_IGN",
")",
"with",
"WorkerContextManagerStack",
"(",
"self",
".",
"config",
"[",
"'CHILD_CONTEXT_MANAGERS'",
"]",
")",
":",
"success",
"=",
"self",
".",
"_execute_forked",
"(",
"tasks",
",",
"log",
")",
"# Wait for any threads that might be running in the child, just",
"# like sys.exit() would. Note we don't call sys.exit() directly",
"# because it would perform additional cleanup (e.g. calling atexit",
"# handlers twice). See also: https://bugs.python.org/issue18966",
"threading",
".",
"_shutdown",
"(",
")",
"os",
".",
"_exit",
"(",
"int",
"(",
"not",
"success",
")",
")",
"else",
":",
"# Main process",
"log",
"=",
"log",
".",
"bind",
"(",
"child_pid",
"=",
"child_pid",
")",
"for",
"task",
"in",
"tasks",
":",
"log",
".",
"info",
"(",
"'processing'",
",",
"func",
"=",
"task_func",
",",
"task_id",
"=",
"task",
".",
"id",
",",
"params",
"=",
"{",
"'args'",
":",
"task",
".",
"args",
",",
"'kwargs'",
":",
"task",
".",
"kwargs",
"}",
")",
"# Attach a signal handler to SIGCHLD (sent when the child process",
"# exits) so we can capture it.",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGCHLD",
",",
"sigchld_handler",
")",
"# Since newer Python versions retry interrupted system calls we can't",
"# rely on the fact that select() is interrupted with EINTR. Instead,",
"# we'll set up a wake-up file descriptor below.",
"# Create a new pipe and apply the non-blocking flag (required for",
"# set_wakeup_fd).",
"pipe_r",
",",
"pipe_w",
"=",
"os",
".",
"pipe",
"(",
")",
"flags",
"=",
"fcntl",
".",
"fcntl",
"(",
"pipe_w",
",",
"fcntl",
".",
"F_GETFL",
",",
"0",
")",
"flags",
"=",
"flags",
"|",
"os",
".",
"O_NONBLOCK",
"fcntl",
".",
"fcntl",
"(",
"pipe_w",
",",
"fcntl",
".",
"F_SETFL",
",",
"flags",
")",
"# A byte will be written to pipe_w if a signal occurs (and can be",
"# read from pipe_r).",
"old_wakeup_fd",
"=",
"signal",
".",
"set_wakeup_fd",
"(",
"pipe_w",
")",
"def",
"check_child_exit",
"(",
")",
":",
"\"\"\"\n Do a non-blocking check to see if the child process exited.\n Returns None if the process is still running, or the exit code\n value of the child process.\n \"\"\"",
"try",
":",
"pid",
",",
"return_code",
"=",
"os",
".",
"waitpid",
"(",
"child_pid",
",",
"os",
".",
"WNOHANG",
")",
"if",
"pid",
"!=",
"0",
":",
"# The child process is done.",
"return",
"return_code",
"except",
"OSError",
"as",
"e",
":",
"# Of course EINTR can happen if the child process exits",
"# while we're checking whether it exited. In this case it",
"# should be safe to retry.",
"if",
"e",
".",
"errno",
"==",
"errno",
".",
"EINTR",
":",
"return",
"check_child_exit",
"(",
")",
"else",
":",
"raise",
"# Wait for the child to exit and perform a periodic heartbeat.",
"# We check for the child twice in this loop so that we avoid",
"# unnecessary waiting if the child exited just before entering",
"# the while loop or while renewing heartbeat/locks.",
"while",
"True",
":",
"return_code",
"=",
"check_child_exit",
"(",
")",
"if",
"return_code",
"is",
"not",
"None",
":",
"break",
"# Wait until the timeout or a signal / child exit occurs.",
"try",
":",
"select",
".",
"select",
"(",
"[",
"pipe_r",
"]",
",",
"[",
"]",
",",
"[",
"]",
",",
"self",
".",
"config",
"[",
"'ACTIVE_TASK_UPDATE_TIMER'",
"]",
")",
"except",
"select",
".",
"error",
"as",
"e",
":",
"if",
"e",
".",
"args",
"[",
"0",
"]",
"!=",
"errno",
".",
"EINTR",
":",
"raise",
"return_code",
"=",
"check_child_exit",
"(",
")",
"if",
"return_code",
"is",
"not",
"None",
":",
"break",
"try",
":",
"self",
".",
"_heartbeat",
"(",
"queue",
",",
"all_task_ids",
")",
"for",
"lock",
"in",
"locks",
":",
"lock",
".",
"renew",
"(",
"self",
".",
"config",
"[",
"'ACTIVE_TASK_UPDATE_TIMEOUT'",
"]",
")",
"if",
"queue_lock",
":",
"acquired",
",",
"current_locks",
"=",
"queue_lock",
".",
"renew",
"(",
")",
"if",
"not",
"acquired",
":",
"log",
".",
"debug",
"(",
"'queue lock renew failure'",
")",
"except",
"OSError",
"as",
"e",
":",
"# EINTR happens if the task completed. Since we're just",
"# renewing locks/heartbeat it's okay if we get interrupted.",
"if",
"e",
".",
"errno",
"!=",
"errno",
".",
"EINTR",
":",
"raise",
"# Restore signals / clean up",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGCHLD",
",",
"signal",
".",
"SIG_DFL",
")",
"signal",
".",
"set_wakeup_fd",
"(",
"old_wakeup_fd",
")",
"os",
".",
"close",
"(",
"pipe_r",
")",
"os",
".",
"close",
"(",
"pipe_w",
")",
"success",
"=",
"(",
"return_code",
"==",
"0",
")",
"return",
"success"
] |
Executes the given tasks. Returns a boolean indicating whether
the tasks were executed successfully.
|
[
"Executes",
"the",
"given",
"tasks",
".",
"Returns",
"a",
"boolean",
"indicating",
"whether",
"the",
"tasks",
"were",
"executed",
"successfully",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L425-L554
|
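The `_execute` record combines `os.fork`, a `SIGCHLD` handler, `signal.set_wakeup_fd`, and a `select` loop so the parent can wait for the child while still renewing heartbeats and locks on a timer. Below is a stripped-down, Unix-only sketch of that wait loop using only the standard library; the heartbeat renewal is replaced by a print and the 2-second interval is an arbitrary choice.

import errno
import fcntl
import os
import select
import signal
import sys
import time


def wait_with_heartbeat(child_pid, interval=2.0):
    # Wake-up pipe: a byte is written to pipe_w whenever a signal arrives, so
    # select() returns promptly even on Pythons that retry EINTR internally.
    pipe_r, pipe_w = os.pipe()
    flags = fcntl.fcntl(pipe_w, fcntl.F_GETFL, 0)
    fcntl.fcntl(pipe_w, fcntl.F_SETFL, flags | os.O_NONBLOCK)
    old_wakeup_fd = signal.set_wakeup_fd(pipe_w)
    signal.signal(signal.SIGCHLD, lambda signum, frame: None)

    def check_child_exit():
        # Non-blocking reap: None while the child runs, its wait status once done.
        try:
            pid, status = os.waitpid(child_pid, os.WNOHANG)
            return status if pid != 0 else None
        except OSError as e:
            if e.errno == errno.EINTR:
                return check_child_exit()
            raise

    try:
        while True:
            status = check_child_exit()
            if status is not None:
                return status
            try:
                select.select([pipe_r], [], [], interval)
            except OSError as e:
                if e.errno != errno.EINTR:
                    raise
            status = check_child_exit()
            if status is not None:
                return status
            print('renewing heartbeat/locks...')  # placeholder for the real renewal
    finally:
        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
        signal.set_wakeup_fd(old_wakeup_fd)
        os.close(pipe_r)
        os.close(pipe_w)


if __name__ == '__main__':
    pid = os.fork()
    if pid == 0:
        time.sleep(5)
        os._exit(0)
    print('child exit status:', wait_with_heartbeat(pid))
    sys.exit(0)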
236,897
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._process_queue_message
|
def _process_queue_message(self, message_queue, new_queue_found, batch_exit,
start_time, timeout, batch_timeout):
"""Process a queue message from activity channel."""
for queue in self._filter_queues([message_queue]):
if queue not in self._queue_set:
if not new_queue_found:
new_queue_found = True
batch_exit = time.time() + batch_timeout
# Limit batch_exit to max timeout
if batch_exit > start_time + timeout:
batch_exit = start_time + timeout
self._queue_set.add(queue)
self.log.debug('new queue', queue=queue)
return new_queue_found, batch_exit
|
python
|
def _process_queue_message(self, message_queue, new_queue_found, batch_exit,
start_time, timeout, batch_timeout):
"""Process a queue message from activity channel."""
for queue in self._filter_queues([message_queue]):
if queue not in self._queue_set:
if not new_queue_found:
new_queue_found = True
batch_exit = time.time() + batch_timeout
# Limit batch_exit to max timeout
if batch_exit > start_time + timeout:
batch_exit = start_time + timeout
self._queue_set.add(queue)
self.log.debug('new queue', queue=queue)
return new_queue_found, batch_exit
|
[
"def",
"_process_queue_message",
"(",
"self",
",",
"message_queue",
",",
"new_queue_found",
",",
"batch_exit",
",",
"start_time",
",",
"timeout",
",",
"batch_timeout",
")",
":",
"for",
"queue",
"in",
"self",
".",
"_filter_queues",
"(",
"[",
"message_queue",
"]",
")",
":",
"if",
"queue",
"not",
"in",
"self",
".",
"_queue_set",
":",
"if",
"not",
"new_queue_found",
":",
"new_queue_found",
"=",
"True",
"batch_exit",
"=",
"time",
".",
"time",
"(",
")",
"+",
"batch_timeout",
"# Limit batch_exit to max timeout",
"if",
"batch_exit",
">",
"start_time",
"+",
"timeout",
":",
"batch_exit",
"=",
"start_time",
"+",
"timeout",
"self",
".",
"_queue_set",
".",
"add",
"(",
"queue",
")",
"self",
".",
"log",
".",
"debug",
"(",
"'new queue'",
",",
"queue",
"=",
"queue",
")",
"return",
"new_queue_found",
",",
"batch_exit"
] |
Process a queue message from activity channel.
|
[
"Process",
"a",
"queue",
"message",
"from",
"activity",
"channel",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L556-L571
|
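The `_process_queue_message` record extends the pub/sub wait by `batch_timeout` when the first previously unseen queue appears, but never past the absolute deadline of `start_time + timeout`. The capping rule in isolation, with illustrative timings:

import time


def extend_batch_exit(new_queue_found, batch_exit, start_time, timeout, batch_timeout):
    # On the first newly discovered queue, wait up to batch_timeout longer,
    # but never beyond the absolute deadline start_time + timeout.
    if not new_queue_found:
        new_queue_found = True
        batch_exit = min(time.time() + batch_timeout, start_time + timeout)
    return new_queue_found, batch_exit


start = time.time()
print(extend_batch_exit(False, None, start, timeout=10, batch_timeout=2))
print(extend_batch_exit(False, None, start, timeout=1, batch_timeout=2))  # capped at start + 1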
236,898
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._process_queue_tasks
|
def _process_queue_tasks(self, queue, queue_lock, task_ids, now, log):
"""Process tasks in queue."""
processed_count = 0
# Get all tasks
serialized_tasks = self.connection.mget([
self._key('task', task_id) for task_id in task_ids
])
# Parse tasks
tasks = []
for task_id, serialized_task in zip(task_ids, serialized_tasks):
if serialized_task:
task_data = json.loads(serialized_task)
else:
# In the rare case where we don't find the task which is
# queued (see ReliabilityTestCase.test_task_disappears),
# we log an error and remove the task below. We need to
# at least initialize the Task object with an ID so we can
# remove it.
task_data = {'id': task_id}
task = Task(self.tiger, queue=queue, _data=task_data,
_state=ACTIVE, _ts=now)
if not serialized_task:
# Remove task as per comment above
log.error('not found', task_id=task_id)
task._move()
elif task.id != task_id:
log.error('task ID mismatch', task_id=task_id)
# Remove task
task._move()
else:
tasks.append(task)
# List of task IDs that exist and we will update the heartbeat on.
valid_task_ids = set(task.id for task in tasks)
# Group by task func
tasks_by_func = OrderedDict()
for task in tasks:
func = task.serialized_func
if func in tasks_by_func:
tasks_by_func[func].append(task)
else:
tasks_by_func[func] = [task]
# Execute tasks for each task func
for tasks in tasks_by_func.values():
success, processed_tasks = self._execute_task_group(queue,
tasks, valid_task_ids, queue_lock)
processed_count = processed_count + len(processed_tasks)
log.debug('processed', attempted=len(tasks),
processed=processed_count)
for task in processed_tasks:
self._finish_task_processing(queue, task, success)
return processed_count
|
python
|
def _process_queue_tasks(self, queue, queue_lock, task_ids, now, log):
"""Process tasks in queue."""
processed_count = 0
# Get all tasks
serialized_tasks = self.connection.mget([
self._key('task', task_id) for task_id in task_ids
])
# Parse tasks
tasks = []
for task_id, serialized_task in zip(task_ids, serialized_tasks):
if serialized_task:
task_data = json.loads(serialized_task)
else:
# In the rare case where we don't find the task which is
# queued (see ReliabilityTestCase.test_task_disappears),
# we log an error and remove the task below. We need to
# at least initialize the Task object with an ID so we can
# remove it.
task_data = {'id': task_id}
task = Task(self.tiger, queue=queue, _data=task_data,
_state=ACTIVE, _ts=now)
if not serialized_task:
# Remove task as per comment above
log.error('not found', task_id=task_id)
task._move()
elif task.id != task_id:
log.error('task ID mismatch', task_id=task_id)
# Remove task
task._move()
else:
tasks.append(task)
# List of task IDs that exist and we will update the heartbeat on.
valid_task_ids = set(task.id for task in tasks)
# Group by task func
tasks_by_func = OrderedDict()
for task in tasks:
func = task.serialized_func
if func in tasks_by_func:
tasks_by_func[func].append(task)
else:
tasks_by_func[func] = [task]
# Execute tasks for each task func
for tasks in tasks_by_func.values():
success, processed_tasks = self._execute_task_group(queue,
tasks, valid_task_ids, queue_lock)
processed_count = processed_count + len(processed_tasks)
log.debug('processed', attempted=len(tasks),
processed=processed_count)
for task in processed_tasks:
self._finish_task_processing(queue, task, success)
return processed_count
|
[
"def",
"_process_queue_tasks",
"(",
"self",
",",
"queue",
",",
"queue_lock",
",",
"task_ids",
",",
"now",
",",
"log",
")",
":",
"processed_count",
"=",
"0",
"# Get all tasks",
"serialized_tasks",
"=",
"self",
".",
"connection",
".",
"mget",
"(",
"[",
"self",
".",
"_key",
"(",
"'task'",
",",
"task_id",
")",
"for",
"task_id",
"in",
"task_ids",
"]",
")",
"# Parse tasks",
"tasks",
"=",
"[",
"]",
"for",
"task_id",
",",
"serialized_task",
"in",
"zip",
"(",
"task_ids",
",",
"serialized_tasks",
")",
":",
"if",
"serialized_task",
":",
"task_data",
"=",
"json",
".",
"loads",
"(",
"serialized_task",
")",
"else",
":",
"# In the rare case where we don't find the task which is",
"# queued (see ReliabilityTestCase.test_task_disappears),",
"# we log an error and remove the task below. We need to",
"# at least initialize the Task object with an ID so we can",
"# remove it.",
"task_data",
"=",
"{",
"'id'",
":",
"task_id",
"}",
"task",
"=",
"Task",
"(",
"self",
".",
"tiger",
",",
"queue",
"=",
"queue",
",",
"_data",
"=",
"task_data",
",",
"_state",
"=",
"ACTIVE",
",",
"_ts",
"=",
"now",
")",
"if",
"not",
"serialized_task",
":",
"# Remove task as per comment above",
"log",
".",
"error",
"(",
"'not found'",
",",
"task_id",
"=",
"task_id",
")",
"task",
".",
"_move",
"(",
")",
"elif",
"task",
".",
"id",
"!=",
"task_id",
":",
"log",
".",
"error",
"(",
"'task ID mismatch'",
",",
"task_id",
"=",
"task_id",
")",
"# Remove task",
"task",
".",
"_move",
"(",
")",
"else",
":",
"tasks",
".",
"append",
"(",
"task",
")",
"# List of task IDs that exist and we will update the heartbeat on.",
"valid_task_ids",
"=",
"set",
"(",
"task",
".",
"id",
"for",
"task",
"in",
"tasks",
")",
"# Group by task func",
"tasks_by_func",
"=",
"OrderedDict",
"(",
")",
"for",
"task",
"in",
"tasks",
":",
"func",
"=",
"task",
".",
"serialized_func",
"if",
"func",
"in",
"tasks_by_func",
":",
"tasks_by_func",
"[",
"func",
"]",
".",
"append",
"(",
"task",
")",
"else",
":",
"tasks_by_func",
"[",
"func",
"]",
"=",
"[",
"task",
"]",
"# Execute tasks for each task func",
"for",
"tasks",
"in",
"tasks_by_func",
".",
"values",
"(",
")",
":",
"success",
",",
"processed_tasks",
"=",
"self",
".",
"_execute_task_group",
"(",
"queue",
",",
"tasks",
",",
"valid_task_ids",
",",
"queue_lock",
")",
"processed_count",
"=",
"processed_count",
"+",
"len",
"(",
"processed_tasks",
")",
"log",
".",
"debug",
"(",
"'processed'",
",",
"attempted",
"=",
"len",
"(",
"tasks",
")",
",",
"processed",
"=",
"processed_count",
")",
"for",
"task",
"in",
"processed_tasks",
":",
"self",
".",
"_finish_task_processing",
"(",
"queue",
",",
"task",
",",
"success",
")",
"return",
"processed_count"
] |
Process tasks in queue.
|
[
"Process",
"tasks",
"in",
"queue",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L573-L632
|
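The `_process_queue_tasks` record groups the fetched batch by serialized function before executing each group, using an `OrderedDict` to preserve the fetch order. A minimal grouping sketch over plain dicts standing in for Task objects:

from collections import OrderedDict

tasks = [
    {'id': 1, 'func': 'emails.send'},
    {'id': 2, 'func': 'reports.build'},
    {'id': 3, 'func': 'emails.send'},
]

# Group by function while preserving the order the tasks were fetched in.
tasks_by_func = OrderedDict()
for task in tasks:
    tasks_by_func.setdefault(task['func'], []).append(task)

for func, group in tasks_by_func.items():
    print(func, [t['id'] for t in group])
# emails.send [1, 3]
# reports.build [2]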
236,899
|
closeio/tasktiger
|
tasktiger/worker.py
|
Worker._process_from_queue
|
def _process_from_queue(self, queue):
"""
Internal method to process a task batch from the given queue.
Args:
queue: Queue name to be processed
Returns:
Task IDs: List of tasks that were processed (even if there was an
error so that client code can assume the queue is empty
if nothing was returned)
Count: The number of tasks that were attempted to be executed or
-1 if the queue lock couldn't be acquired.
"""
now = time.time()
log = self.log.bind(queue=queue)
batch_size = self._get_queue_batch_size(queue)
queue_lock, failed_to_acquire = self._get_queue_lock(queue, log)
if failed_to_acquire:
return [], -1
# Move an item to the active queue, if available.
# We need to be careful when moving unique tasks: We currently don't
# support concurrent processing of multiple unique tasks. If the task
# is already in the ACTIVE queue, we need to execute the queued task
# later, i.e. move it to the SCHEDULED queue (prefer the earliest
# time if it's already scheduled). We want to make sure that the last
# queued instance of the task always gets executed no earlier than it
# was queued.
later = time.time() + self.config['LOCK_RETRY']
task_ids = self.scripts.zpoppush(
self._key(QUEUED, queue),
self._key(ACTIVE, queue),
batch_size,
None,
now,
if_exists=('add', self._key(SCHEDULED, queue), later, 'min'),
on_success=('update_sets', queue, self._key(QUEUED),
self._key(ACTIVE), self._key(SCHEDULED))
)
log.debug('moved tasks', src_queue=QUEUED, dest_queue=ACTIVE,
qty=len(task_ids))
processed_count = 0
if task_ids:
processed_count = self._process_queue_tasks(queue, queue_lock,
task_ids, now, log)
if queue_lock:
queue_lock.release()
log.debug('released swq lock')
return task_ids, processed_count
|
python
|
def _process_from_queue(self, queue):
"""
Internal method to process a task batch from the given queue.
Args:
queue: Queue name to be processed
Returns:
Task IDs: List of tasks that were processed (even if there was an
error so that client code can assume the queue is empty
if nothing was returned)
Count: The number of tasks that were attempted to be executed or
-1 if the queue lock couldn't be acquired.
"""
now = time.time()
log = self.log.bind(queue=queue)
batch_size = self._get_queue_batch_size(queue)
queue_lock, failed_to_acquire = self._get_queue_lock(queue, log)
if failed_to_acquire:
return [], -1
# Move an item to the active queue, if available.
# We need to be careful when moving unique tasks: We currently don't
# support concurrent processing of multiple unique tasks. If the task
# is already in the ACTIVE queue, we need to execute the queued task
# later, i.e. move it to the SCHEDULED queue (prefer the earliest
# time if it's already scheduled). We want to make sure that the last
# queued instance of the task always gets executed no earlier than it
# was queued.
later = time.time() + self.config['LOCK_RETRY']
task_ids = self.scripts.zpoppush(
self._key(QUEUED, queue),
self._key(ACTIVE, queue),
batch_size,
None,
now,
if_exists=('add', self._key(SCHEDULED, queue), later, 'min'),
on_success=('update_sets', queue, self._key(QUEUED),
self._key(ACTIVE), self._key(SCHEDULED))
)
log.debug('moved tasks', src_queue=QUEUED, dest_queue=ACTIVE,
qty=len(task_ids))
processed_count = 0
if task_ids:
processed_count = self._process_queue_tasks(queue, queue_lock,
task_ids, now, log)
if queue_lock:
queue_lock.release()
log.debug('released swq lock')
return task_ids, processed_count
|
[
"def",
"_process_from_queue",
"(",
"self",
",",
"queue",
")",
":",
"now",
"=",
"time",
".",
"time",
"(",
")",
"log",
"=",
"self",
".",
"log",
".",
"bind",
"(",
"queue",
"=",
"queue",
")",
"batch_size",
"=",
"self",
".",
"_get_queue_batch_size",
"(",
"queue",
")",
"queue_lock",
",",
"failed_to_acquire",
"=",
"self",
".",
"_get_queue_lock",
"(",
"queue",
",",
"log",
")",
"if",
"failed_to_acquire",
":",
"return",
"[",
"]",
",",
"-",
"1",
"# Move an item to the active queue, if available.",
"# We need to be careful when moving unique tasks: We currently don't",
"# support concurrent processing of multiple unique tasks. If the task",
"# is already in the ACTIVE queue, we need to execute the queued task",
"# later, i.e. move it to the SCHEDULED queue (prefer the earliest",
"# time if it's already scheduled). We want to make sure that the last",
"# queued instance of the task always gets executed no earlier than it",
"# was queued.",
"later",
"=",
"time",
".",
"time",
"(",
")",
"+",
"self",
".",
"config",
"[",
"'LOCK_RETRY'",
"]",
"task_ids",
"=",
"self",
".",
"scripts",
".",
"zpoppush",
"(",
"self",
".",
"_key",
"(",
"QUEUED",
",",
"queue",
")",
",",
"self",
".",
"_key",
"(",
"ACTIVE",
",",
"queue",
")",
",",
"batch_size",
",",
"None",
",",
"now",
",",
"if_exists",
"=",
"(",
"'add'",
",",
"self",
".",
"_key",
"(",
"SCHEDULED",
",",
"queue",
")",
",",
"later",
",",
"'min'",
")",
",",
"on_success",
"=",
"(",
"'update_sets'",
",",
"queue",
",",
"self",
".",
"_key",
"(",
"QUEUED",
")",
",",
"self",
".",
"_key",
"(",
"ACTIVE",
")",
",",
"self",
".",
"_key",
"(",
"SCHEDULED",
")",
")",
")",
"log",
".",
"debug",
"(",
"'moved tasks'",
",",
"src_queue",
"=",
"QUEUED",
",",
"dest_queue",
"=",
"ACTIVE",
",",
"qty",
"=",
"len",
"(",
"task_ids",
")",
")",
"processed_count",
"=",
"0",
"if",
"task_ids",
":",
"processed_count",
"=",
"self",
".",
"_process_queue_tasks",
"(",
"queue",
",",
"queue_lock",
",",
"task_ids",
",",
"now",
",",
"log",
")",
"if",
"queue_lock",
":",
"queue_lock",
".",
"release",
"(",
")",
"log",
".",
"debug",
"(",
"'released swq lock'",
")",
"return",
"task_ids",
",",
"processed_count"
] |
Internal method to process a task batch from the given queue.
Args:
queue: Queue name to be processed
Returns:
Task IDs: List of tasks that were processed (even if there was an
error so that client code can assume the queue is empty
if nothing was returned)
Count: The number of tasks that were attempted to be executed or
-1 if the queue lock couldn't be acquired.
|
[
"Internal",
"method",
"to",
"process",
"a",
"task",
"batch",
"from",
"the",
"given",
"queue",
"."
] |
59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a
|
https://github.com/closeio/tasktiger/blob/59f893152d6eb4b7f1f62fc4b35aeeca7f26c07a/tasktiger/worker.py#L634-L690
|
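The `_process_from_queue` record relies on a `zpoppush` Lua script to move up to `batch_size` task IDs from the QUEUED sorted set to the ACTIVE one in a single atomic step. The redis-py sketch below only conveys the shape of that move; the key names are assumptions, and it is deliberately non-atomic, which is exactly why the real worker runs this logic server-side as a script.

import time

import redis  # assumes a Redis server on localhost

conn = redis.Redis()


def zpoppush_sketch(src, dst, count, score):
    # Read the oldest `count` members from src, then move them to dst with the
    # new score. NOT atomic: another worker could grab the same members between
    # the read and the pipeline below.
    members = conn.zrange(src, 0, count - 1)
    if not members:
        return []
    pipe = conn.pipeline()
    pipe.zrem(src, *members)
    pipe.zadd(dst, {m: score for m in members})
    pipe.execute()
    return [m.decode() for m in members]


moved = zpoppush_sketch('t:queued:emails', 't:active:emails',
                        count=10, score=time.time())
print('moved tasks:', moved)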