Column types: repo_name (string, 5-92 chars); path (string, 4-221 chars); copies (string, 19 classes); size (string, 4-6 chars); content (string, 766-896k chars); license (string, 15 classes); hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B); line_mean (float64, 6.51-99.9); line_max (int64, 32-997); alpha_frac (float64, 0.25-0.96); autogenerated (bool, 1 class); ratio (float64, 1.5-13.6); config_test (bool, 2 classes); has_no_keywords (bool, 2 classes); few_assignments (bool, 1 class).

| repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated | ratio | config_test | has_no_keywords | few_assignments |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
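The content column holds each file's raw source, while the remaining columns are per-file metadata. As a purely illustrative sketch (this dump includes no loading code, so rows below is a placeholder for the records as dicts keyed by the column names above), the metadata fields can be used to pre-filter files before use:

def keep(row):
    # Filter on the metadata columns documented in the schema above; the
    # thresholds here are arbitrary examples.
    return (
        not row["autogenerated"]       # drop files flagged as generated
        and not row["config_test"]     # drop config/test stubs
        and row["alpha_frac"] >= 0.4   # keep reasonably "texty" sources
        and row["line_max"] <= 200     # skip files with very long lines
    )

rows = []  # placeholder: dataset records as dicts keyed by the column names
filtered = [r for r in rows if keep(r)]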
trustyou/tyluigiutils
|
setup.py
|
1
|
1848
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""The setup script."""
import os.path
from setuptools import setup, find_packages
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read()
def read(fname):
with open(fname) as fp:
content = fp.read()
return content
REQUIREMENTS_FOLDER = os.getenv('REQUIREMENTS_PATH', '')
requirements = [line.strip() for line in open(os.path.join(REQUIREMENTS_FOLDER, "requirements.txt"), 'r')]
test_requirements = [line.strip() for line in open(os.path.join(REQUIREMENTS_FOLDER, "requirements_dev.txt"), 'r')]
setup_requirements = [
'pytest-runner',
]
test_requirements = [
'pytest',
# TODO: put package test requirements here
]
setup(
name='tyluigiutils',
version='0.2.0',
description="Misc Luigi related code used by TrustYou ",
long_description=readme + '\n\n' + history,
author="Miguel Cabrera",
author_email='mfcabrera@gmail.com',
url='https://github.com/mfcabrera/tyluigiutils',
packages=find_packages('.'),
include_package_data=True,
install_requires=requirements,
license="MIT license",
zip_safe=False,
keywords='tyluigiutils',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
test_suite='tests',
tests_require=test_requirements,
setup_requires=setup_requirements,
)
|
mit
| 1,872,680,828,354,485,800
| 27
| 115
| 0.643398
| false
| 3.652174
| true
| false
| false
|
nyu-dl/WebNav
|
op_link.py
|
1
|
5077
|
'''
Custom theano class to access page links.
'''
import numpy as np
import theano
from theano import gof
from theano import tensor
import time
import parameters as prm
import utils
class Link(theano.Op):
__props__ = ()
def __init__(self, wiki, wikipre, vocab):
self.wiki = wiki
self.wikipre = wikipre
self.vocab = vocab
self.mem = {}
def make_node(self, x, x2, x3, x4, x5):
# check that the theano version has support for __props__.
# This next line looks like it has a typo,
# but it's actually a way to detect the theano version
# is sufficiently recent to support the use of __props__.
assert hasattr(self, '_props'), "Your version of theano is too old to support __props__."
x = tensor.as_tensor_variable(x)
x2 = tensor.as_tensor_variable(x2)
x3 = tensor.as_tensor_variable(x3)
x4 = tensor.as_tensor_variable(x4)
x5 = tensor.as_tensor_variable(x5)
if prm.att_doc:
if prm.compute_emb:
td = tensor.itensor4().type()
else:
td = tensor.ftensor4().type()
tm = tensor.ftensor3().type()
else:
if prm.compute_emb:
td = tensor.itensor3().type()
else:
td = tensor.ftensor3().type()
tm = tensor.fmatrix().type()
return theano.Apply(self, [x,x2,x3,x4,x5], [td, tm, \
tensor.fmatrix().type(), tensor.ivector().type()])
def perform(self, node, inputs, output_storage):
#st = time.time()
pages_id = inputs[0]
p_truth = inputs[1]
it = int(inputs[2])
uidx = int(inputs[3])
k_beam = int(inputs[4])
run = True
if uidx in self.mem:
if it in self.mem[uidx]:
L, L_m, l_page_id, l_truth = self.mem[uidx][it]
run = False
if run:
max_links = k_beam
lst_links = []
for i, page_id in enumerate(pages_id):
if int(page_id) != -1:
links = self.wiki.get_article_links(page_id)
links = list(set(links)) # remove duplicates.
links.sort() # h5py only accepts sorted indexes.
lst_links.append(links)
if len(links) > max_links:
max_links = len(links)
else:
lst_links.append([])
if prm.att_doc:
if prm.compute_emb:
L = np.zeros((len(pages_id), max_links, prm.max_segs_doc, prm.max_words), np.int32)
else:
L = np.zeros((len(pages_id), max_links, prm.max_segs_doc, prm.dim_emb), np.float32)
L_m = np.zeros((len(pages_id), max_links, prm.max_segs_doc), np.float32)
else:
if prm.compute_emb:
L = np.zeros((len(pages_id), max_links, prm.max_words), np.int32)
else:
L = np.zeros((len(pages_id), max_links, prm.dim_emb), np.float32)
L_m = np.zeros((len(pages_id), max_links), np.float32)
l_page_id = -np.ones((len(pages_id), max_links+1), np.float32) # '+1' to consider stop action.
l_truth = np.zeros((len(pages_id)), np.int32)
for i, links in enumerate(lst_links):
if len(links) > 0:
if prm.compute_emb:
# retrieve the precomputed indexes.
links_c = self.wikipre.f['idx'][links]
else:
# retrieve the precomputed embeddings.
links_c = self.wikipre.f['emb'][links]
if prm.att_doc:
L[i,:len(links),:,:] = links_c
links_mask = self.wikipre.f['mask'][links]
for k, link_mask in enumerate(links_mask):
L_m[i,k,:link_mask] = 1.0
else:
L[i,:len(links),:] = links_c
L_m[i,:len(links)] = 1.0
l_page_id[i,1:len(links)+1] = links # +1 because of the stop action.
for k, link_id in enumerate(links):
if link_id == p_truth[i]:
l_truth[i] = k + 1 # +1 because of the stop action.
if uidx in self.mem:
self.mem[uidx][it] = [L, L_m, l_page_id, l_truth]
else:
self.mem = {uidx: {it: [L, L_m, l_page_id, l_truth]}}
output_storage[0][0] = L
output_storage[1][0] = L_m
output_storage[2][0] = l_page_id
output_storage[3][0] = l_truth
#print 'uidx', uidx, 'it', it, 'time Link op:', str(time.time() - st)
def grad(self, inputs, output_grads):
return [tensor.zeros_like(ii, dtype=theano.config.floatX) for ii in inputs]
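# A minimal sketch of the same Op protocol that Link implements above
# (illustrative only; DoubleOp and the demo below are hypothetical and not
# part of WebNav): make_node() declares the symbolic output type and
# perform() fills in the numeric result.
class DoubleOp(theano.Op):
    __props__ = ()

    def make_node(self, x):
        x = tensor.as_tensor_variable(x)
        return theano.Apply(self, [x], [x.type()])

    def perform(self, node, inputs, output_storage):
        output_storage[0][0] = inputs[0] * 2

if __name__ == '__main__':
    xs = tensor.fmatrix()
    double = theano.function([xs], DoubleOp()(xs))
    doubled = double(np.ones((2, 2), dtype=np.float32))  # every element becomes 2.0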
|
bsd-3-clause
| -5,717,748,019,315,914,000
| 36.88806
| 106
| 0.478038
| false
| 3.618674
| false
| false
| false
|
michaeltelford/gatecrasher
|
UDP.py
|
1
|
1267
|
# Network module used for all UDP networking aspects of the Gatecrasher script.
# Developed by Michael Telford.
import socket
# Initializes socket with datagram proto and binds to port arg.
def bind(port):
global s
host = ''
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind((host, port))
def send(addr, port):
global s
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = "gatecrasher request"
address = (addr, port)
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
s.sendto(data, address)
def receive(timeout):
global s
s.settimeout(timeout)
while 1:
try:
string, address = s.recvfrom(1024)
return True
except socket.timeout:
return False
def receive_echo(port):
global s
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.bind(('', port))
# Block until receive and then echo loop (continuous).
while 1:
string, address = s.recvfrom(1024)
s.sendto(string, address)
def close():
global s
try:
s.shutdown(socket.SHUT_RDWR)
s.close()
except socket.error:
pass
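# Hedged usage sketch (illustrative; the address and port below are made up):
# send a probe datagram and wait up to five seconds for any reply before
# closing the socket. This mirrors how the functions above fit together.
if __name__ == '__main__':
    send('192.168.1.10', 5005)   # fire a "gatecrasher request" datagram
    host_is_up = receive(5.0)    # True if anything came back before the timeout
    close()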
# End of module.
|
mit
| -2,328,868,848,086,685,000
| 19.770492
| 79
| 0.629045
| false
| 3.539106
| false
| false
| false
|
fhorinek/pi8bit
|
py8bit/opt/controller.py
|
1
|
45182
|
from collections import OrderedDict
from cell import High, Low, Invisible
import wire
import cell
import pygame
import utils
from pygame import Rect
LEFT = 1
MID = 2
RIGHT = 3
WHEEL_UP = 4
WHEEL_DOWN = 5
MODE_IDLE = 0
MODE_MOVE = 1
MODE_ADD = 2
MODE_DEL = 3
MODE_WIRE = 4
MODE_PAN = 5
MODE_SELECT = 6
MODE_EDIT = 7
MODE_ADD_MODULE = 8
MODE_STEP = 9
MODE_RENAME = 10
NODE_DIR_NA = 0
NODE_DIR_FROM_NODE = 1
NODE_DIR_FROM_INPUT = 2
NODE_DIR_FROM_OUTPUT = 3
LIGHT_NONE = 0
LIGHT_POINT = 1
LIGHT_LINE = 2
class Controller():
def __init__(self, canvas, parent):
self.canvas = canvas
self.parent = parent
self.objects = OrderedDict()
self.objects["LOW"] = Low(self)
self.objects["HIGH"] = High(self)
self.selected = []
self.select = False
self.select_start = False
self.select_rect = Rect(0, 0, 0, 0)
self.possible_move = False
self.pan = False
self.pan_x = 0
self.pan_y = 0
self.pan_offset_x = 0
self.pan_offset_y = 0
self.new_node = False
self.new_node_direction = NODE_DIR_NA
self.zoom = 1.0
self.zoom_step = 0.1
self.obj_id = 0
self.net_id = 0
self.highlight_mode = LIGHT_NONE
self.highlight_pos = False
self.add_index = 0
self.add_list = ["label", "and", "or", "nand", "nor", "xor", "not", "diode", "led", "hex", "tgl", "push", "clk", "input", "output", "memory"]
self.font = pygame.font.Font(pygame.font.get_default_font(), int(self.canvas.style["d_font"] * self.zoom))
self.label_font = pygame.font.Font(pygame.font.get_default_font(), int(self.canvas.style["d_label_font"] * self.zoom))
self.need_solve_drawable = True
self.drawable = []
self.read_only = True
def highlight(self, mode, pos = False):
self.highlight_mode = mode
self.highlight_pos = pos
self.canvas.request_io_redraw()
def get_obj_id(self):
self.obj_id += 1
return self.obj_id
def get_net_id(self):
self.net_id += 1
return self.net_id
def normalize_positons(self):
big_rect = False
for k in self.objects:
o = self.objects[k]
if not isinstance(o, Invisible):
if big_rect:
big_rect = big_rect.union(o.rect)
else:
big_rect = o.rect
offset_x = big_rect[0]
offset_y = big_rect[1]
for k in self.objects:
o = self.objects[k]
pos_x = o.rect[0] - offset_x
pos_y = o.rect[1] - offset_y
o.set_pos(pos_x, pos_y)
def write_file(self, filename):
if self.read_only:
return
lines = ""
self.normalize_positons()
# print "Writing file", filename
line_n = 0
for k in self.objects:
if k in ["HIGH", "LOW"]:
continue
o = self.objects[k]
name = o.name
fcs = o.fcs
p = o.get_params()
if p == False:
continue
params = " ".join(p)
line = "\t".join([name, fcs, params])
lines += "%s\n" % line
# print " %5d: %s" % (line_n, line)
line_n += 1
f = open(filename, "w")
f.write(lines)
f.close()
# print "done", filename
def read_file(self, filename):
print "Reading file", filename
try:
f = open(filename, "r")
data = f.readlines()
f.close()
self.create_objects(data)
print "done", filename
return True
except IOError as e:
print "not found", e
return False
def create_objects(self, data):
params = OrderedDict()
line_n = 0
for line in data:
line_n += 1
arr = line.split()
print " %5d: %s" % (line_n, " ".join(arr))
if (len(arr) < 2):
continue
name = arr[0]
fcs = arr[1]
#calc obj id
s = name.split("_")
if len(s) == 4 and s[0] == "" and s[1] == "":
try:
obj_id = int(s[3])
self.obj_id = max(obj_id + 1, self.obj_id)
except ValueError:
pass
#calc net id
if fcs == "node":
s = arr[3].split("_")
if len(s) == 4 and s[0] == "" and s[1] == "":
try:
net_id = int(s[3])
self.net_id = max(net_id + 1, self.net_id)
except ValueError:
pass
o = False
if fcs in self.canvas.cells:
o = self.canvas.cells[fcs](self)
if (o is not False):
params[name] = arr
self.objects[name] = o
#let each object parse its own parameters
for name in params:
arr = params[name]
o = self.objects[name]
o.parse(arr)
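    # Illustrative sketch of the saved-file format parsed above (the concrete
    # names and parameters are made up; real lines depend on each cell's
    # get_params()). write_file() emits one object per line as
    # name<TAB>fcs<TAB>params, e.g.:
    #
    #   __and_12    and     120x40
    #   __net_3     net
    #   __node_7    node    200x48  __net_3
    #
    # Auto-generated names keep the numeric id in their last "_" field, which
    # is how obj_id and net_id are recovered while reading a file back in.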
def find_cell(self, name):
if name in self.objects:
return self.objects[name]
else:
return False
def find_cell_pin(self, name):
arr = name.split(".")
if (len(arr) == 1):
o_name = arr[0]
o_pin = False
else:
o_name, o_pin = arr
o = self.find_cell(o_name)
if o == False:
print name, "not found!"
return False
if o_pin == False:
if len(o.outputs) > 0:
o_pin = o.outputs[0]
else:
o_pin = False
return o, o_pin
def find_output(self, obj, pin):
for k in self.objects:
o = self.objects[k]
for p in o.inputs:
pair = o.inputs[p]
if pair == False:
continue
if pair[0] == obj and pair[1] == pin:
return o, p
return False
def blit(self, surface, rect):
rect = Rect(rect)
rect.x += self.pan_offset_x
rect.y += self.pan_offset_y
rect.x *= self.zoom
rect.y *= self.zoom
self.canvas.screen.blit(surface, rect)
def draw_circle(self, pos, state):
pos = list(pos)
pos[0] += self.pan_offset_x
pos[1] += self.pan_offset_y
pos = [int(x * self.zoom) for x in pos]
if (state):
color = self.canvas.style["c_high"]
else:
color = self.canvas.style["c_low"]
self.canvas.draw_circle(color, pos, self.zoom)
def draw_line(self, start, end, state):
#copy the data
start = list(start)
end = list(end)
start[0] += self.pan_offset_x
start[1] += self.pan_offset_y
end[0] += self.pan_offset_x
end[1] += self.pan_offset_y
start = [int(x * self.zoom) for x in start]
end = [int(x * self.zoom) for x in end]
if state:
color = self.canvas.style["c_high"]
else:
color = self.canvas.style["c_low"]
self.canvas.draw_line(start, end, color, self.zoom)
def draw_rect(self, surface, color, rect, width = 0):
rect = Rect(rect)
w = int(width * self.zoom)
rect = Rect([int(x * self.zoom) for x in rect])
if width > 0 and w == 0:
w = 1
pygame.draw.rect(surface, color, rect, w)
def draw_text(self, surface, text, rect):
tmp = self.font.render(text, True, self.canvas.style["c_text"])
rect2 = tmp.get_rect()
rect = Rect([int(x * self.zoom) for x in rect])
rect = [rect.x + rect.w / 2 - rect2.w / 2, rect.y + rect.h / 2 - rect2.h / 2]
surface.blit(tmp, rect)
def draw_label(self, text, rect):
tmp = self.label_font.render(text, True, self.canvas.style["c_label"])
rect2 = tmp.get_rect()
rect = Rect([int(x * self.zoom) for x in rect])
rect = [rect.x + rect.w / 2 - rect2.w / 2, rect.y + rect.h / 2 - rect2.h / 2]
return tmp
def label_font_size(self, text):
label_font = pygame.font.Font(pygame.font.get_default_font(), self.canvas.style["d_label_font"])
tmp = label_font.render(text, True, self.canvas.style["c_text"])
rect2 = tmp.get_rect()
return rect2
def draw_highlight(self):
if self.highlight_mode == LIGHT_LINE:
start = list(self.highlight_pos[0])
end = list(self.highlight_pos[1])
width = self.canvas.style["d_line_height"]
w = int(width * self.zoom)
start[0] += self.pan_offset_x
start[1] += self.pan_offset_y
start = [int(x * self.zoom) for x in start]
end[0] += self.pan_offset_x
end[1] += self.pan_offset_y
end = [int(x * self.zoom) for x in end]
if width > 0 and w == 0:
w = 1
pygame.draw.line(self.canvas.screen, self.canvas.style["c_highlight"], start, end, w)
if self.highlight_mode == LIGHT_POINT:
width = self.canvas.style["d_point"]
w = int(width * self.zoom)
point = list(self.highlight_pos)
point[0] += int(self.pan_offset_x)
point[1] += int(self.pan_offset_y)
point = [int(x * self.zoom) for x in point]
if width > 0 and w == 0:
w = 1
pygame.draw.circle(self.canvas.screen, self.canvas.style["c_highlight"], point, w)
def draw_highlight_box(self, rect):
rect = Rect(rect)
width = self.canvas.style["d_line_height"]
w = int(width * self.zoom)
rect.x += self.pan_offset_x
rect.y += self.pan_offset_y
rect = Rect([int(x * self.zoom) for x in rect])
if width > 0 and w == 0:
w = 1
pygame.draw.rect(self.canvas.screen, self.canvas.style["c_highlight"], rect, w)
def mk_surface(self, rect):
size = [int(rect.w * self.zoom), int(rect.h * self.zoom)]
return pygame.Surface(size, self.canvas.surface_flags)
def update_zoom(self):
self.font = pygame.font.Font(pygame.font.get_default_font(), int(self.canvas.style["d_font"] * self.zoom))
self.label_font = pygame.font.Font(pygame.font.get_default_font(), int(self.canvas.style["d_label_font"] * self.zoom))
self.solve_drawable()
for k in self.objects:
self.objects[k].request_update_body()
if self.canvas.mode == MODE_ADD:
self.new_node.request_update_body()
self.canvas.request_redraw()
def request_redraw(self):
for o in self.drawable:
o.request_redraw()
def solve_drawable(self):
self.need_solve_drawable = True
def draw(self, mode):
if self.need_solve_drawable:
self.need_solve_drawable = False
window = Rect(-self.pan_offset_x, -self.pan_offset_y, self.canvas.size[0] / self.zoom, self.canvas.size[1] / self.zoom)
self.drawable = []
for k in self.objects:
self.objects[k].solve_drawable(window, self.drawable)
if mode == MODE_SELECT:
self.canvas.request_redraw()
self.canvas.request_io_redraw()
for o in self.drawable:
o.draw()
o.draw_io()
if mode == MODE_SELECT:
self.select_rect.normalize()
self.draw_highlight_box(self.select_rect)
for o in self.selected:
self.draw_highlight_box(o.rect)
if mode == MODE_WIRE:
self.draw_highlight()
if mode in [MODE_ADD, MODE_ADD_MODULE]:
if self.new_node is not False:
self.new_node.draw()
self.new_node.draw_io()
def tick(self):
for k in self.objects:
self.objects[k].tick()
def reset(self):
for k in self.objects:
self.objects[k].reset()
def request_update(self): pass
def clear_io_cache(self):
for o in self.drawable:
o.clear_io_cache()
def get_object_pos(self, pos, exclude = []):
pos = list(pos)
object_list = list(self.drawable)
object_list.reverse()
for o in object_list:
if o in exclude:
continue
if (o.rect.collidepoint(pos)):
return o
return False
#wire from input / output
def get_line_pos(self, pos, exclude = []):
pos = list(pos)
for o in self.drawable:
if o in exclude:
continue
data = o.check_input_line_collision(pos)
if (data):
if data[2] in exclude:
continue
return data
return False
#wire from net
def get_net_line_pos(self, pos, exclude=[]):
pos = list(pos)
for o in self.drawable:
if isinstance(o, wire.Node):
if o in exclude:
continue
data = o.check_net_line_collision(pos)
if (data):
if data[1] in exclude:
continue
return data
return False
def get_output_pos(self, pos, exclude=[]):
pos = list(pos)
for o in self.drawable:
if o in exclude:
continue
pin = o.check_output_collision(pos)
if (pin):
return o, pin
return False
def get_input_pos(self, pos, exclude=[]):
pos = list(pos)
for o in self.drawable:
if o in exclude:
continue
pin = o.check_input_collision(pos)
if (pin):
return o, pin
return False
def add_object(self, fcs, pos, params = []):
o = self.canvas.cells[fcs](self)
name = "__%s_%d" % (fcs, self.get_obj_id())
self.objects[name] = o
o.update()
o.middle_offset()
pos = "%dx%d" % (pos[0], pos[1])
o.parse([name, fcs, pos] + params)
self.request_redraw()
self.solve_drawable()
return o
def add_node(self, pos, net = False):
o = self.canvas.cells["node"](self)
name = "__node_%d" % (self.get_obj_id())
self.objects[name] = o
o.update()
o.middle_offset()
pos = "%dx%d" % (pos[0], pos[1])
if net is False:
net = self.add_net()
o.parse([name, "node", pos, net.name])
self.request_redraw()
self.solve_drawable()
return o
def add_net(self, net_name = False):
if net_name is False:
net_name = "__net_%d" % (self.get_net_id())
o = self.canvas.cells["net"](self)
self.objects[net_name] = o
o.parse([net_name, "net"])
return o
def apply_grid(self, obj):
g_hor = self.canvas.style["g_hor"]
g_ver = self.canvas.style["g_ver"]
obj.rect.x = int(round(obj.rect.x / float(g_hor)) * g_hor)
obj.rect.y = int(round(obj.rect.y / float(g_ver)) * g_ver)
obj.clear_offset()
obj.update_io_xy()
def delete(self, name):
if name in self.objects:
self.objects[name].disconnect()
del self.objects[name]
self.canvas.request_redraw()
self.solve_drawable()
def select_obj(self, objs):
for o in objs:
if o not in self.selected and not isinstance(o, Invisible):
self.selected.append(o)
#self.canvas.request_io_redraw()
def deselect_obj(self, objs):
for o in objs:
if o in self.selected:
self.selected.remove(o)
self.canvas.request_redraw()
def tglselect_obj(self, obj):
if obj in self.selected:
self.deselect_obj([obj])
else:
self.select_obj([obj])
def clear_selection(self):
self.selected = []
#self.canvas.request_io_redraw()
def rename_obj(self, obj, new_name):
if new_name in self.objects:
return False
del self.objects[obj.name]
obj.name = new_name
self.objects[new_name] = obj
obj.update()
return True
def event(self, event, mode):
#GET event info
hover_object = False
keys = pygame.key.get_pressed()
if hasattr(event, "pos"):
mouse_x = (event.pos[0] / self.zoom) - self.pan_offset_x
mouse_y = (event.pos[1] / self.zoom) - self.pan_offset_y
hover_object = self.get_object_pos([mouse_x, mouse_y])
if keys[pygame.K_LCTRL]:
g_hor = self.canvas.style["g_hor"]
g_ver = self.canvas.style["g_ver"]
mouse_x = int(round(mouse_x / float(g_hor)) * g_hor)
mouse_y = int(round(mouse_y / float(g_ver)) * g_ver)
if event.type == pygame.KEYDOWN:
if event.key == ord('a') and self.canvas.mode == MODE_EDIT:
fcs = self.add_list[self.add_index]
pos = "%dx%d" % (0, 0)
name = "_%s_" % fcs
self.new_node = self.canvas.cells[fcs](self)
self.new_node.update()
self.new_node.middle_offset()
self.new_node.parse([name, fcs, pos])
self.canvas.set_mode(MODE_ADD)
if event.key == ord('m') and self.canvas.mode == MODE_EDIT:
self.canvas.set_mode(MODE_ADD_MODULE)
if event.key == ord('e') and self.canvas.mode in [MODE_IDLE, MODE_WIRE, MODE_RENAME]:
self.highlight(LIGHT_NONE)
self.canvas.set_mode(MODE_EDIT)
if event.key == ord('d') and self.canvas.mode == MODE_IDLE:
self.canvas.set_mode(MODE_STEP)
if event.key == ord('w') and self.canvas.mode == MODE_EDIT:
self.canvas.set_mode(MODE_WIRE)
if event.key == ord('r') and self.canvas.mode == MODE_EDIT:
self.canvas.set_mode(MODE_RENAME)
if event.key == ord('s'):
self.read_only = not self.read_only
self.canvas.request_redraw()
if event.key == pygame.K_SPACE and self.canvas.mode == MODE_STEP:
self.tick()
if event.key == pygame.K_ESCAPE:
self.canvas.request_io_redraw()
if self.canvas.mode == MODE_STEP:
self.canvas.set_mode(MODE_IDLE)
if self.canvas.mode == MODE_EDIT:
self.clear_selection()
self.canvas.set_mode(MODE_IDLE)
if self.canvas.mode == MODE_WIRE:
self.canvas.set_mode(MODE_EDIT)
self.highlight(LIGHT_NONE)
if self.canvas.mode == MODE_ADD:
self.canvas.set_mode(MODE_EDIT)
self.new_node = False
if self.canvas.mode == MODE_ADD_MODULE:
self.canvas.set_mode(MODE_EDIT)
self.new_node = False
if self.canvas.mode == MODE_RENAME:
self.canvas.set_mode(MODE_EDIT)
#PAN works in every mode
#RIGHT DOWN => START PAN
if event.type == pygame.MOUSEBUTTONDOWN and event.button == MID:
self.pan_x = event.pos[0] / self.zoom
self.pan_y = event.pos[1] / self.zoom
self.pan = True
self.mode_before = mode
self.canvas.set_mode(MODE_PAN)
if self.pan:
#RIGHT UP => STOP PAN
if event.type == pygame.MOUSEBUTTONUP and event.button == MID:
self.pan_offset_x += event.pos[0] / self.zoom - self.pan_x
self.pan_offset_y += event.pos[1] / self.zoom - self.pan_y
self.solve_drawable()
self.canvas.request_redraw()
self.pan = False
self.canvas.set_mode(self.mode_before)
if event.type == pygame.MOUSEMOTION:
self.pan_offset_x += event.pos[0] / self.zoom - self.pan_x
self.pan_offset_y += event.pos[1] / self.zoom - self.pan_y
self.pan_x = event.pos[0] / self.zoom
self.pan_y = event.pos[1] / self.zoom
self.solve_drawable()
self.canvas.request_redraw()
#ZOOM works in every mode
if event.type == pygame.MOUSEBUTTONDOWN and event.button == WHEEL_UP:
if self.zoom < 1.5:
self.pan_offset_x -= mouse_x + self.pan_offset_x - event.pos[0] / self.zoom
self.pan_offset_y -= mouse_y + self.pan_offset_y - event.pos[1] / self.zoom
pan_x = event.pos[0] / self.zoom
pan_y = event.pos[1] / self.zoom
self.zoom += self.zoom_step
self.pan_offset_x += event.pos[0] / self.zoom - pan_x
self.pan_offset_y += event.pos[1] / self.zoom - pan_y
self.update_zoom()
if event.type == pygame.MOUSEBUTTONDOWN and event.button == WHEEL_DOWN:
if self.zoom > 0.2:
pan_x = event.pos[0] / self.zoom
pan_y = event.pos[1] / self.zoom
self.zoom -= self.zoom_step
self.pan_offset_x += event.pos[0] / self.zoom - pan_x
self.pan_offset_y += event.pos[1] / self.zoom - pan_y
self.update_zoom()
if mode == MODE_IDLE or mode == MODE_STEP:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
if hover_object is not False:
hover_object.click()
if mode == MODE_RENAME:
#LEFT DOWN => RENAME
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
if hover_object is not False:
if isinstance(hover_object, cell.Label):
label = utils.gui_textedit("Change the label", hover_object.label)
if len(label) == 0:
utils.gui_alert("Error", "Labels can't be empty")
else:
hover_object.label = label
hover_object.update()
self.canvas.set_mode(MODE_EDIT)
else:
if isinstance(hover_object, wire.Node):
obj = hover_object.net
else:
obj = hover_object
old_name = obj.name
name = utils.gui_textedit("Rename the object", obj.name)
if old_name == name:
return
if len(name) == 0:
utils.gui_alert("Error", "Name can't be empty")
return
if not self.rename_obj(obj, name):
utils.gui_alert("Error", "Unable to rename object")
else:
self.canvas.set_mode(MODE_EDIT)
if mode == MODE_EDIT:
#LEFT DOWN => START SELECT
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
if hover_object is False:
#SHIFT prevents clearing the selection
if not keys[pygame.K_LSHIFT]:
self.clear_selection()
self.canvas.set_mode(MODE_SELECT)
self.select_start = [mouse_x, mouse_y]
self.select_rect = pygame.Rect(mouse_x, mouse_y, 0, 0)
else:
if keys[pygame.K_LSHIFT]:
self.tglselect_obj(hover_object)
else:
if hover_object not in self.selected:
self.clear_selection()
self.select_obj([hover_object])
if hover_object in self.selected:
self.possible_move = True
if event.type == pygame.MOUSEBUTTONUP and event.button == LEFT:
if self.possible_move is True:
self.possible_move = False
if event.type == pygame.MOUSEMOTION:
if self.possible_move is True:
self.possible_move = False
for o in self.selected:
o.set_offset(mouse_x - o.rect[0], mouse_y - o.rect[1])
self.canvas.set_mode(MODE_MOVE)
if event.type == pygame.KEYDOWN and event.key == pygame.K_DELETE:
for o in self.selected:
self.delete(o.name)
self.clear_selection()
if mode == MODE_SELECT:
if event.type == pygame.MOUSEMOTION:
w = mouse_x - self.select_start[0]
h = mouse_y - self.select_start[1]
self.select_rect = pygame.Rect(self.select_start[0], self.select_start[1], w, h)
if event.type == pygame.MOUSEBUTTONUP and event.button == LEFT:
self.canvas.request_io_redraw()
for k in self.objects:
o = self.objects[k]
if (self.select_rect.colliderect(o.rect)):
self.select_obj([o])
self.canvas.set_mode(MODE_EDIT);
if mode == MODE_MOVE:
if event.type == pygame.MOUSEBUTTONUP and event.button == LEFT:
self.canvas.request_redraw()
for o in self.selected:
o.set_pos(mouse_x, mouse_y)
self.apply_grid(o)
if (len(self.selected) == 1):
self.clear_selection()
self.canvas.set_mode(MODE_EDIT);
if event.type == pygame.MOUSEMOTION:
self.canvas.request_redraw()
for o in self.selected:
o.set_pos(mouse_x, mouse_y)
if mode == MODE_WIRE:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
print
print "<<"
print "get_object_pos", hover_object
if isinstance(hover_object, wire.Node):
self.new_node = self.add_node([mouse_x, mouse_y], hover_object.net)
self.new_node.add_sibling(hover_object)
self.new_node_direction = NODE_DIR_FROM_NODE
self.solve_drawable()
return
target = self.get_input_pos([mouse_x, mouse_y])
print "get_input_pos", target
if target is not False:
obj, pin = target
self.new_node = self.add_node([mouse_x, mouse_y])
obj.assign_input(pin, self.new_node, "Y")
self.new_node_direction = NODE_DIR_FROM_INPUT
self.solve_drawable()
return
target = self.get_output_pos([mouse_x, mouse_y])
print "get_output_pos", target
if target is not False:
obj, pin = target
self.new_node = self.add_node([mouse_x, mouse_y])
self.new_node.assign_free_input(obj, pin)
self.new_node_direction = NODE_DIR_FROM_OUTPUT
self.solve_drawable()
return
target = self.get_line_pos([mouse_x, mouse_y])
print "get_line_pos", target
if target is not False:
obj, obj_pin, inp, inp_pin = target
start_node = self.add_node([mouse_x, mouse_y])
self.apply_grid(start_node)
if isinstance(inp, wire.Node):
inp.add_sibling(start_node)
start_node.net.remove_node(self.new_node)
self.delete(start_node.net.name)
inp.net.add_node(start_node)
obj.assign_input(obj_pin, start_node, "Y")
if isinstance(obj, wire.Node):
obj.add_sibling(start_node)
start_node.net.remove_node(start_node)
self.delete(start_node.net.name)
obj.net.add_node(start_node)
start_node.assign_free_input(inp, inp_pin)
self.new_node = self.add_node([mouse_x, mouse_y], start_node.net)
self.new_node.add_sibling(start_node)
self.new_node_direction = NODE_DIR_FROM_NODE
self.solve_drawable()
return
target = self.get_net_line_pos([mouse_x, mouse_y])
print "get_net_line_pos", target
if target is not False:
node1, node2, net = target
start_node = self.add_node([mouse_x, mouse_y], net)
self.apply_grid(start_node)
node1.remove_sibling(node2)
node1.add_sibling(start_node)
node2.remove_sibling(node1)
node2.add_sibling(start_node)
self.new_node = self.add_node([mouse_x, mouse_y], start_node.net)
self.new_node.add_sibling(start_node)
self.new_node_direction = NODE_DIR_FROM_NODE
self.solve_drawable()
return
else:
if hover_object is False:
start_node = self.add_node([mouse_x, mouse_y])
self.apply_grid(start_node)
self.new_node = self.add_node([mouse_x, mouse_y], start_node.net)
self.new_node.add_sibling(start_node)
self.new_node_direction = NODE_DIR_FROM_NODE
self.solve_drawable()
if event.type == pygame.MOUSEBUTTONUP and event.button == LEFT:
if self.new_node is not False:
self.new_node.set_pos(mouse_x, mouse_y)
self.apply_grid(self.new_node)
print
print ">>"
target = self.get_object_pos([mouse_x, mouse_y], [self.new_node])
print "get_object_pos", target
if target is not False:
if isinstance(target, wire.Node):
#FROM_INPUT / FROM_OUTPUT will be handled below
if self.new_node_direction == NODE_DIR_FROM_NODE:
prev = self.new_node.siblings[0]
target.add_sibling(prev)
prev.net.asimilate(target.net)
self.delete(self.new_node.name)
self.new_node = False
self.solve_drawable()
return
target = self.get_input_pos([mouse_x, mouse_y], [self.new_node])
print "get_input_pos", target
if target is not False and self.new_node_direction is not NODE_DIR_FROM_INPUT:
obj, pin = target
if self.new_node_direction == NODE_DIR_FROM_NODE:
obj.assign_input(pin, self.new_node.siblings[0], "Y")
if self.new_node_direction == NODE_DIR_FROM_OUTPUT:
key = self.new_node.inputs.keys()[0]
inp, inp_pin = self.new_node.inputs[key]
obj.assign_input(pin, inp, inp_pin)
self.delete(self.new_node.name)
self.new_node = False
self.solve_drawable()
return
target = self.get_output_pos([mouse_x, mouse_y], [self.new_node])
print "get_output_pos", target
if target is not False and self.new_node_direction is not NODE_DIR_FROM_OUTPUT:
obj, pin = target
if self.new_node_direction == NODE_DIR_FROM_NODE:
self.new_node.siblings[0].assign_free_input(obj , pin)
if self.new_node_direction == NODE_DIR_FROM_INPUT:
orig_obj, orig_pin = self.find_output(self.new_node, "Y")
orig_obj.assign_input(orig_pin, obj, pin)
self.delete(self.new_node.name)
self.new_node = False
self.solve_drawable()
return
target = self.get_line_pos([mouse_x, mouse_y], [self.new_node])
print "get_line_pos", target
if target is not False:
obj, obj_pin, inp, inp_pin = target
if isinstance(inp, wire.Node):
inp.add_sibling(self.new_node)
self.new_node.net.asimilate(inp.net)
else:
self.new_node.assign_free_input(inp , inp_pin)
if isinstance(obj, wire.Node):
obj.add_sibling(self.new_node)
obj.clear_input(obj_pin)
self.new_node.net.asimilate(obj.net)
else:
obj.assign_input(obj_pin, self.new_node, "Y")
self.new_node = False
self.solve_drawable()
return
target = self.get_net_line_pos([mouse_x, mouse_y], [self.new_node])
print "get_net_line_pos", target
if target is not False:
node1, node2, net = target
node1.remove_sibling(node2)
node1.add_sibling(self.new_node)
node2.remove_sibling(node1)
node2.add_sibling(self.new_node)
self.new_node.net.asimilate(net)
self.new_node = False
self.solve_drawable()
return
self.new_node = False
self.canvas.request_redraw()
if event.type == pygame.MOUSEBUTTONDOWN and event.button == RIGHT:
if self.new_node is not False:
self.delete(self.new_node.name)
self.new_node = False
else:
#delete node or split siblings or net
if isinstance(hover_object, wire.Node):
siblings = hover_object.net.list_node_sibling(hover_object)
if len(siblings) > 0:
successor = siblings[0]
for node in siblings:
successor.add_sibling(node)
for k in hover_object.inputs:
print "hover_object.input", k, hover_object, hover_object.inputs
obj, pin = hover_object.inputs[k]
successor.assign_free_input(obj, pin)
target = self.find_output(hover_object, "Y")
while target is not False:
obj, pin = target
obj.assign_input(pin, successor, "Y")
target = self.find_output(hover_object, "Y")
self.delete(hover_object.name)
self.highlight(LIGHT_NONE)
self.solve_drawable()
return
target = self.get_line_pos([mouse_x, mouse_y])
print "get_line_pos", target
if target is not False:
obj, obj_pin, inp, inp_pin = target
obj.clear_input(obj_pin)
self.highlight(LIGHT_NONE)
self.solve_drawable()
self.canvas.request_redraw()
return
target = self.get_net_line_pos([mouse_x, mouse_y], [self.new_node])
print "get_net_line_pos", target
if target is not False:
node1, node2, net = target
node1.remove_sibling(node2)
node2.remove_sibling(node1)
net.rebuild()
self.canvas.request_redraw()
self.highlight(LIGHT_NONE)
self.solve_drawable()
return
if event.type == pygame.MOUSEMOTION:
if self.new_node is not False:
self.new_node.set_pos(mouse_x, mouse_y)
self.canvas.request_redraw()
target = self.get_object_pos([mouse_x, mouse_y], [self.new_node])
# print "get_object_pos", target
if target is not False:
if isinstance(target, wire.Node):
self.highlight(LIGHT_POINT, target.output_xy["Y"]);
return
target = self.get_input_pos([mouse_x, mouse_y], [self.new_node])
# print "get_input_pos", target
if target is not False:
obj, pin = target
pos = obj.input_xy[pin]
self.highlight(LIGHT_POINT, pos);
return
target = self.get_output_pos([mouse_x, mouse_y], [self.new_node])
# print "get_output_pos", target
if target is not False:
obj, pin = target
pos = obj.output_xy[pin]
self.highlight(LIGHT_POINT, pos);
return
target = self.get_line_pos([mouse_x, mouse_y], [self.new_node])
# print "get_line_pos", target
if target is not False:
obj, obj_pin, inp, inp_pin = target
if isinstance(obj, wire.Node):
start = obj.output_xy["Y"]
else:
start = obj.input_xy[obj_pin]
if isinstance(inp, wire.Node):
end = inp.output_xy["Y"]
else:
end = inp.output_xy[inp_pin]
self.highlight(LIGHT_LINE, [start, end])
return
target = self.get_net_line_pos([mouse_x, mouse_y], [self.new_node])
# print "get_net_line_pos", target
if target is not False:
node1, node2, net = target
start = node1.output_xy["Y"]
end = node2.output_xy["Y"]
self.highlight(LIGHT_LINE, [start, end])
return
self.highlight(LIGHT_NONE)
if mode == MODE_ADD:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == RIGHT:
self.add_index = (self.add_index + 1) % len(self.add_list)
fcs = self.add_list[self.add_index]
pos = "%dx%d" % (mouse_x, mouse_y)
name = "_%s_" % fcs
self.new_node = self.canvas.cells[fcs](self)
self.new_node.update()
self.new_node.middle_offset()
self.new_node.parse([name, fcs, pos])
self.new_node.drawable = True
self.canvas.request_redraw()
if event.type == pygame.MOUSEMOTION:
if self.new_node is not False:
self.new_node.set_pos(mouse_x, mouse_y)
self.new_node.clear_io_cache()
self.canvas.request_redraw()
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
o = self.add_object(self.add_list[self.add_index], [mouse_x, mouse_y])
self.apply_grid(o)
if mode == MODE_ADD_MODULE:
if event.type == pygame.MOUSEBUTTONDOWN and event.button == RIGHT:
fcs = "module"
pos = "%dx%d" % (mouse_x, mouse_y)
name = "_%s_" % fcs
self.new_node = self.canvas.cells[fcs](self)
self.new_node.update()
self.new_node.middle_offset()
self.new_node.parse([name, fcs, pos])
self.new_node_filename = self.new_node.filename
self.new_node.drawable = True
self.canvas.request_redraw()
if event.type == pygame.MOUSEMOTION:
if self.new_node is not False:
self.new_node.set_pos(mouse_x, mouse_y)
self.new_node.clear_io_cache()
self.canvas.request_redraw()
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
o = self.add_object("module", [mouse_x, mouse_y], [self.new_node_filename])
self.apply_grid(o)
|
gpl-2.0
| -1,118,782,217,478,487,700
| 37.420918
| 149
| 0.441348
| false
| 4.188949
| false
| false
| false
|
lliendo/Radar
|
radar/logger/__init__.py
|
1
|
2413
|
# -*- coding: utf-8 -*-
"""
This file is part of Radar.
Radar is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Radar is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
Lesser GNU General Public License for more details.
You should have received a copy of the Lesser GNU General Public License
along with Radar. If not, see <http://www.gnu.org/licenses/>.
Copyright 2015 Lucas Liendo.
"""
from logging import getLogger, Formatter, shutdown, INFO
from logging.handlers import RotatingFileHandler
from os.path import dirname
from os import mkdir
from errno import EEXIST
from sys import stderr
class LoggerError(Exception):
pass
class RadarLogger(object):
_shared_state = {'logger': None}
def __init__(self, path, logger_name='radar', max_size=100, rotations=5):
self.__dict__ = self._shared_state
self._create_dir(path)
self._shared_state['logger'] = self._configure_logger(path, logger_name, max_size * (1024 ** 2), rotations)
def _create_dir(self, path):
try:
mkdir(dirname(path))
except OSError as e:
if e.errno != EEXIST:
raise LoggerError('Error - Couldn\'t create directory : \'{:}\'. Details : {:}.'.format(path, e.strerror))
def _configure_logger(self, path, logger_name, max_size, rotations):
try:
logger = getLogger(logger_name)
logger.setLevel(INFO)
file_handler = RotatingFileHandler(path, maxBytes=max_size, backupCount=rotations)
file_handler.setFormatter(Formatter(fmt='%(asctime)s - %(message)s', datefmt='%b %d %H:%M:%S'))
logger.addHandler(file_handler)
except Exception as e:
raise LoggerError('Error - Couldn\'t configure Radar logger. Details : {:}.'.format(e))
return logger
@staticmethod
def log(message):
try:
RadarLogger._shared_state['logger'].info(message)
except Exception as e:
stderr.write('Error - Couldn\'t log to Radar logger. Details : {:}.'.format(e))
@staticmethod
def shutdown():
shutdown()
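# Hedged usage sketch (illustrative; the path and sizes below are made up):
# because _shared_state is shared Borg-style, configuring the logger once is
# enough for RadarLogger.log() to work from anywhere afterwards.
if __name__ == '__main__':
    RadarLogger('/tmp/radar/radar.log', max_size=1, rotations=2)
    RadarLogger.log('Radar logger configured.')
    RadarLogger.shutdown()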
|
lgpl-3.0
| -8,177,158,718,975,967,000
| 32.513889
| 122
| 0.665147
| false
| 3.94281
| false
| false
| false
|
gopythongo/gopythongo
|
src/py/gopythongo/vaultgetcert.py
|
1
|
33878
|
# -* encoding: utf-8 *-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import functools
import os
import subprocess
import sys
import hvac
import configargparse
from typing import Dict, Sequence, Iterable, Union, Any, cast, TextIO, Callable
from OpenSSL import crypto
from gopythongo.main import DebugConfigAction
from requests.exceptions import RequestException
out_target = sys.stdout
umask_cur = os.umask(0o022)
os.umask(umask_cur)
def _out(*args: Any, **kwargs: Any) -> None:
if "file" not in kwargs:
kwargs["file"] = sys.stderr
print(*args, **kwargs)
def _result_output(envvar: str, value: str) -> None:
print("%s=%s" % (envvar, value,), file=out_target)
def _result_envdir(envdir: str, envvar: str, value: str) -> None:
fn = os.path.join(envdir, envvar)
_out("writing %s=%s" % (envvar, fn))
with open(fn, mode="wt", encoding="utf-8") as envf:
envf.write(value)
_result = _result_output # type: Callable[..., None]
def _get_masked_mode(mode: Union[int, str]) -> int:
if isinstance(mode, str):
m = int(mode, base=8)
else:
m = mode
return (0o777 ^ umask_cur) & m
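# Worked example (illustrative): with the process umask 0o022 captured above,
# 0o777 ^ 0o022 == 0o755, so _get_masked_mode("0o644") returns 0o644 and
# _get_masked_mode(0o777) returns 0o755, i.e. the group/other write bits are
# stripped from whatever mode the command-line options supply.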
class HelpAction(configargparse.Action):
def __init__(self,
option_strings: Sequence[str],
dest: str,
default: Any=None,
choices: Iterable[Any]=None,
help: str="Show help for GoPythonGo version parsers.") -> None:
super().__init__(option_strings=option_strings, dest=dest, default=default,
nargs="?", choices=choices, help=help)
def __call__(self, parser: configargparse.ArgumentParser, namespace: configargparse.Namespace,
values: Union[str, Sequence[Any], None], option_string: str=None) -> None:
print("Secret Management\n"
"=================\n"
"\n"
"This is a little helper tool that contacts a Vault server to issue a SSL client\n"
"certificate and save its X.509 certificate and private key to local files. If\n"
"you use this on your build server to create client certificates for each\n"
"Continuous Integration (CD) build, you can create client credentials for\n"
"accessing Vault instances or databases or other services on your environments\n"
"right on your buildserver. In my opinion this is the best place for the\n"
"credentials to live, since they can be dynamic and don't need to live in either\n"
"your configuration management software (Puppet/Chef/Ansible/Salt) or in your\n"
"application. Both places which are often shared far and wide throughout your\n"
"organization.\n"
"\n"
"Instead giving each build its own certificate and each deployment environment\n"
"(development/stage/production) it's own (intermediate) CA does not detract from\n"
"security (you have to trust your CD infrastructure implicitly, if it's\n"
"compromised, the attacker can deploy malicious code), but makes it much easier\n"
"to, for example, revoke access credentials in bulk using CRLs.\n"
"\n"
"Finally, you can use the created certificates to access a separate Vault\n"
"instance inside your deployment environments and create local service\n"
"credentials there (like short-lived database access credentials). Thereby\n"
"using Vault's audit backends to create a secure offsite audit trail of activity.\n"
"\n"
"vaultgetcert can also output environment variable key/value pairs and create\n"
"multiple certificate chains for cross-signed trust paths, allowing you to\n"
"centralize secret management as described in the GoPythonGo process "
"documentation.\n"
"\n"
"Here is a cheatsheet for setting up a PKI endpoint in Vault:\n"
"\n"
"# Mount one PKI backend per environment and/or application that gets its own\n"
"# builds on this server and allow builds to remain valid for 1 year (tune to\n"
"# your specifications). Application CAs are better suited to Vault as it binds\n"
"# roles to CAs. Environment CAs are better suited to some servers like Postgres\n"
"# as they bind roles to CNs. Using vaultgetcert you can also easily use\n"
"# cross-signed intermediate CAs and use both approaches.\n"
"vault mount -path=pki-YourApplication -default-lease-ttl=8760h \\\n"
" -max-lease-ttl=8760h pki\n"
"\n"
"# generate an intermediate CA with a 2048 bit key (default)\n"
"vault write pki-YourApplication/intermediate/generate/internal \\\n"
" common_name=\"(YourApplication) Build CA X1\"\n"
"\n"
"# Sign the intermediate CA using your private CA\n"
"# then write the certificate back to the Vault store\n"
"vault write pki-YourApplication/intermediate/set-signed certificate=@cacert.pem\n"
"\n"
"# Now this CA certificate should be installed on the relevant servers, e.g. in\n"
"# Postgres ssl_ca_cert. You can also use the root certificate with a trustchain\n"
"# in the client certificate.\n"
"vault write pki-YourApplication/roles/build ttl=8760h allow_localhost=false \\\n"
" allow_ip_sans=false server_flag=false client_flag=true \\\n"
" allow_any_name=true key_type=rsa\n"
"\n"
"# Request a build certificate for a build.\n"
"# This is basically what vaultgetcert does! So instead of running this command\n"
"# use vaultgetcert :)\n"
"# We \"hack\" the git hash into a domain name SAN because Vault currently\n"
"# doesn't support freetext SANs.\n"
"vault write pki-YourApplication/issue/build common_name=\"yourapp\" \\\n"
" alt_names=\"024572834273498734.git\" exclude_cn_from_sans=true\n"
"\n"
"# Set everything up to authenticate to Vault using these certs. For example:\n"
"vault auth-enable cert\n"
"vault mount -path=db-YourApplication postgresql\n"
"vault write db-YourApplication/config/lease lease=96h lease_max=96h\n"
"vault write db-YourApplication/config/connection connection_url=-\n"
"postgresql://vaultadmin:(PASSWORD)@postgresql.local:5432/YourAppDatabase\n"
"\n"
"vault write db-YourApplication/roles/fullaccess sql=-\n"
" CREATE ROLE \"{{name}}\" WITH LOGIN ENCRYPTED PASSWORD '{{password}}' VALID\n"
" UNTIL '{{expiration}}' IN ROLE \"YourAppDBOwner\" INHERIT NOCREATEROLE\n"
" NOCREATEDB NOSUPERUSER NOREPLICATION NOBYPASSRLS;\n"
"\n"
"vault policy-write yourapp_rights -\n"
"path \"db-YourApplication/creds/fullaccess\" {\n"
" capabilities = [\"read\"]\n"
"}\n"
"\n"
"vault write auth/cert/certs/YourApplication \\\n"
" display_name=yourapp \\\n"
" policies=yourapp_rights \\\n"
" certificate=@cacert.pem \\\n"
" ttl=3600\n")
parser.exit(0)
def get_parser() -> configargparse.ArgumentParser:
parser = configargparse.ArgumentParser(
description="This is a little helper tool that contacts a Vault server to issue a SSL client "
"certificate and save its X.509 certificate and private key to local files. Use "
"--help-verbose to learn more. vaultgetcert expects everything to be PEM encoded. "
"It cannot convert between different formats.",
prog="gopythongo.vaultgetcert",
args_for_setting_config_path=["-c"],
config_arg_help_message="Use this path instead of the default (.gopythongo/vaultwrapper)",
default_config_files=[".gopythongo/vaultgetcert",]
)
parser.add_argument("-o", "--output", dest="output", default=None, env_var="VGC_OUTPUT",
help="Direct output to this file or folder (when in envdir mode). (default: stdout)")
parser.add_argument("--envdir", dest="envdir_mode", default=False, action="store_true", env_var="VGC_ENVDIR",
help="When this is set, vaultgetcert will write each environment variable setting into its "
"own file, creating a DJB daemontools compatible envdir.")
parser.add_argument("--address", dest="vault_address", default="https://vault.local:8200",
env_var="VGC_VAULT_URL",
help="Vault API base URL (default: https://vault.local:8200/). ")
parser.add_argument("--vault-pki", dest="vault_pki", default=None, required=True,
env_var="VGC_VAULT_PKI",
help="The PKI backend path to issue a certificate from Vault (e.g. 'pki/issue/[role]').")
parser.add_argument("--subject-alt-names", dest="subject_alt_names", env_var="VGC_SUBJECT_ALTNAME",
default=None,
help="alt_names parameter to pass to Vault for the issued certificate. (Use a comma-separated "
"list if you want to specify more than one.)")
parser.add_argument("--common-name", dest="common_name", env_var="VGC_COMMON_NAME", default=None, required=True,
help="The CN to pass to Vault for the issued certificate.")
parser.add_argument("--include-cn-in-sans", dest="include_cn_in_sans", env_var="VGC_INCLUDE_CN_IN_SANS",
default=False, action="store_true",
help="Set this if you want the value of --common-name to also show up in the issued "
"certificate's SANs.")
parser.add_argument("--certfile-out", dest="certfile", env_var="VGC_CERTFILE_OUT", required=True,
help="Path of the file where the generated certificate will be stored. ")
parser.add_argument("--keyfile-out", dest="keyfile", env_var="VGC_KEYFILE_OUT", required=True,
help="Path of the file where the generated private key will be stored. Permissions for this "
"file will be set to 600.")
parser.add_argument("--certchain-out", dest="certchain", env_var="VGC_CERTCHAIN_OUT", default=None,
help="Save the issuer CA certificate, which is likely the intermediate CA that you need to "
"provide in the certificate chain.")
parser.add_argument("--overwrite", dest="overwrite", env_var="VGC_OVERWRITE", default=False, action="store_true",
help="When set, this program will overwrite existing certificates and keys on disk. ")
parser.add_argument("--help-verbose", action=HelpAction,
help="Show additional information about how to set up Vault for using vaultgetcert.")
parser.add_argument("--debug-config", action=DebugConfigAction)
gp_xsign = parser.add_argument_group("Handling cross-signing CAs")
gp_xsign.add_argument("--xsign-cacert", dest="xsigners", default=[], action="append", env_var="VGC_XSIGN_CACERT",
help="Can be set multiple times. The argument must be in the form 'bundlename=certificate'. "
"For each certificate specified, vaultgetcert will verify that it uses the same public "
"key as the issuer certificate returned by Vault. It will then create a bundle "
"(concatenated PEM file) for each xsign-cacert with the specified name. MUST be used "
"together with --xsign-bundle-path. You can specify an absolute path for bundlename in "
"which case --xsign-bundle-path will not be used for that bundlename. This option has "
"hacky support for multiple values in its environment variable. You can specify "
"multiple comma-separated values.")
gp_xsign.add_argument("--issuer-bundle", dest="issuer_bundle", default=None,
help="The argument for this is the bundlename for the issuer certificate returned by Vault. "
"That bundlename will be handled like --xsign-cacert bundlenames. It can also be used "
"in --output-bundle-envvar, thereby allowing you to use whichever CA Vault returns like "
"any other well-known CA.")
gp_xsign.add_argument("--xsign-bundle-path", dest="bundlepath", default=None, env_var="VGC_XSIGN_BUNDLE_PATH",
help="A folder where all of the generated files without absolute paths from specified "
"--xsign-cacert parameters will be stored. Existing bundles will be overwritten.")
gp_xsign.add_argument("--output-bundle-envvar", dest="bundle_envvars", default=[], action="append",
env_var="VGC_OUTPUT_BUNDLE_ENVVAR",
help="Can be specified multiple times. The argument must be in the form "
"'envvar=bundlename[:altpath]' (altpath is optional). "
"For each envvar specified vaultgetcert will output 'envvar=bundlepath' to stdout. If "
"you specify 'altpath', 'altpath' will replace the FULL path in bundlepath. The "
"filename will stay the same. This output is meant to be used as configuration "
"environment variables for your program and can be shipped, for example, for usage in "
"/etc/default.")
gp_xsign.add_argument("--output-key-envvar", dest="key_envvars", default=[], action="append",
env_var="VGC_OUTPUT_KEY_ENVVAR",
help="Can be specified multiple times. Output one or more key/value pairs to stdout in the "
"form 'envvar=keyfile' where 'keyfile' is the file specified by --keyfile-out. Each "
"argument should be formatted like 'envvar[:altpath]' where 'altpath' is optional. If "
"'altpath' is specified, the keyfile's path will be replaced by 'altpath' in the "
"output.")
gp_filemode = parser.add_argument_group("File mode options")
gp_filemode.add_argument("--mode-mkdir-output", dest="mode_output_dir", default="0o755",
env_var="VGC_MODE_MKDIR_OUTPUT",
help="If the output folder for the environment variable configuration (--output) doesn't "
"exist yet, create it with these permissions (will be umasked). (default: 0o755)")
gp_filemode.add_argument("--mode-mkdir-certs", dest="mode_certs_dir", default="0o755",
env_var="VGC_MODE_MKDIR_CERTS",
help="If the output folders for certificates and bundles (--certfile-out, "
"--certchain-out, --xsign-bundle-path) doesn't exist yet, create them with these "
"permissions (will be umasked). (default: 0o755)")
gp_filemode.add_argument("--mode-mkdir-key", dest="mode_key_dir", default="0o700",
env_var="VGC_MODE_MKDIR_KEY",
help="If the output folder for the private key (--keyfile-out) doesn't exist yet, "
"create it with these permissions (will be umasked). (default: 0o700)")
gp_filemode.add_argument("--mode-file-output", dest="mode_output_file", default="0o644",
env_var="VGC_MODE_FILE_OUTPUT",
help="Create the output file (--output) with these permissions (will be umasked). "
"(default: 0o644)")
gp_filemode.add_argument("--mode-certbundles", dest="mode_certbundle_files", default="0o644",
env_var="VGC_MODE_CERTBUNDLES",
help="Create the certbundle files (--xsign-cacert) with these permissions (will be "
"umasked). (default: 0o644)")
gp_filemode.add_argument("--mode-keyfile", dest="mode_key_file", default="0o600",
env_var="VGC_MODE_KEYFILE",
help="Create the private key file (--keyfile-out) with these permissions (will be "
"umasked). (default: 0o600)")
gp_https = parser.add_argument_group("HTTPS options")
gp_https.add_argument("--pin-cacert", dest="pin_cacert", default="/etc/ssl/certs/ca-certificates.crt",
env_var="VGC_VAULT_CACERT",
help="Set the CA certificate for Vault (i.e. the server certificate MUST be signed by a CA "
"in this file). The file should contain a list of CA certificates. The default is the "
"location of the Debian Linux CA bundle (Default: '/etc/ssl/certs/ca-certificates.crt')")
gp_https.add_argument("--tls-skip-verify", dest="verify", env_var="VGC_SSL_SKIP_VERIFY", default=True,
action="store_false",
help="Skip SSL verification (only use this during debugging or development!)")
gp_auth = parser.add_argument_group("Vault authentication options")
gp_auth.add_argument("--token", dest="vault_token", env_var="VAULT_TOKEN", default=None,
help="A Vault access token with a valid lease. This is one way of authenticating the wrapper "
"to Vault. This is mutually exclusive with --app-id/--user-id. ")
gp_auth.add_argument("--app-id", dest="vault_appid", env_var="VAULT_APPID", default=None,
help="Set the app-id for Vault app-id authentication.")
gp_auth.add_argument("--user-id", dest="vault_userid", env_var="VAULT_USERID", default=None,
help="Set the user-id for Vault app-id authentication.")
gp_auth.add_argument("--client-cert", dest="client_cert", default=None, env_var="VAULT_CLIENTCERT",
help="Use a HTTPS client certificate to connect.")
gp_auth.add_argument("--client-key", dest="client_key", default=None, env_var="VAULT_CLIENTKEY",
help="Set the HTTPS client certificate private key.")
gp_git = parser.add_argument_group("Git integration")
gp_git.add_argument("--use-git", dest="git_binary", default="/usr/bin/git", env_var="VGC_GIT",
help="Specify an alternate git binary to call for git integration. (default: /usr/bin/git)")
gp_git.add_argument("--git-include-commit-san", dest="git_include_commit_san", default=".", action="store_true",
env_var="VGC_INCLUDE_COMMIT_SAN",
help="If 'git rev-parse HEAD' returns a commit hash, add a certificate SAN called "
"'[commithash].git'.")
return parser
xsign_bundles = {} # type: Dict[str, str]
bundle_vars = {} # type: Dict[str, Dict[str, str]]
def validate_args(args: configargparse.Namespace) -> None:
if args.vault_token:
pass
elif args.vault_appid and args.vault_userid:
pass
elif args.client_cert and args.client_key:
pass
else:
_out("* ERR VAULT CERT UTIL *: You must specify an authentication method, so you must pass either "
"--token or --app-id and --user-id or --client-cert and --client-key or set the VAULT_TOKEN, "
"VAULT_APPID and VAULT_USERID environment variables respectively. If you run GoPythonGo under "
"sudo (e.g. for pbuilder), make sure your build server environment variables also exist in the "
"root shell, or build containers, or whatever else you're using.")
if args.vault_appid:
_out("* INF VAULT CERT UTIL *: appid is set")
if args.vault_userid:
_out("* INF VAULT CERT UTIL *: userid is set")
if args.client_cert:
_out("* INF VAULT CERT UTIL *: client_cert is set")
if args.client_key:
_out("* INF VAULT CERT UTIL *: client_key is set")
sys.exit(1)
if args.client_cert and (not os.path.exists(args.client_cert) or not os.access(args.client_cert, os.R_OK)):
_out("* ERR VAULT CERT UTIL *: %s File not found or no read privileges" % args.client_cert)
sys.exit(1)
if args.client_key and (not os.path.exists(args.client_key) or not os.access(args.client_key, os.R_OK)):
_out("* ERR VAULT CERT UTIL *: %s File not found or no read privileges" % args.client_key)
sys.exit(1)
if os.path.exists(args.certfile) and not args.overwrite:
_out("* ERR VAULT CERT UTIL *: %s already exists and --overwrite is not specified" % args.certfile)
sys.exit(1)
if os.path.exists(os.path.dirname(args.certfile)) and not os.access(os.path.dirname(args.certfile), os.W_OK):
_out("* ERR VAULT CERT UTIL *: %s already exists and is not writable (--certfile-out)" %
os.path.dirname(args.certfile))
sys.exit(1)
if os.path.exists(args.keyfile) and not args.overwrite:
_out("* ERR VAULT CERT UTIL *: %s already exists and --overwrite is not specified" % args.keyfile)
sys.exit(1)
if os.path.exists(os.path.dirname(args.keyfile)) and not os.access(os.path.dirname(args.keyfile), os.W_OK):
_out("* ERR VAULT CERT UTIL *: %s already exists and is not writable (--keyfile-out)" %
os.path.dirname(args.keyfile))
sys.exit(1)
if args.git_include_commit_san and (not os.path.exists(args.git_binary) or not os.access(args.git_binary, os.X_OK)):
_out("* ERR VAULT CERT UTIL *: --git-include-commit-san is set, but Git binary %s does not exist or is not "
"executable" % args.git_binary)
sys.exit(1)
for xcertspec in args.xsigners:
if "," in xcertspec:
xcertspec, y = xcertspec.split(",", 1)[0].strip(), xcertspec.split(",", 1)[1].strip()
args.xsigners += [y]
if "=" not in xcertspec:
_out("* ERR VAULT CERT UTIL *: each --xsign-cacert argument must be formed as 'bundlename=certificate'. "
"%s is not." % xcertspec)
bundlename, xcert = xcertspec.split("=", 1)
if bundlename not in xsign_bundles.keys():
xsign_bundles[bundlename] = xcert
else:
_out("* ERR VAULT CERT UTIL *: duplicate xsigner bundle name %s (from 1:%s and 2:%s=%s)" %
(bundlename, xcertspec, bundlename, xsign_bundles[bundlename]))
if not os.path.exists(xcert) or not os.access(xcert, os.R_OK):
_out("* ERR VAULT CERT UTIL *: %s does not exist or is not readable (from %s)" % (xcert, xcertspec))
sys.exit(1)
if args.issuer_bundle:
xsign_bundles[args.issuer_bundle] = None
if args.bundlepath:
        if os.path.exists(args.bundlepath) and not os.access(args.bundlepath, os.W_OK):
            _out("* ERR VAULT CERT UTIL *: %s is not writable" % args.bundlepath)
            sys.exit(1)
for benvspec in args.bundle_envvars:
if "=" not in benvspec:
_out("* ERR VAULT CERT UTIL *: each --output-bundle-envvar must be formed as 'envvar=bundlename[:altpath]' "
"with altpath being optional. %s is not." % benvspec)
sys.exit(1)
envvar, bundlespec = benvspec.split("=", 1)
if ":" in bundlespec:
bundleref, altpath = bundlespec.split(":", 1)
else:
bundleref, altpath = bundlespec, None
if bundleref not in xsign_bundles.keys():
_out("* ERR VAULT CERT UTIL *: --output-bundle-envvar argument %s references a bundle name %s which has "
"not been specified as an argument to --xsign-cacert." % (benvspec, bundleref))
sys.exit(1)
_out("* INF VAULT CERT UTIL *: registered environment %s" % envvar)
bundle_vars[bundleref] = {
"envvar": envvar,
"altpath": altpath,
}
for perms in [args.mode_output_dir, args.mode_certs_dir, args.mode_key_dir, args.mode_output_file,
args.mode_certbundle_files, args.mode_key_file]:
try:
int(perms, base=8)
except ValueError:
_out("* ERR VAULT CERT UTIL *: %s is not a vaild permission string (must be octal unix file/folder "
"permissions" % perms)
sys.exit(1)
    if args.envdir_mode and os.path.exists(args.output) and not os.path.isdir(args.output):
        _out("* ERR VAULT CERT UTIL *: %s already exists and is not a directory. --envdir requires the output path "
             "to be a directory or not exist." % args.output)
        sys.exit(1)
def main() -> None:
global out_target, _result
_out("* INF VAULT CERT UTIL *: cwd is %s" % os.getcwd())
parser = get_parser()
args = parser.parse_args()
validate_args(args)
vcl = hvac.Client(url=args.vault_address,
token=args.vault_token if args.vault_token else None,
verify=args.pin_cacert if args.pin_cacert else args.verify,
cert=(
args.client_cert,
args.client_key
) if args.client_cert else None)
if not vcl.is_authenticated():
try:
if args.client_cert:
vcl.auth_tls()
if args.vault_appid:
vcl.auth_app_id(args.vault_appid, args.vault_userid)
except RequestException as e:
_out("* ERR VAULT CERT UTIL *: Failure while authenticating to Vault. (%s)" % str(e))
sys.exit(1)
if not vcl.is_authenticated():
_out("* ERR VAULT CERT UTIL *: vaultgetcert was unable to authenticate with Vault, but no error occured "
":(.")
sys.exit(1)
alt_names = args.subject_alt_names or ""
if args.git_include_commit_san:
try:
output = subprocess.check_output([args.git_binary, "rev-parse", "HEAD"],
stderr=subprocess.STDOUT, universal_newlines=True)
except subprocess.CalledProcessError as e:
_out("* ERR VAULT CERT UTIL *: Error %s. trying to get the Git commit hash (git rev-parse HEAD) failed "
"with\n%s" % (e.returncode, e.output))
sys.exit(e.returncode)
output = output.strip()
if len(output) != 40:
_out("* ERR VAULT CERT UTIL *: Git returned a commit-hash of length %s (%s) instead of 40." %
(len(output), output))
sys.exit(1)
if alt_names == "":
alt_names = "%s.git" % output
else:
alt_names = "%s.git,%s" % (output, alt_names)
try:
res = vcl.write(args.vault_pki, common_name=args.common_name, alt_names=alt_names,
exclude_cn_from_sans=not args.include_cn_in_sans)
except RequestException as e:
_out("* ERR VAULT WRAPPER *: Unable to read Vault path %s. (%s)" % (args.cert_key, str(e)))
sys.exit(1)
if "data" not in res or "certificate" not in res["data"] or "private_key" not in res["data"]:
_out("* ERR VAULT CERT UTIL *: Vault returned a value without the necessary fields "
"(data->certificate,private_key). Returned dict was:\n%s" %
str(res))
if os.path.dirname(args.certfile) != "" and not os.path.exists(os.path.dirname(args.certfile)):
_out("* INF VAULT CERT UTIL *: Creating folder %s" % os.path.dirname(args.certfile))
os.makedirs(os.path.dirname(args.certfile), mode=_get_masked_mode(args.mode_certs_dir), exist_ok=True)
if os.path.dirname(args.keyfile) != "" and not os.path.exists(os.path.dirname(args.keyfile)):
_out("* INF VAULT CERT UTIL *: Creating folder %s" % os.path.dirname(args.keyfile))
os.makedirs(os.path.dirname(args.keyfile), mode=_get_masked_mode(args.mode_key_dir), exist_ok=True)
for bundlename in xsign_bundles.keys():
if os.path.dirname(bundlename) != "" and not os.path.exists(os.path.dirname(bundlename)):
_out("* INF VAULT CERT UTIL *: Creating folder %s" % os.path.dirname(bundlename))
os.makedirs(os.path.dirname(bundlename), mode=_get_masked_mode(args.mode_certs_dir),
exist_ok=True)
with open(args.certfile, "wt", encoding="ascii") as certfile, \
open(args.keyfile, "wt", encoding="ascii") as keyfile:
os.chmod(args.certfile, _get_masked_mode(args.mode_certbundle_files))
os.chmod(args.keyfile, _get_masked_mode(args.mode_key_file))
certfile.write(res["data"]["certificate"].strip())
certfile.write("\n")
keyfile.write(res["data"]["private_key"].strip())
keyfile.write("\n")
if args.certchain:
with open(args.certchain, "wt", encoding="ascii") as certchain:
certchain.write(res["data"]["issuing_ca"].strip())
certchain.write("\n")
_out("* INF VAULT CERT UTIL *: the issued certificate and key have been stored in %s and %s" %
(args.certfile, args.keyfile))
if args.certchain:
_out("* INF VAULT CERT UTIL *: the certificate chain has been stored in %s" % args.certchain)
vault_pubkey = crypto.load_certificate(
crypto.FILETYPE_PEM,
res["data"]["issuing_ca"]
).get_pubkey().to_cryptography_key().public_numbers()
vault_subject = crypto.load_certificate(
crypto.FILETYPE_PEM,
res["data"]["issuing_ca"]
).get_subject().get_components()
if args.bundlepath and not os.path.exists(args.bundlepath):
os.makedirs(args.bundlepath, mode=_get_masked_mode(args.mode_certs_dir), exist_ok=True)
for bundlename in xsign_bundles.keys():
if xsign_bundles[bundlename] is None:
x509str = res["data"]["issuing_ca"]
else:
with open(xsign_bundles[bundlename], mode="rt", encoding="ascii") as xcacert:
x509str = xcacert.read()
        # the cross-signing certificate must sign the same keypair as the issuing_ca returned by Vault.
# Let's check...
xsign_pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, x509str).get_pubkey() \
.to_cryptography_key().public_numbers()
if vault_pubkey != xsign_pubkey:
xsign_subject = crypto.load_certificate(crypto.FILETYPE_PEM, x509str).get_subject().get_components()
_out("* ERR VAULT CERT UTIL *: Cross-signing certificate %s has a different public key as the CA returned "
"by Vault. This certificate is invalid for the bundle.\n"
"***Xsign subject***\n%s\n***Vault subject***\n%s" %
(bundlename,
", ".join(["%s=%s" % (k.decode("utf-8"), v.decode("utf-8")) for k, v in xsign_subject]),
", ".join(["%s=%s" % (k.decode("utf-8"), v.decode("utf-8")) for k, v in vault_subject])))
sys.exit(1)
fn = bundlename
if args.bundlepath and not os.path.isabs(bundlename):
fn = os.path.join(args.bundlepath, os.path.basename(bundlename))
with open(fn, "wt", encoding="ascii") as bundle:
_out("* INF VAULT CERT UTIL *: Creating bundle %s" % fn)
bundle.write(res["data"]["certificate"].strip())
bundle.write("\n")
bundle.write(x509str.strip())
bundle.write("\n")
if args.output and args.envdir_mode:
if not os.path.exists(args.output):
os.makedirs(args.output, mode=_get_masked_mode(0o755), exist_ok=True)
_result = cast(Callable[..., None], functools.partial(_result_envdir, args.output))
_out("writing envdir to %s" % args.output)
elif args.output:
if not os.path.exists(os.path.dirname(args.output)):
os.makedirs(os.path.dirname(args.output), mode=_get_masked_mode(0o755), exist_ok=True)
out_target = cast(TextIO, open(args.output, mode="wt", encoding="utf-8"))
_out("writing output to %s" % args.output)
for bundleref in bundle_vars.keys():
# _result goes to stdout or --output
fn = bundleref
if args.bundlepath and not os.path.isabs(bundleref):
fn = os.path.join(args.bundlepath, bundleref)
_result(bundle_vars[bundleref]["envvar"],
fn.replace(os.path.dirname(fn), bundle_vars[bundleref]["altpath"])
if bundle_vars[bundleref]["altpath"] else fn)
for keyvar in args.key_envvars:
if ":" in keyvar:
envvar, altpath = keyvar.split(":", 1)
else:
envvar, altpath = keyvar, None
_result(envvar, args.keyfile.replace(os.path.dirname(args.keyfile), altpath) if altpath else args.keyfile)
if args.output:
out_target.close()
_out("*** Done.")
if __name__ == "__main__":
main()
|
mpl-2.0
| -4,284,949,911,080,452,000
| 55.842282
| 120
| 0.591977
| false
| 3.877976
| true
| false
| false
|
rekyuu/rpyg
|
src/objects/tiles.py
|
1
|
2154
|
# Default datasets for dungeon tiles.
walls = {
'north' : True,
'east' : True,
'south' : True,
'west' : True
}
entities = {
'items' : [],
'objects' : [],
'enemies' : [],
'npcs' : []
}
# Defines a series of tiles with walls.
class Tile (object):
def __init__ (self, walls=walls, entities=entities, text=''):
        # Marks whether the tile has been visited during maze generation.
self.visited = False
        # Wall definitions for the tile, given as a dictionary of booleans.
self.wall_north = walls['north']
self.wall_east = walls['east']
self.wall_south = walls['south']
self.wall_west = walls['west']
# Defines if the tile is an entrance or exit.
self.entrance = False
self.exit = False
# Lists of various entities on the tile.
        # list() copies keep tiles from sharing the default entities' lists.
        self.items = list(entities['items'])
        self.objects = list(entities['objects'])
        self.enemies = list(entities['enemies'])
        self.npcs = list(entities['npcs'])
# Text that displays when the player enters the tile.
self.text = text
# Removes walls during generation.
def remove_wall (self, wall):
if wall == 'north':
self.wall_north = False
elif wall == 'east':
self.wall_east = False
elif wall == 'south':
self.wall_south = False
elif wall == 'west':
self.wall_west = False
# Marks a tile as processed during generation.
def visit (self):
self.visited = True
# Sets the tile as the entrance.
def set_entrance (self):
self.entrance = True
# Sets the tile as the exit.
def set_exit (self):
self.exit = True
# Sets a list of items on the tile.
def set_items (self, items):
self.items = items
# Sets a list of interactable objects on the tile.
def set_objects (self, objects):
self.objects = objects
# Sets a list of enemies on the tile.
def set_enemies (self, enemies):
self.enemies = enemies
# Sets a list of npcs on the tile.
def set_npcs (self, npcs):
self.npcs = npcs
# Text that displays as the player(s) enter the tile.
def enter_text (self):
out = ['You enter a dim corridor.']
if self.exit:
out.append('\nYou find yourself at the exit.')
out = ''.join(out)
return out
def set_text (self, text):
self.text = text
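# Illustrative usage (not part of the original module): carve a passage
# between two adjacent tiles by dropping the facing walls on each side.
#   left, right = Tile(), Tile()
#   left.remove_wall('east')
#   right.remove_wall('west')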
|
mit
| -8,520,644,150,798,013,000
| 20.117647
| 79
| 0.654596
| false
| 2.860558
| false
| false
| false
|
ibid/ibid
|
ibid/source/__init__.py
|
1
|
2434
|
# Copyright (c) 2008-2010, Michael Gorven, Stefano Rivera
# Released under terms of the MIT/X/Expat Licence. See COPYING for details.
from copy import copy
try:
from twisted.plugin import pluginPackagePaths
except ImportError:
# Not available in Twisted 2.5.0 in Ubuntu hardy
# This is straight from twisted.plugin
import os.path
import sys
def pluginPackagePaths(name):
package = name.split('.')
return [os.path.abspath(os.path.join(x, *package)) for x in sys.path
if not os.path.exists(os.path.join(x, *package + ['__init__.py']))]
__path__ = pluginPackagePaths(__name__) + __path__
class IbidSourceFactory(object):
supports = ()
auth = ()
permissions = ()
def __new__(cls, *args):
cls.type = cls.__module__.split('.')[2]
for name, option in options.items():
new = copy(option)
default = getattr(cls, name)
new.default = default
setattr(cls, name, new)
return super(IbidSourceFactory, cls).__new__(cls, *args)
def __init__(self, name):
self.name = name
self.setup()
def setup(self):
"Apply configuration. Called on every config reload"
pass
def setServiceParent(self, service):
"Start the source and connect"
raise NotImplementedError
def connect(self):
"Connect (if disconncted)"
return self.setServiceParent(None)
def disconnect(self):
"Disconnect source"
raise NotImplementedError
def url(self):
"Return a URL describing the source"
return None
def logging_name(self, identity):
"Given an identity or connection, return a name suitable for logging"
return identity
def truncation_point(self, response, event=None):
"""Given a target, and possibly a related event, return the number of
bytes to clip at, or None to indicate that a complete message will
be delivered.
"""
if (event is not None
and response.get('target', None) == event.get('channel', None)
and event.get('public', True)):
return 490
return None
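    # Illustrative behaviour (hypothetical event): a public reply sent back to
    # the channel the event came from is clipped at 490 bytes; a private reply
    # or a reply to a different target returns None, i.e. the full message is
    # delivered.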
from ibid.config import Option
options = {
'auth': Option('auth', 'Authentication methods to allow'),
'permissions': Option('permissions', 'Permissions granted to users on this source')
}
# vi: set et sta sw=4 ts=4:
|
gpl-3.0
| 6,954,459,534,629,569,000
| 28.325301
| 87
| 0.618324
| false
| 4.146508
| false
| false
| false
|
tfeldmann/tryagain
|
test_tryagain.py
|
1
|
8433
|
import mock
import pytest
import logging
import tryagain
import functools
class Namespace:
pass
def _return_true():
return True
def _raise_exception():
raise Exception()
def test_call_once():
assert tryagain.call(_return_true) is True
def test_call_twice():
assert tryagain.call(_return_true, max_attempts=2) is True
def test_raise_after_retry():
with pytest.raises(Exception):
tryagain.call(_raise_exception, max_attempts=2)
def test_wait_time():
def works_on_second_try():
if ns.count == 0:
ns.count = 1
raise ValueError
return True
ns = Namespace()
ns.count = 0
with mock.patch('time.sleep') as mock_sleep:
assert tryagain.call(works_on_second_try, wait=1.2) is True
mock_sleep.assert_called_once_with(1.2)
def test_custom_wait_function():
def mywait(attempt):
ns.counter = attempt
return 0
ns = Namespace()
ns.counter = 0
with pytest.raises(Exception):
tryagain.call(_raise_exception, wait=mywait, max_attempts=2)
assert ns.counter == 1
def test_repeat():
assert (
list(tryagain._repeat('x', times=10)) ==
['x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x', 'x'])
def test_is_callable():
with pytest.raises(TypeError):
tryagain._assert_callable(None, allow_none=False)
with pytest.raises(TypeError):
tryagain._assert_callable(3, allow_none=True)
assert tryagain._assert_callable(_return_true) is None
assert tryagain._assert_callable(lambda: None) is None
def test_attempts():
with pytest.raises(ValueError):
tryagain.call(_return_true, max_attempts=0)
assert tryagain.call(_return_true, max_attempts=None)
assert tryagain.call(_return_true, max_attempts=1)
def test_full_execution():
ns = Namespace()
actions = []
ns.count = 0
def unstable():
ns.count += 1
if ns.count == 3:
actions.append('success %s' % ns.count)
return 'result %s' % ns.count
else:
actions.append('fail %s' % ns.count)
raise Exception
def cleanup():
actions.append('cleanup %s' % ns.count)
def pre_retry():
actions.append('pre_retry %s' % ns.count)
def wait(attempt):
actions.append('wait %s' % attempt)
return 0
result = tryagain.call(unstable, wait=wait, max_attempts=5,
cleanup_hook=cleanup, pre_retry_hook=pre_retry)
print(actions)
assert actions == [
'fail 1', 'cleanup 1', 'wait 1', 'pre_retry 1',
'fail 2', 'cleanup 2', 'wait 2', 'pre_retry 2',
'success 3']
assert result == 'result 3'
def test_full_execution_decorator():
ns = Namespace()
actions = []
ns.count = 0
def cleanup():
actions.append('cleanup %s' % ns.count)
def pre_retry():
actions.append('pre_retry %s' % ns.count)
def wait(attempt):
actions.append('wait %s' % attempt)
return 0
@tryagain.retries(wait=wait, max_attempts=5,
cleanup_hook=cleanup, pre_retry_hook=pre_retry)
def unstable():
ns.count += 1
if ns.count == 3:
actions.append('success %s' % ns.count)
return 'result %s' % ns.count
else:
actions.append('fail %s' % ns.count)
raise Exception
result = unstable()
print(actions)
assert actions == [
'fail 1', 'cleanup 1', 'wait 1', 'pre_retry 1',
'fail 2', 'cleanup 2', 'wait 2', 'pre_retry 2',
'success 3']
assert result == 'result 3'
class reprwrapper(object):
def __init__(self, repr, func):
self._repr = repr
self._func = func
functools.update_wrapper(self, func)
def __call__(self, *args, **kw):
return self._func(*args, **kw)
def __repr__(self):
return self._repr
def test_logging():
ns = Namespace()
ns.count = 0
def unstable():
ns.count += 1
if ns.count == 2:
return True
else:
raise Exception('Exception message')
wrapped_unstable = reprwrapper('unstable', unstable)
logger = logging.getLogger('tryagain')
with mock.patch.object(logger, 'debug') as mock_debug:
assert tryagain.call(wrapped_unstable) is True
mock_debug.assert_called_once_with(
'Attempt 1 at calling unstable failed (Exception message)')
def test_logging_limited_attempts():
ns = Namespace()
ns.count = 0
def unstable():
ns.count += 1
if ns.count == 2:
return True
else:
raise Exception('Exception message')
wrapped_unstable = reprwrapper('unstable', unstable)
logger = logging.getLogger('tryagain')
with mock.patch.object(logger, 'debug') as mock_debug:
assert tryagain.call(wrapped_unstable, max_attempts=5) is True
mock_debug.assert_called_once_with(
'Attempt 1 / 5 at calling unstable failed (Exception message)')
def test_decorator():
ns = Namespace()
ns.count = 0
@tryagain.retries()
def unstable():
ns.count += 1
if ns.count == 2:
return True
else:
raise Exception('Exception message')
assert tryagain.call(unstable)
def test_decorator_with_parameters():
ns = Namespace()
ns.count = 0
@tryagain.retries(max_attempts=5)
def unstable():
ns.count += 1
if ns.count == 2:
return True
else:
raise Exception('Exception message')
assert tryagain.call(unstable)
def test_decorator_in_class():
class MyClass:
def __init__(self):
self.count = 0
@tryagain.retries(max_attempts=5)
def unstable(self, pass_on_count):
self.count += 1
if self.count == pass_on_count:
return True
else:
raise Exception('Exception message')
with pytest.raises(Exception):
c1 = MyClass()
c1.unstable(pass_on_count=10)
c2 = MyClass()
assert c2.unstable(pass_on_count=2) is True
def test_decorator_fails():
ns = Namespace()
ns.count = 0
@tryagain.retries(max_attempts=5)
def unstable(pass_on_count=2):
ns.count += 1
if ns.count == pass_on_count:
return True
else:
raise Exception('Exception message')
with pytest.raises(Exception):
unstable(pass_on_count=10)
ns.count = 0
assert unstable(pass_on_count=2) is True
def test_unexpected_exception():
@tryagain.retries(max_attempts=5, exceptions=(TypeError, ValueError))
def unstable():
ns.count += 1
raise EnvironmentError()
ns = Namespace()
ns.count = 0
with pytest.raises(EnvironmentError):
unstable()
assert ns.count == 1
def test_multiple_exceptions():
@tryagain.retries(exceptions=(ValueError, OSError))
def unstable(pass_on_count=2):
ns.count += 1
if ns.count == 1:
raise OSError
elif ns.count < pass_on_count:
raise ValueError
else:
return True
ns = Namespace()
ns.count = 0
assert unstable(pass_on_count=5) is True
def test_exception_in_wait_function():
def wait(attempt):
raise ValueError('Exception in wait function')
with pytest.raises(ValueError):
tryagain.call(_raise_exception, wait=wait)
def test_exception_in_cleanup_hook():
def cleanup():
raise ValueError('Exception in cleanup')
with pytest.raises(ValueError):
tryagain.call(_raise_exception, cleanup_hook=cleanup)
def test_exception_in_pre_retry_hook():
def pre_retry():
raise ValueError('Exception in pre_retry hook')
with pytest.raises(ValueError):
tryagain.call(_raise_exception, pre_retry_hook=pre_retry)
def test_callable_hooks():
def wait():
# parameter 'attempt' is missing
pass
def pre_retry(too, many, arguments):
pass
def cleanup(too, many, arguments):
pass
with pytest.raises(TypeError):
tryagain.call(_raise_exception, wait=wait)
with pytest.raises(TypeError):
tryagain.call(_raise_exception, pre_retry_hook=pre_retry)
with pytest.raises(TypeError):
tryagain.call(_raise_exception, cleanup_hook=cleanup)
|
mit
| 3,310,043,868,446,940,000
| 22.957386
| 75
| 0.59362
| false
| 3.776534
| true
| false
| false
|
paulcwatts/django-auth-utils
|
auth_utils/utils.py
|
1
|
1104
|
from django.contrib.auth.models import User
from django.conf import settings
def is_allowed_username(username):
disallowed = getattr(settings, 'AUTH_DISALLOWED_USERNAMES', [])
return username.lower() not in disallowed
def get_username(basename):
disallowed = getattr(settings, 'AUTH_DISALLOWED_USERNAMES', [])
# Truncate the basename to 27 characters
# (The username is only 30 characters)
basename = basename[:27]
if basename.lower() not in disallowed:
try:
# First just try their username
User.objects.get(username__iexact=basename)
except User.DoesNotExist:
return basename
i = 0
while True:
try:
username = basename + str(i)
if username.lower() not in disallowed:
User.objects.get(username__iexact=username)
i = i + 1
except User.DoesNotExist:
return username
def email_to_username(email):
# Generate a unique username from the email address
basename = email.split('@')[0].lower()
return get_username(basename)
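# Illustrative behaviour (hypothetical data, not part of the original module):
#   email_to_username("jane.doe@example.com") returns "jane.doe" if that
#   username is free; otherwise get_username() tries "jane.doe0", "jane.doe1",
#   ... until User.objects.get(username__iexact=...) raises DoesNotExist.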
|
bsd-3-clause
| 8,254,636,861,293,090,000
| 28.052632
| 67
| 0.641304
| false
| 4.433735
| false
| false
| false
|
tdsticks/crontab
|
py/wiki20/wiki20/model/__init__.py
|
1
|
2437
|
# -*- coding: utf-8 -*-
"""The application's model objects"""
from zope.sqlalchemy import ZopeTransactionExtension
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
# Global session manager: DBSession() returns the Thread-local
# session object appropriate for the current web request.
maker = sessionmaker(autoflush=True, autocommit=False,
extension=ZopeTransactionExtension())
DBSession = scoped_session(maker)
# Base class for all of our model classes: By default, the data model is
# defined with SQLAlchemy's declarative extension, but if you need more
# control, you can switch to the traditional method.
DeclarativeBase = declarative_base()
# There are two convenient ways for you to spare some typing.
# You can have a query property on all your model classes by doing this:
# DeclarativeBase.query = DBSession.query_property()
# Or you can use a session-aware mapper as it was used in TurboGears 1:
# DeclarativeBase = declarative_base(mapper=DBSession.mapper)
# Global metadata.
# The default metadata is the one from the declarative base.
metadata = DeclarativeBase.metadata
# If you have multiple databases with overlapping table names, you'll need a
# metadata for each database. Feel free to rename 'metadata2'.
# from sqlalchemy import MetaData
# metadata2 = MetaData()
#####
# Generally you will not want to define your tables' mappers and data objects
# here in __init__; instead, create modules for them in the model directory
# and import them at the bottom of this file.
######
def init_model(engine):
"""Call me before using any of the tables or classes in the model."""
DBSession.configure(bind=engine)
# If you are using reflection to introspect your database and create
# table objects for you, your tables must be defined and mapped inside
# the init_model function, so that the engine is available if you
# use the model outside tg2, you need to make sure this is called before
# you use the model.
#
# See the following example:
#
# global t_reflected
# t_reflected = Table("Reflected", metadata,
# autoload=True, autoload_with=engine)
# mapper(Reflected, t_reflected)
# Import your model modules here.
from wiki20.model.auth import User, Group, Permission
from wiki20.model.page import Page
__all__ = ('User', 'Group', 'Permission')
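# Illustrative startup call (hypothetical database URL, not part of the
# original module):
#   from sqlalchemy import create_engine
#   init_model(create_engine('sqlite:///devdata.db'))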
|
gpl-2.0
| -153,422,786,090,273,340
| 37.68254
| 78
| 0.736972
| false
| 4.082077
| false
| false
| false
|
qsnake/qsnake
|
spkg/base/qsnake_run.py
|
1
|
27165
|
#! /usr/bin/env python
import os
import sys
from time import sleep
from glob import glob
from os.path import expandvars
from optparse import OptionParser
import tempfile
import subprocess
import time
import urllib2
import json
version = "0.9.12"
release_date = "May 7, 2011"
class CmdException(Exception):
pass
class PackageBuildFailed(Exception):
pass
class PackageNotFound(Exception):
pass
def main():
systemwide_python = (os.environ["QSNAKE_SYSTEMWIDE_PYTHON"] == "yes")
if systemwide_python:
print """\
***************************************************
Qsnake is not installed. Running systemwide Python.
Only use this mode to install Qsnake.
***************************************************"""
parser = OptionParser(usage="""\
[options] [commands]
Commands:
update Updates the downloaded packages
install PACKAGE Installs the package 'PACKAGE'
list Lists all installed packages
test Runs the Qsnake testsuite
develop Equivalent of 'setup.py develop'""")
parser.add_option("--version",
action="store_true", dest="version",
default=False, help="print Qsnake version and exit")
parser.add_option("-v", "--verbose",
action="store_true", dest="verbose",
default=False, help="Make Qsnake verbose")
parser.add_option("-i", "--install",
action="store", type="str", dest="install", metavar="PACKAGE",
default="", help="install a spkg package")
parser.add_option("-f", "--force",
action="store_true", dest="force",
default=False, help="force the installation")
parser.add_option("-d", "--download_packages",
action="store_true", dest="download",
default=False, help="download standard spkg packages")
parser.add_option("-b", "--build",
action="store_true", dest="build",
default=False, help="build Qsnake")
parser.add_option("-j",
action="store", type="int", dest="cpu_count", metavar="NCPU",
default=0, help="number of cpu to use (0 = all), default 0")
parser.add_option("-s", "--shell",
action="store_true", dest="shell",
default=False, help="starts a Qsnake shell")
parser.add_option("--script",
action="store", type="str", dest="script", metavar="SCRIPT",
default=None, help="runs '/bin/bash SCRIPT' in a Qsnake shell")
# Not much used:
#parser.add_option("--python",
# action="store", type="str", dest="python", metavar="SCRIPT",
# default=None, help="runs 'python SCRIPT' in a Qsnake shell")
# These are not used either:
#parser.add_option("--unpack",
# action="store", type="str", dest="unpack", metavar="PACKAGE",
# default=None, help="unpacks the PACKAGE into the 'devel/' dir")
#parser.add_option("--pack",
# action="store", type="str", dest="pack", metavar="PACKAGE",
# default=None, help="creates 'devel/PACKAGE.spkg' from 'devel/PACKAGE'")
#parser.add_option("--devel-install",
# action="store", type="str", dest="devel_install", metavar="PACKAGE",
# default=None, help="installs 'devel/PACKAGE' into Qsnake directly")
parser.add_option("--create-package",
action="store", type="str", dest="create_package",
metavar="PACKAGE", default=None,
help="creates 'PACKAGE.spkg' in the current directory using the official git repository sources")
parser.add_option("--upload-package",
action="store", type="str", dest="upload_package",
metavar="PACKAGE", default=None,
help="upload 'PACKAGE.spkg' from the current directory to the server (for Qsnake developers only)")
parser.add_option("--release-binary",
action="store_true", dest="release_binary",
default=False, help="creates a binary release using the current state (for Qsnake developers only)")
parser.add_option("--lab",
action="store_true", dest="run_lab",
default=False, help="runs lab()")
parser.add_option("--verify-database",
action="store_true", dest="verify_database",
default=False,
help="verifies the package database integrity")
parser.add_option("--erase-binary",
action="store_true", dest="erase_binary",
default=False,
help="erases all binaries (keeps downloads)")
options, args = parser.parse_args()
if options.verbose:
global global_cmd_echo
global_cmd_echo = True
if len(args) == 1:
arg, = args
if arg == "update":
command_update()
return
elif arg == "list":
command_list()
return
elif arg == "develop":
command_develop()
return
elif arg == "test":
run_tests()
return
print "Unknown command"
sys.exit(1)
elif len(args) == 2:
arg1, arg2 = args
if arg1 == "install":
try:
install_package(arg2, cpu_count=options.cpu_count,
force_install=options.force)
except PackageBuildFailed:
print
print "Package build failed."
return
print "Unknown command"
sys.exit(1)
elif len(args) == 0:
pass
else:
print "Too many arguments"
sys.exit(1)
if options.download:
download_packages()
return
if options.install:
try:
install_package(options.install, cpu_count=options.cpu_count,
force_install=options.force)
except PackageBuildFailed:
pass
return
if options.build:
build(cpu_count=options.cpu_count)
return
if options.shell:
print "Type CTRL-D to exit the Qsnake shell."
cmd("cd $CUR; /bin/bash --rcfile $QSNAKE_ROOT/spkg/base/qsnake-shell-rc")
return
if options.script:
setup_cpu(options.cpu_count)
try:
cmd("cd $CUR; /bin/bash " + options.script)
except CmdException:
print "Qsnake script exited with an error."
return
#if options.python:
# cmd("cd $CUR; /usr/bin/env python " + options.python)
# return
#if options.unpack:
# pkg = pkg_make_absolute(options.unpack)
# print "Unpacking '%(pkg)s' into 'devel/'" % {"pkg": pkg}
# cmd("mkdir -p $QSNAKE_ROOT/devel")
# cmd("cd $QSNAKE_ROOT/devel; tar xjf %s" % pkg)
# return
#if options.pack:
# dir = options.pack
# if not os.path.exists(dir):
# dir = expandvars("$QSNAKE_ROOT/devel/%s" % dir)
# if not os.path.exists(dir):
# raise Exception("Unknown package to pack")
# dir = os.path.split(dir)[1]
# print "Creating devel/%(dir)s.spkg from devel/%(dir)s" % {"dir": dir}
# cmd("cd $QSNAKE_ROOT/devel; tar cjf %(dir)s.spkg %(dir)s" % \
# {"dir": dir})
# return
#if options.devel_install:
# dir = options.devel_install
# if not os.path.exists(dir):
# dir = expandvars("$QSNAKE_ROOT/devel/%s" % dir)
# if not os.path.exists(dir):
# raise Exception("Unknown package to pack")
# dir = os.path.normpath(dir)
# dir = os.path.split(dir)[1]
# print "Installing devel/%(dir)s into Qsnake" % {"dir": dir}
# cmd("mkdir -p $QSNAKE_ROOT/spkg/build/")
# cmd("rm -rf $QSNAKE_ROOT/spkg/build/%(dir)s" % {"dir": dir})
# cmd("cp -r $QSNAKE_ROOT/devel/%(dir)s $QSNAKE_ROOT/spkg/build/" % \
# {"dir": dir})
# setup_cpu(options.cpu_count)
# cmd("cd $QSNAKE_ROOT/spkg/build/%(dir)s; /bin/bash spkg-install" % \
# {"dir": dir})
# cmd("rm -rf $QSNAKE_ROOT/spkg/build/%(dir)s" % {"dir": dir})
# return
if options.create_package:
create_package(options.create_package)
return
if options.upload_package:
upload_package(options.upload_package)
return
if options.release_binary:
release_binary()
return
if options.run_lab:
run_lab()
return
if options.verify_database:
verify_database()
return
if options.erase_binary:
erase_binary()
return
if options.version:
show_version()
return
if systemwide_python:
parser.print_help()
else:
start_qsnake()
def setup_cpu(cpu_count):
if cpu_count == 0:
try:
import multiprocessing
cpu_count = multiprocessing.cpu_count() + 1
except ImportError:
cpu_count = 1
if cpu_count > 1:
os.environ["MAKEFLAGS"] = "-j %d" % cpu_count
# If this variable is True, "cmd" will echo each command. It'd be nice to
# refactor this somehow, so that we don't need this global variable. This
# variable is set to True if the user passes the "-v" switch to qsnake:
global_cmd_echo = False
def cmd(s, capture=False, ok_exit_code_list=None, echo=False):
"""
ok_exit_code_list ... a list of ok exit codes (otherwise cmd() raises an
exception)
"""
if ok_exit_code_list is None:
ok_exit_code_list = [0]
if echo or global_cmd_echo:
print s
s = expandvars(s)
if capture:
p = subprocess.Popen(s, shell=True, stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output = p.communicate()[0]
r = p.returncode
else:
output = None
r = os.system(s)
if r not in ok_exit_code_list:
raise CmdException("Command '%s' failed with err=%d." % (s, r))
return output
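# Illustrative call (hypothetical command): capture the command's output and
# accept exit codes 0 and 1 as success:
#   out = cmd("ls $QSNAKE_ROOT/spkg/standard", capture=True,
#             ok_exit_code_list=[0, 1])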
def create_package(package):
git_repo = "http://github.com/qsnake/" + package + ".git"
a = git_repo.rfind("/") + 1
b = git_repo.rfind(".git")
dir_name = git_repo[a:b]
print "Creating a package in the current directory."
print "Package name:", package
print "Git repository:", git_repo
tmp = tempfile.mkdtemp()
print "Using temporary directory:", tmp
cur = cmd("echo $CUR", capture=True).strip()
cmd("cd %s; git clone --depth 1 %s" % (tmp, git_repo))
commit = cmd("cd %s/%s; git rev-parse HEAD" % (tmp, dir_name),
capture=True).strip()
cmd("cd %s/%s; rm -rf .git" % (tmp, dir_name))
sha = commit[:7]
if os.path.exists("%s/%s/spkg-prepare" % (tmp, dir_name)):
print "spkg-prepare found, running it..."
cmd("cd %s/%s; sh spkg-prepare" % (tmp, dir_name))
if os.path.exists("%s/%s/spkg-install" % (tmp, dir_name)):
print "spkg-install file exists, not doing anything"
elif os.path.exists("%s/%s/setup.py" % (tmp, dir_name)):
print "spkg-install file doesn't exist, creating one for setup.py"
f = open("%s/%s/spkg-install" % (tmp, dir_name), "w")
f.write("""
#! /bin/sh
if [ "$SPKG_LOCAL" = "" ]; then
echo "SPKG_LOCAL undefined ... exiting";
echo "Maybe run 'qsnake --shell'?"
exit 1
fi
set -e
python setup.py install
""")
f.close()
else:
raise Exception("spkg-install nor setup.py is present")
new_dir_name = "%s-%s" % (package, sha)
pkg_filename = "%s.spkg" % (new_dir_name)
cmd("cd %s; mv %s %s" % (tmp, dir_name, new_dir_name))
print "Creating the spkg package..."
cmd("cd %s; tar cjf %s %s" % (tmp, pkg_filename, new_dir_name))
cmd("cp %s/%s %s/%s" % (tmp, pkg_filename, cur, pkg_filename))
print
print "Package created: %s" % (pkg_filename)
def upload_package(package):
cmd("cd $CUR; scp %s spilka.math.unr.edu:/var/www3/qsnake.org/packages/qsnake_st/" % (package))
print "Package uploaded: %s" % (package)
def release_binary():
tmp = tempfile.mkdtemp()
qsnake_dir = "qsnake-%s" % version
print "Using temporary directory:", tmp
cur = cmd("echo $CUR", capture=True).strip()
cmd("mkdir %s/%s" % (tmp, qsnake_dir))
print "Copying qsnake into the temporary directory..."
cmd("cd $QSNAKE_ROOT; cp -r * %s/%s/" % (tmp, qsnake_dir))
print "Removing source SPKG packages"
cmd("rm -f %s/%s/spkg/standard/*" % (tmp, qsnake_dir))
print "Creating a binary tarball"
cmd("cd %s; tar czf %s.tar.gz %s" % (tmp, qsnake_dir, qsnake_dir))
cmd("cd $QSNAKE_ROOT; cp %s/%s.tar.gz ." % (tmp, qsnake_dir))
print
print "Package created: %s.tar.gz" % (qsnake_dir)
def show_version():
s = "Qsnake Version %s, Release Date: %s" % (version, release_date)
print s
def start_qsnake(debug=False):
if debug:
print "Loading IPython..."
try:
import IPython
except ImportError:
raise Exception("You need to install 'ipython'")
if debug:
print " Done."
banner_length = 70
l = "| Qsnake Version %s, Release Date: %s" % (version, release_date)
l += " " * (banner_length - len(l) - 1) + "|"
banner = "-" * banner_length + "\n" + l + "\n"
l = "| Type lab() for the GUI."
l += " " * (banner_length - len(l) - 1) + "|"
banner += l + "\n" + "-" * banner_length + "\n"
namespace = {"lab": run_lab}
os.environ["IPYTHONDIR"] = expandvars("$DOT_SAGE/ipython")
os.environ["IPYTHONRC"] = "ipythonrc"
if not os.path.exists(os.environ["IPYTHONRC"]):
cmd('mkdir -p "$DOT_SAGE"')
cmd('cp -r "$QSNAKE_ROOT/spkg/base/ipython" "$DOT_SAGE/"')
os.environ["MPLCONFIGDIR"] = expandvars("$DOT_SAGE/matplotlib")
if not os.path.exists(os.environ["MPLCONFIGDIR"]):
cmd('cp -r "$QSNAKE_ROOT/spkg/base/matplotlib" "$DOT_SAGE/"')
if debug:
print "Starting the main loop..."
c = IPython.config.loader.Config()
c.InteractiveShell.confirm_exit = False
IPython.frontend.terminal.embed.InteractiveShellEmbed(config=c,
user_ns=namespace, banner1=banner).mainloop(local_ns={})
def download_packages():
print "Downloading standard spkg packages"
cmd("mkdir -p $QSNAKE_ROOT/spkg/standard")
spkg, git, provided = get_standard_packages()
for p in spkg:
cmd("cd $QSNAKE_ROOT/spkg/standard; ../base/qsnake-wget %s" % p)
for p in git:
# Obtain the latest hash from github:
url = "https://api.github.com/repos/qsnake/%s/branches"
try:
data = urllib2.urlopen(url % p).read()
except urllib2.HTTPError:
print "Can't open the url:", url % p
raise
data = json.loads(data)
i = 0
while data[i]["name"] != "master": i += 1
commit = data[i]["commit"]["sha"]
sha = commit[:7]
path = "$QSNAKE_ROOT/spkg/standard/%s-%s.spkg" % (p, sha)
# If we already have this hash, do nothing, otherwise update the
# package:
if os.path.exists(expandvars(path)):
print "Package '%s' (%s) is current, not updating." % (p, sha)
else:
cmd("rm -f $QSNAKE_ROOT/spkg/standard/%s-*.spkg" % p)
cmd("cd $QSNAKE_ROOT/spkg/standard; ../../qsnake --create-package %s" % p)
print "\n"
def install_package_spkg(pkg):
print "Installing %s..." % pkg
name, version = extract_name_version_from_path(pkg)
cmd("mkdir -p $QSNAKE_ROOT/spkg/build")
cmd("mkdir -p $QSNAKE_ROOT/spkg/installed")
# Remove the possible old builddir
cmd("cd $QSNAKE_ROOT/spkg/build; rm -rf %s-%s" % (name, version))
try:
cmd("cd $QSNAKE_ROOT/spkg/build; tar xjf %s" % pkg)
except CmdException:
print "Not a bz2 archive, trying gzip..."
try:
cmd("cd $QSNAKE_ROOT/spkg/build; tar xzf %s" % pkg)
except CmdException:
print "Not a bz2 nor gzip archive, trying tar..."
cmd("cd $QSNAKE_ROOT/spkg/build; tar xf %s" % pkg)
cmd("cd $QSNAKE_ROOT/spkg/build/%s-%s; chmod +x spkg-install" % (name, version))
try:
cmd("cd $QSNAKE_ROOT/spkg/build/%s-%s; . $QSNAKE_ROOT/local/bin/qsnake-env; ./spkg-install" % (name, version))
except CmdException:
raise PackageBuildFailed()
cmd("cd $QSNAKE_ROOT/spkg/build; rm -rf %s-%s" % (name, version))
def install_package(pkg, install_dependencies=True, force_install=False,
cpu_count=0):
"""
Installs the package "pkg".
"pkg" can be either a full path, or just the name of the package (with or
without a version).
"install_dependencies" ... if True, it will also install all dependencies
"force_install" ... if True, it will install the package even if it has
been already installed
"cpu_count" ... number of processors to use (0 means the number of
processors in the machine)
Examples:
>>> install_package("http://qsnake.org/stpack/python-2.6.4.p9.spkg")
>>> install_package("spkg/standard/readline-6.0.spkg")
>>> install_package("readline-6.0.spkg")
>>> install_package("readline")
"""
if pkg.startswith("http") or pkg.startswith("www"):
# Download from the web:
remote = True
import tempfile
tmpdir = tempfile.mkdtemp()
cmd("wget --directory-prefix=" + tmpdir + " " + pkg)
pkg_name = os.path.split(pkg)
pkg = os.path.join(tmpdir,pkg_name[1])
elif pkg == ".":
# Install from the current directory, try to guess
# how to install it properly:
if os.path.exists(expandvars("$CUR/spkg-install")):
setup_cpu(cpu_count)
try:
cmd("cd $CUR; /bin/bash spkg-install")
except CmdException:
print "Qsnake 'install .' exited with an error."
elif os.path.exists(expandvars("$CUR/setup.py")):
try:
cmd("cd $CUR; python setup.py install")
except CmdException:
print "Qsnake 'python setup.py install' exited with an error."
else:
print "Don't know how to install from the current directory."
return
else:
# Install the 'pkg' package
remote = False
try:
pkg = pkg_make_absolute(pkg)
except PackageNotFound, p:
print p
sys.exit(1)
if is_installed(pkg):
if not force_install:
print "Package '%s' is already installed" % pkg_make_relative(pkg)
return
if install_dependencies:
print "Installing dependencies for %s..." % pkg
for dep in get_dependencies(pkg):
install_package(dep, install_dependencies=False,
cpu_count=cpu_count)
qsnake_scripts = ["qsnake-env"]
setup_cpu(cpu_count)
# Create the standard POSIX directories:
for d in ["bin", "doc", "include", "lib", "man", "share"]:
cmd("mkdir -p $QSNAKE_ROOT/local/%s" % d)
for script in qsnake_scripts:
cmd("cp $QSNAKE_ROOT/spkg/base/%s $QSNAKE_ROOT/local/bin/" % script)
install_package_spkg(pkg)
cmd("touch $QSNAKE_ROOT/spkg/installed/%s" % pkg_make_relative(pkg))
print
print "Package '%s' installed." % pkg_make_relative(pkg)
if remote:
from shutil import rmtree
rmtree(tmpdir)
def is_installed(pkg):
if pkg in get_system_packages():
return True
pkg = pkg_make_relative(pkg)
candidates = glob(expandvars("$QSNAKE_ROOT/spkg/installed/%s" % pkg))
if len(candidates) == 1:
return True
elif len(candidates) == 0:
return False
else:
raise Exception("Internal error: got more candidates in is_installed")
def pkg_make_absolute(pkg):
if pkg.endswith(".spkg"):
if os.path.exists(pkg):
return os.path.abspath(pkg)
pkg_current = expandvars("$CUR/%s" % pkg)
if os.path.exists(pkg_current):
return pkg_current
raise PackageNotFound("Package '%s' not found in the current directory" % pkg)
candidates = glob(expandvars("$QSNAKE_ROOT/spkg/standard/*.spkg"))
if len(candidates) == 0:
raise PackageNotFound("Package '%s' not found" % pkg)
cands = []
for p in candidates:
name, version = extract_name_version_from_path(p)
if name == pkg:
return p
if pkg in name:
cands.append(p)
if len(cands) == 0:
raise PackageNotFound("Package '%s' not found" % pkg)
elif len(cands) == 1:
return cands[0]
print "Too many candidates:"
print " " + "\n ".join(cands)
raise PackageNotFound("Ambiguous package name.")
def pkg_make_relative(pkg):
pkg = pkg_make_absolute(pkg)
name, version = extract_name_version_from_path(pkg)
return name
def make_unique(l):
m = []
for item in l:
if item not in m:
m.append(item)
return m
def get_dependencies(pkg):
"""
Gets all (including indirect) dependencies for the package "pkg".
For simplicity, the dependency graph is currently hardwired in this
function.
"""
provided = get_system_packages()
if pkg in provided:
return []
pkg_name = pkg_make_relative(pkg)
dependency_graph = get_dependency_graph()
deps = []
for dep in dependency_graph.get(pkg_name, []):
if dep in provided:
continue
deps.extend(get_dependencies(dep))
deps.append(dep)
deps = make_unique(deps)
return deps
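# Illustrative example (hypothetical entries in packages.json, with the
# corresponding .spkg files already downloaded): if "foo" depends on ["bar"]
# and "bar" depends on ["baz"], get_dependencies("foo") returns
# ["baz", "bar"] -- indirect dependencies first, system-provided packages
# skipped, duplicates removed by make_unique().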
def build(cpu_count=0):
print "Building Qsnake"
# Only add the packages that you want to have in Qsnake. Don't add
# dependencies (those are handled in the get_dependencies() function)
packages_list = [
# Basics:
"git",
"libqsnake",
# SciPy stack
"ipython",
"scipy",
"sympy",
"matplotlib",
"h5py",
# PDE packages:
"fipy",
"sfepy",
"phaml",
# Electronic structure packages:
"gpaw",
"elk",
]
try:
for pkg in packages_list:
install_package(pkg, cpu_count=cpu_count)
print
print "Finished building Qsnake."
except PackageBuildFailed:
print
print "Qsnake build failed."
def wait_for_ctrl_c():
try:
while 1:
sleep(1)
except KeyboardInterrupt:
pass
def run_lab():
"""
Runs the html notebook.
"""
print "Starting Web GUI: Open your web browser at http://localhost:8888/"
print "Press CTRL+C to kill it"
print
from IPython.frontend.html.notebook.notebookapp import NotebookApp
app = NotebookApp()
# This option enables Matplotlib:
app.initialize(["--pylab=inline"])
app.start()
def extract_version(package_name):
"""
Extracts the version from the package_name.
The version is defined as one of the following:
-3245s
-ab434
-1.1-343s
-2.3-4
-134-minimal-24
but not:
-ab-13
-ab-ab
-m14-m16
The leading "-" is discarded.
Example:
>>> extract_version("jinja-2.5")
'2.5'
"""
def numeric(c):
if c in ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9"]:
return True
return False
first_dash = package_name.find("-")
last_dash = package_name.rfind("-")
if first_dash == last_dash:
return package_name[first_dash+1:]
while not numeric(package_name[first_dash + 1]):
package_name = package_name[first_dash+1:]
first_dash = package_name.find("-")
last_dash = package_name.rfind("-")
if first_dash == last_dash:
return package_name[first_dash+1:]
return package_name[first_dash + 1:]
def extract_name_version(package_name):
"""
Extracts the name and the version.
Example:
>>> extract_name_version("jinja-2.5")
('jinja', '2.5')
"""
version = extract_version(package_name)
name = package_name[:-len(version)-1]
return name, version
def extract_name_version_from_path(p):
"""
Extracts the name and the version from the full path.
Example:
>> extract_name_version_from_path("/home/bla/jinja-2.5.spkg")
('jinja', '2.5')
"""
path, ext = os.path.splitext(p)
assert ext == ".spkg"
directory, filename = os.path.split(path)
return extract_name_version(filename)
def command_update():
print "Updating the git repository"
cmd("cd $QSNAKE_ROOT; git pull http://github.com/qsnake/qsnake.git master")
download_packages()
print "Done."
def command_list():
print "List of installed packages:"
cmd("cd $QSNAKE_ROOT; ls spkg/installed")
def command_develop():
print "Adding the current directory into qsnake.pth file:"
cmd("echo $CUR >> $SPKG_LOCAL/lib/python/site-packages/qsnake.pth",
echo=True)
def get_system_packages():
"""get a dict by platform of packages provided by the system."""
d = {}
d['darwin'] = [
'gnutls',
'openssl',
'termcap',
'zlib',
'bzip2',
'sqlite',
'uuid',
'lapack',
'curl'
]
return d.get(sys.platform, [])
def get_standard_packages():
from json import load
f = open(expandvars("$QSNAKE_ROOT/spkg/base/packages.json"))
data = load(f)
QSNAKE_STANDARD = "http://qsnake.googlecode.com/files"
spkg = []
git = []
provided = get_system_packages()
for p in data:
if p['name'] in provided:
print 'system provided: '+p['name']
continue
download = p["download"]
if download == "qsnake-spkg":
spkg.append(QSNAKE_STANDARD + "/" + p["name"] + "-" + \
p["version"] + ".spkg")
elif download == "qsnake-git":
git.append(p["name"])
else:
raise Exception("Unsupported 'download' field")
return spkg, git, provided
def get_dependency_graph():
from json import load
f = open(expandvars("$QSNAKE_ROOT/spkg/base/packages.json"))
data = load(f)
QSNAKE_STANDARD = "http://qsnake.googlecode.com/files"
graph = {}
for p in data:
graph[p["name"]] = p["dependencies"]
return graph
def verify_database():
print "Verifying the package database..."
try:
packages = get_standard_packages()
dependency_graph = get_dependency_graph()
for p in dependency_graph:
deps = dependency_graph[p]
for p2 in deps:
if not p2 in dependency_graph:
msg = "Dependency '%s' of the package '%s' doesn't exist"
raise Exception(msg % (p2, p))
print "OK"
except:
print "Failed."
print
print "More information about the error:"
raise
def erase_binary():
print "Deleting all installed files..."
cmd("rm -rf $QSNAKE_ROOT/local")
cmd("rm -rf $QSNAKE_ROOT/spkg/build")
cmd("rm -rf $QSNAKE_ROOT/spkg/installed")
print " Done."
def run_tests():
import qsnake
os.environ["MPLCONFIGDIR"] = expandvars("$QSNAKE_ROOT/spkg/base/matplotlib")
qsnake.test()
if __name__ == "__main__":
main()
|
bsd-3-clause
| 838,599,484,452,875,400
| 32.209046
| 118
| 0.577324
| false
| 3.586612
| false
| false
| false
|
bparzella/secsgem
|
secsgem/secs/data_items/sdack.py
|
1
|
1685
|
#####################################################################
# sdack.py
#
# (c) Copyright 2021, Benjamin Parzella. All rights reserved.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#####################################################################
"""SDACK data item."""
from .. import variables
from .base import DataItemBase
class SDACK(DataItemBase):
"""
Map setup acknowledge.
:Types: :class:`Binary <secsgem.secs.variables.Binary>`
:Length: 1
**Values**
+-------+---------------+--------------------------------------------+
| Value | Description | Constant |
+=======+===============+============================================+
| 0 | Received Data | :const:`secsgem.secs.data_items.SDACK.ACK` |
+-------+---------------+--------------------------------------------+
| 1-63 | Error | |
+-------+---------------+--------------------------------------------+
**Used In Function**
- :class:`SecsS12F02 <secsgem.secs.functions.SecsS12F02>`
"""
__type__ = variables.Binary
__count__ = 1
ACK = 0
|
lgpl-2.1
| 4,805,233,631,875,200,000
| 36.444444
| 78
| 0.457567
| false
| 4.773371
| false
| false
| false
|
Sakartu/stringinfo
|
stringinfo.py
|
1
|
1798
|
#!/usr/bin/env python3
# -*- coding: utf8 -*-
"""
Usage:
stringinfo [options] [--] [STRING]...
Options:
STRING The strings for which you want information. If none are given, read from stdin upto EOF. Empty strings are ignored.
--list List all plugins, with their descriptions and whether they're default or not
--all Run all plugins, even the ones that aren't default
--verbose Print debugging messages
--file INFILE Read inputs from inputfile, removing trailing newlines. BEWARE: leading/trailing whitespace is preserved!
Plugins:
"""
import colorama
from docopt import docopt
import sys
import veryprettytable
import plugins
from plugins import color
__author__ = 'peter'
def main():
args = docopt(__doc__ + plugins.usage_table())
# Find plugins
ps = plugins.get_plugins(args)
if args['--list']:
table = veryprettytable.VeryPrettyTable()
table.field_names = ('Name', 'Default', 'Description')
table.align = 'l'
for p in ps:
table.add_row((p.__name__,
color(p.default),
p.description))
print(table)
return
if args['--file']:
args['STRING'] = [x.strip('\n\r') for x in open(args['--file'], 'r')]
if not args['STRING']:
args['STRING'] = [sys.stdin.read()]
    # Drop empty strings; a bare filter() call is lazy and its result was discarded.
    args['STRING'] = [s for s in args['STRING'] if s]
# Initialize colorama
colorama.init()
# For each plugin, check if it's applicable and if so, run it
for p in ps:
plugin = p(args)
if plugin.sentinel():
print(plugin.header)
print(plugin.handle())
else:
if args['--verbose']:
print('Sentinel failed for {0}'.format(p.__name__))
if __name__ == '__main__':
main()
|
mit
| -4,475,708,929,865,140,700
| 25.850746
| 131
| 0.588432
| false
| 3.969095
| false
| false
| false
|
Code4SA/mma-dexter
|
dexter/processing/crawlers/thecitizentz.py
|
1
|
1629
|
from urlparse import urlparse, urlunparse
import re
from bs4 import BeautifulSoup
import requests
import logging
from .base import BaseCrawler
from ...models import Entity, Author, AuthorType
class TheCitizenTZCrawler(BaseCrawler):
TCTZ = re.compile('(www\.)?thecitizen.co.tz')
log = logging.getLogger(__name__)
def offer(self, url):
""" Can this crawler process this URL? """
parts = urlparse(url)
return bool(self.TCTZ.match(parts.netloc))
def extract(self, doc, raw_html):
""" Extract text and other things from the raw_html for this document. """
super(TheCitizenTZCrawler, self).extract(doc, raw_html)
soup = BeautifulSoup(raw_html)
# gather title
doc.title = self.extract_plaintext(soup.select("article.main.column .story-view header h1"))
#gather publish date
date = self.extract_plaintext(soup.select("article.main.column .story-view header h5"))
doc.published_at = self.parse_timestamp(date)
#gather text and summary
nodes = soup.select("article.main.column .story-view .article .body-copy p")
if len(nodes) > 1:
doc.summary = self.extract_plaintext(nodes[1:2])
doc.text = "\n\n".join(p.text.strip() for p in nodes[1:])
# gather author
        author = self.extract_plaintext(soup.select("article.main.column .story-view .article .author")).replace("By ", '').split('@')[0]
if author:
doc.author = Author.get_or_create(author.strip(), AuthorType.journalist())
else:
doc.author = Author.unknown()
|
apache-2.0
| 2,409,094,208,924,812,000
| 34.413043
| 144
| 0.643953
| false
| 3.806075
| false
| false
| false
|
deapplegate/wtgpipeline
|
blank.py
|
1
|
4506
|
def add_correction_new(cat_list,OBJNAME,FILTER,PPRUN):
    import scipy, re, string, os, pyfits
''' create chebychev polynomials '''
cheby_x = [{'n':'0x','f':lambda x,y:1.},{'n':'1x','f':lambda x,y:x},{'n':'2x','f':lambda x,y:2*x**2-1},{'n':'3x','f':lambda x,y:4*x**3.-3*x}]
cheby_y = [{'n':'0y','f':lambda x,y:1.},{'n':'1y','f':lambda x,y:y},{'n':'2y','f':lambda x,y:2*y**2-1},{'n':'3y','f':lambda x,y:4*y**3.-3*y}]
cheby_terms = []
cheby_terms_no_linear = []
for tx in cheby_x:
for ty in cheby_y:
if not ((tx['n'] == '0x' and ty['n'] == '0y')): # or (tx['n'] == '0x' and ty['n'] == '1y') or (tx['n'] == '1x' and ty['n'] == '0y')) :
cheby_terms.append({'n':tx['n'] + ty['n'],'fx':tx['f'],'fy':ty['f']})
if not ((tx['n'] == '0x' and ty['n'] == '0y') or (tx['n'] == '0x' and ty['n'] == '1y') or (tx['n'] == '1x' and ty['n'] == '0y')) :
cheby_terms_no_linear.append({'n':tx['n'] + ty['n'],'fx':tx['f'],'fy':ty['f']})
cov = 1
if cov:
samples = [['sdss',cheby_terms,True]] #,['nosdss',cheby_terms_no_linear,False]] #[['nosdss',cheby_terms_no_linear],['sdss',cheby_terms]]
else:
samples = [['nosdss',cheby_terms_no_linear,False]]
sample = 'sdss'
sample_size = 'all'
import re, time
dt = get_a_file(OBJNAME,FILTER,PPRUN)
d = get_fits(OBJNAME,FILTER,PPRUN)
print d.keys()
column_prefix = sample+'$'+sample_size+'$'
position_columns_names = re.split('\,',d[column_prefix + 'positioncolumns'])
print position_columns_names, 'position_columns_names'
fitvars = {}
cheby_terms_dict = {}
print column_prefix, position_columns_names
for ele in position_columns_names:
print ele
if type(ele) != type({}):
ele = {'name':ele}
res = re.split('$',ele['name'])
fitvars[ele['name']] = float(d[sample+'$'+sample_size+'$'+ele['name']])
for term in cheby_terms:
if term['n'] == ele['name'][2:]:
cheby_terms_dict[term['n']] = term
cheby_terms_use = [cheby_terms_dict[k] for k in cheby_terms_dict.keys()]
print cheby_terms_use, fitvars
CHIPS = [int(x) for x in re.split(',',dt['CHIPS'])]
LENGTH1, LENGTH2 = dt['LENGTH1'], dt['LENGTH2']
per_chip = True
coord_conv_x = lambda x:(2.*x-0-LENGTH1)/(LENGTH1-0)
coord_conv_y = lambda x:(2.*x-0-LENGTH2)/(LENGTH2-0)
''' make images of illumination corrections '''
    cat_grads = []
    for cat in cat_list:
for ROT in EXPS.keys():
for SUPA in EXPS[ROT]:
import re
print SUPA, cat
res = re.split('$',cat[1])
file = res[1]
print file, cat
if file == SUPA: rotation = ROT
print cat
p = pyfits.open(cat[0])
tab = p["OBJECTS"].data
print tab.field('MAG_AUTO')[0:10]
x = coord_conv_x(tab.field('Xpos_ABS'))
y = coord_conv_y(tab.field('Ypos_ABS'))
CHIPS = tab.field('CHIP')
chip_zps = []
for i in range(len(CHIPS)):
chip_zps.append(float(fitvars['zp_' + str(CHIPS[i])]))
chip_zps = scipy.array(chip_zps)
''' save pattern w/ chip zps '''
trial = False
children = []
x = coord_conv_x(x)
y = coord_conv_y(y)
''' correct w/ polynomial '''
epsilonC = 0
index = 0
for term in cheby_terms_use:
index += 1
print index, ROT, term, fitvars[str(ROT)+'$'+term['n']]
epsilonC += fitvars[str(ROT)+'$'+term['n']]*term['fx'](x,y)*term['fy'](x,y)
''' add the zeropoint '''
epsilonC += chip_zps
''' save pattern w/o chip zps '''
        print epsilonC[0:20]
tab.field('MAG_AUTO')[:] = tab.field('MAG_AUTO')[:] - epsilonC
print tab.field('MAG_AUTO')[0:20]
new_name = cat[0].replace('.cat','.gradient.cat')
os.system('rm ' + new_name)
p.writeto(new_name)
cat_grads.append([new_name,cat[1]])
return cat_grads
|
mit
| 4,700,156,536,708,270,000
| 39.594595
| 146
| 0.458722
| false
| 3.18896
| false
| false
| false
|
aiven/aiven-client
|
tests/test_argx.py
|
1
|
1825
|
# Copyright 2020, Aiven, https://aiven.io/
#
# This file is under the Apache License, Version 2.0.
# See the file `LICENSE` for details.
try:
from functools import cached_property
except ImportError:
cached_property = None
from aiven.client.argx import arg, CommandLineTool
class TestCLI(CommandLineTool):
@arg()
def xxx(self):
"""7"""
@arg()
def aaa(self):
"""1"""
@arg()
def ccc(self):
"""4"""
class SubCLI(CommandLineTool):
@arg()
def yyy(self):
"""8"""
@arg()
def bbb(self):
"""2"""
@arg()
def ddd(self):
"""5"""
class SubCLI2(CommandLineTool):
@arg()
def yyz(self):
"""9"""
@arg()
def bbc(self):
"""3"""
@arg()
def dde(self):
"""6"""
def test_extended_commands_remain_alphabetically_ordered():
cli = TestCLI("testcli")
cli.extend_commands(cli) # Force the CLI to have its full arg set at execution
sl2 = SubCLI2("subcli2")
sl = SubCLI("subcli")
cli.extend_commands(sl2)
cli.extend_commands(sl)
action_order = [item.dest for item in cli.subparsers._choices_actions] # pylint: disable=protected-access
assert action_order == ["aaa", "bbb", "bbc", "ccc", "ddd", "dde", "xxx", "yyy", "yyz"]
class DescriptorCLI(CommandLineTool):
@property
def raise1(self):
raise RuntimeError("evaluated raise1")
if cached_property is not None:
@cached_property
def raise2(self):
raise RuntimeError("evaluated raise2")
@arg("something")
def example_command(self):
"""Example command."""
def test_descriptors_are_not_eagerly_evaluated():
cli = DescriptorCLI("DescriptorCLI")
calls = []
cli.add_cmds(calls.append)
assert calls == [cli.example_command]
|
apache-2.0
| -4,218,275,428,768,166,000
| 19.505618
| 110
| 0.592877
| false
| 3.509615
| false
| false
| false
|
geotagx/geotagx-pybossa-archive
|
pybossa/stats.py
|
1
|
14056
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask import current_app
from sqlalchemy.sql import text
from pybossa.core import db
from pybossa.cache import cache, memoize, FIVE_MINUTES, ONE_DAY
from pybossa.model import TaskRun, Task
import string
import pygeoip
import operator
import datetime
import time
from datetime import timedelta
@memoize(timeout=ONE_DAY)
def get_task_runs(app_id):
"""Return all the Task Runs for a given app_id"""
task_runs = db.session.query(TaskRun).filter_by(app_id=app_id).all()
return task_runs
@memoize(timeout=ONE_DAY)
def get_tasks(app_id):
"""Return all the tasks for a given app_id"""
tasks = db.session.query(Task).filter_by(app_id=app_id).all()
return tasks
@memoize(timeout=ONE_DAY)
def get_avg_n_tasks(app_id):
"""Return the average number of answers expected per task,
and the number of tasks"""
sql = text('''SELECT COUNT(task.id) as n_tasks,
AVG(task.n_answers) AS "avg" FROM task
WHERE task.app_id=:app_id;''')
results = db.engine.execute(sql, app_id=app_id)
for row in results:
avg = float(row.avg)
total_n_tasks = row.n_tasks
return avg, total_n_tasks
@memoize(timeout=ONE_DAY)
def stats_users(app_id):
"""Return users's stats for a given app_id"""
users = {}
auth_users = []
anon_users = []
# Get Authenticated Users
sql = text('''SELECT task_run.user_id AS user_id,
COUNT(task_run.id) as n_tasks FROM task_run
WHERE task_run.user_id IS NOT NULL AND
task_run.user_ip IS NULL AND
task_run.app_id=:app_id
GROUP BY task_run.user_id ORDER BY n_tasks DESC
LIMIT 5;''')
results = db.engine.execute(sql, app_id=app_id)
for row in results:
auth_users.append([row.user_id, row.n_tasks])
sql = text('''SELECT count(distinct(task_run.user_id)) AS user_id FROM task_run
WHERE task_run.user_id IS NOT NULL AND
task_run.user_ip IS NULL AND
task_run.app_id=:app_id;''')
results = db.engine.execute(sql, app_id=app_id)
for row in results:
users['n_auth'] = row[0]
# Get all Anonymous Users
sql = text('''SELECT task_run.user_ip AS user_ip,
COUNT(task_run.id) as n_tasks FROM task_run
WHERE task_run.user_ip IS NOT NULL AND
task_run.user_id IS NULL AND
task_run.app_id=:app_id
GROUP BY task_run.user_ip ORDER BY n_tasks DESC;''')
results = db.engine.execute(sql, app_id=app_id)
for row in results:
anon_users.append([row.user_ip, row.n_tasks])
sql = text('''SELECT COUNT(DISTINCT(task_run.user_ip)) AS user_ip FROM task_run
WHERE task_run.user_ip IS NOT NULL AND
task_run.user_id IS NULL AND
task_run.app_id=:app_id;''')
results = db.engine.execute(sql, app_id=app_id)
for row in results:
users['n_anon'] = row[0]
return users, anon_users, auth_users
@memoize(timeout=ONE_DAY)
def stats_dates(app_id):
dates = {}
dates_anon = {}
dates_auth = {}
dates_n_tasks = {}
task_runs = get_task_runs(app_id)
avg, total_n_tasks = get_avg_n_tasks(app_id)
for tr in task_runs:
# Data for dates
date, hour = string.split(tr.finish_time, "T")
tr.finish_time = string.split(tr.finish_time, '.')[0]
hour = string.split(hour, ":")[0]
# Dates
if date in dates.keys():
dates[date] += 1
else:
dates[date] = 1
        dates_n_tasks[date] = total_n_tasks * avg
if tr.user_id is None:
if date in dates_anon.keys():
dates_anon[date] += 1
else:
dates_anon[date] = 1
else:
if date in dates_auth.keys():
dates_auth[date] += 1
else:
dates_auth[date] = 1
return dates, dates_n_tasks, dates_anon, dates_auth
@memoize(timeout=ONE_DAY)
def stats_hours(app_id):
hours = {}
hours_anon = {}
hours_auth = {}
max_hours = 0
max_hours_anon = 0
max_hours_auth = 0
task_runs = get_task_runs(app_id)
# initialize hours keys
for i in range(0, 24):
hours[str(i).zfill(2)] = 0
hours_anon[str(i).zfill(2)] = 0
hours_auth[str(i).zfill(2)] = 0
for tr in task_runs:
# Hours
date, hour = string.split(tr.finish_time, "T")
tr.finish_time = string.split(tr.finish_time, '.')[0]
hour = string.split(hour, ":")[0]
if hour in hours.keys():
hours[hour] += 1
if (hours[hour] > max_hours):
max_hours = hours[hour]
if tr.user_id is None:
if hour in hours_anon.keys():
hours_anon[hour] += 1
if (hours_anon[hour] > max_hours_anon):
max_hours_anon = hours_anon[hour]
else:
if hour in hours_auth.keys():
hours_auth[hour] += 1
if (hours_auth[hour] > max_hours_auth):
max_hours_auth = hours_auth[hour]
return hours, hours_anon, hours_auth, max_hours, max_hours_anon, max_hours_auth
@memoize(timeout=ONE_DAY)
def stats_format_dates(app_id, dates, dates_n_tasks, dates_estimate,
dates_anon, dates_auth):
"""Format dates stats into a JSON format"""
dayNewStats = dict(label="Anon + Auth", values=[])
dayAvgAnswers = dict(label="Expected Answers", values=[])
dayEstimates = dict(label="Estimation", values=[])
dayTotalStats = dict(label="Total", disabled="True", values=[])
dayNewAnonStats = dict(label="Anonymous", values=[])
dayNewAuthStats = dict(label="Authenticated", values=[])
total = 0
for d in sorted(dates.keys()):
        # JavaScript expects milliseconds since EPOCH
# New answers per day
dayNewStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000), dates[d]])
dayAvgAnswers['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000),
dates_n_tasks[d]])
# Total answers per day
total = total + dates[d]
dayTotalStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000), total])
# Anonymous answers per day
if d in (dates_anon.keys()):
dayNewAnonStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000),
dates_anon[d]])
else:
dayNewAnonStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000), 0])
# Authenticated answers per day
if d in (dates_auth.keys()):
dayNewAuthStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000),
dates_auth[d]])
else:
dayNewAuthStats['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000), 0])
for d in sorted(dates_estimate.keys()):
dayEstimates['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000),
dates_estimate[d]])
dayAvgAnswers['values'].append(
[int(time.mktime(time.strptime(d, "%Y-%m-%d")) * 1000),
dates_n_tasks.values()[0]])
return dayNewStats, dayNewAnonStats, dayNewAuthStats, \
dayTotalStats, dayAvgAnswers, dayEstimates
@memoize(timeout=ONE_DAY)
def stats_format_hours(app_id, hours, hours_anon, hours_auth,
max_hours, max_hours_anon, max_hours_auth):
"""Format hours stats into a JSON format"""
hourNewStats = dict(label="Anon + Auth", disabled="True", values=[], max=0)
hourNewAnonStats = dict(label="Anonymous", values=[], max=0)
hourNewAuthStats = dict(label="Authenticated", values=[], max=0)
hourNewStats['max'] = max_hours
hourNewAnonStats['max'] = max_hours_anon
hourNewAuthStats['max'] = max_hours_auth
for h in sorted(hours.keys()):
# New answers per hour
#hourNewStats['values'].append(dict(x=int(h), y=hours[h], size=hours[h]*10))
if (hours[h] != 0):
hourNewStats['values'].append([int(h), hours[h],
(hours[h] * 5) / max_hours])
else:
hourNewStats['values'].append([int(h), hours[h], 0])
# New Anonymous answers per hour
if h in hours_anon.keys():
#hourNewAnonStats['values'].append(dict(x=int(h), y=hours[h], size=hours_anon[h]*10))
if (hours_anon[h] != 0):
hourNewAnonStats['values'].append([int(h), hours_anon[h],
(hours_anon[h] * 5) / max_hours])
else:
hourNewAnonStats['values'].append([int(h), hours_anon[h], 0])
# New Authenticated answers per hour
if h in hours_auth.keys():
#hourNewAuthStats['values'].append(dict(x=int(h), y=hours[h], size=hours_auth[h]*10))
if (hours_auth[h] != 0):
hourNewAuthStats['values'].append([int(h), hours_auth[h],
(hours_auth[h] * 5) / max_hours])
else:
hourNewAuthStats['values'].append([int(h), hours_auth[h], 0])
return hourNewStats, hourNewAnonStats, hourNewAuthStats
@memoize(timeout=ONE_DAY)
def stats_format_users(app_id, users, anon_users, auth_users, geo=False):
"""Format User Stats into JSON"""
userStats = dict(label="User Statistics", values=[])
userAnonStats = dict(label="Anonymous Users", values=[], top5=[], locs=[])
userAuthStats = dict(label="Authenticated Users", values=[], top5=[])
userStats['values'].append(dict(label="Anonymous", value=[0, users['n_anon']]))
userStats['values'].append(dict(label="Authenticated", value=[0, users['n_auth']]))
for u in anon_users:
userAnonStats['values'].append(dict(label=u[0], value=[u[1]]))
for u in auth_users:
userAuthStats['values'].append(dict(label=u[0], value=[u[1]]))
# Get location for Anonymous users
top5_anon = []
top5_auth = []
loc_anon = []
# Check if the GeoLiteCity.dat exists
geolite = current_app.root_path + '/../dat/GeoLiteCity.dat'
if geo:
gic = pygeoip.GeoIP(geolite)
for u in anon_users:
if geo:
loc = gic.record_by_addr(u[0])
else:
loc = {}
if loc is None:
loc = {}
if (len(loc.keys()) == 0):
loc['latitude'] = 0
loc['longitude'] = 0
top5_anon.append(dict(ip=u[0], loc=loc, tasks=u[1]))
for u in anon_users:
if geo:
loc = gic.record_by_addr(u[0])
else:
loc = {}
if loc is None:
loc = {}
if (len(loc.keys()) == 0):
loc['latitude'] = 0
loc['longitude'] = 0
loc_anon.append(dict(ip=u[0], loc=loc, tasks=u[1]))
for u in auth_users:
sql = text('''SELECT name, fullname from "user" where id=:id;''')
results = db.engine.execute(sql, id=u[0])
for row in results:
fullname = row.fullname
name = row.name
top5_auth.append(dict(name=name, fullname=fullname, tasks=u[1]))
userAnonStats['top5'] = top5_anon[0:5]
userAnonStats['locs'] = loc_anon
userAuthStats['top5'] = top5_auth
return dict(users=userStats, anon=userAnonStats, auth=userAuthStats,
n_anon=users['n_anon'], n_auth=users['n_auth'])
@memoize(timeout=ONE_DAY)
def get_stats(app_id, geo=False):
"""Return the stats a given app"""
hours, hours_anon, hours_auth, max_hours, \
max_hours_anon, max_hours_auth = stats_hours(app_id)
users, anon_users, auth_users = stats_users(app_id)
dates, dates_n_tasks, dates_anon, dates_auth = stats_dates(app_id)
avg, total_n_tasks = get_avg_n_tasks(app_id)
sorted_answers = sorted(dates.iteritems(), key=operator.itemgetter(0))
if len(sorted_answers) > 0:
last_day = datetime.datetime.strptime(sorted_answers[-1][0], "%Y-%m-%d")
total_answers = sum(dates.values())
if len(dates) > 0:
avg_answers_per_day = total_answers / len(dates)
required_days_to_finish = ((avg * total_n_tasks) - total_answers) / avg_answers_per_day
pace = total_answers
dates_estimate = {}
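    # Project the cumulative answer count forward at the average daily rate until the expected total is reached.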
for i in range(0, int(required_days_to_finish) + 2):
tmp = last_day + timedelta(days=(i))
tmp_str = tmp.date().strftime('%Y-%m-%d')
dates_estimate[tmp_str] = pace
pace = pace + avg_answers_per_day
dates_stats = stats_format_dates(app_id, dates, dates_n_tasks, dates_estimate,
dates_anon, dates_auth)
hours_stats = stats_format_hours(app_id, hours, hours_anon, hours_auth,
max_hours, max_hours_anon, max_hours_auth)
users_stats = stats_format_users(app_id, users, anon_users, auth_users, geo)
return dates_stats, hours_stats, users_stats
|
agpl-3.0
| 3,382,761,236,077,615,600
| 34.494949
| 97
| 0.57712
| false
| 3.425786
| false
| false
| false
|
rclmenezes/sqlalchemy
|
examples/vertical/dictlike.py
|
1
|
7696
|
"""Mapping a vertical table as a dictionary.
This example illustrates accessing and modifying a "vertical" (or
"properties", or pivoted) table via a dict-like interface. These are tables
that store free-form object properties as rows instead of columns. For
example, instead of::
# A regular ("horizontal") table has columns for 'species' and 'size'
Table('animal', metadata,
Column('id', Integer, primary_key=True),
Column('species', Unicode),
Column('size', Unicode))
A vertical table models this as two tables: one table for the base or parent
entity, and another related table holding key/value pairs::
Table('animal', metadata,
Column('id', Integer, primary_key=True))
# The properties table will have one row for a 'species' value, and
# another row for the 'size' value.
    Table('properties', metadata,
Column('animal_id', Integer, ForeignKey('animal.id'),
primary_key=True),
Column('key', UnicodeText),
Column('value', UnicodeText))
Because the key/value pairs in a vertical scheme are not fixed in advance,
accessing them like a Python dict can be very convenient. The example below
can be used with many common vertical schemas as-is or with minor adaptations.
"""
class VerticalProperty(object):
"""A key/value pair.
This class models rows in the vertical table.
"""
def __init__(self, key, value):
self.key = key
self.value = value
def __repr__(self):
return '<%s %r=%r>' % (self.__class__.__name__, self.key, self.value)
class VerticalPropertyDictMixin(object):
"""Adds obj[key] access to a mapped class.
This is a mixin class. It can be inherited from directly, or included
    with multiple inheritance.
Classes using this mixin must define two class properties::
_property_type:
The mapped type of the vertical key/value pair instances. Will be
      invoked with two positional arguments: key, value
_property_mapping:
A string, the name of the Python attribute holding a dict-based
relationship of _property_type instances.
Using the VerticalProperty class above as an example,::
class MyObj(VerticalPropertyDictMixin):
_property_type = VerticalProperty
_property_mapping = 'props'
mapper(MyObj, sometable, properties={
'props': relationship(VerticalProperty,
collection_class=attribute_mapped_collection('key'))})
Dict-like access to MyObj is proxied through to the 'props' relationship::
myobj['key'] = 'value'
# ...is shorthand for:
myobj.props['key'] = VerticalProperty('key', 'value')
      myobj['key'] = 'updated value'
# ...is shorthand for:
myobj.props['key'].value = 'updated value'
print myobj['key']
# ...is shorthand for:
print myobj.props['key'].value
"""
_property_type = VerticalProperty
_property_mapping = None
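    # Resolves to the dict-based relationship named by _property_mapping on the mapped instance.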
__map = property(lambda self: getattr(self, self._property_mapping))
def __getitem__(self, key):
return self.__map[key].value
def __setitem__(self, key, value):
property = self.__map.get(key, None)
if property is None:
self.__map[key] = self._property_type(key, value)
else:
property.value = value
def __delitem__(self, key):
del self.__map[key]
def __contains__(self, key):
return key in self.__map
# Implement other dict methods to taste. Here are some examples:
def keys(self):
return self.__map.keys()
def values(self):
return [prop.value for prop in self.__map.values()]
def items(self):
return [(key, prop.value) for key, prop in self.__map.items()]
def __iter__(self):
return iter(self.keys())
if __name__ == '__main__':
from sqlalchemy import (MetaData, Table, Column, Integer, Unicode,
ForeignKey, UnicodeText, and_, not_, create_engine)
from sqlalchemy.orm import mapper, relationship, Session
from sqlalchemy.orm.collections import attribute_mapped_collection
metadata = MetaData()
# Here we have named animals, and a collection of facts about them.
animals = Table('animal', metadata,
Column('id', Integer, primary_key=True),
Column('name', Unicode(100)))
facts = Table('facts', metadata,
Column('animal_id', Integer, ForeignKey('animal.id'),
primary_key=True),
Column('key', Unicode(64), primary_key=True),
Column('value', UnicodeText, default=None),)
class AnimalFact(VerticalProperty):
"""A fact about an animal."""
class Animal(VerticalPropertyDictMixin):
"""An animal.
Animal facts are available via the 'facts' property or by using
dict-like accessors on an Animal instance::
cat['color'] = 'calico'
# or, equivalently:
cat.facts['color'] = AnimalFact('color', 'calico')
"""
_property_type = AnimalFact
_property_mapping = 'facts'
def __init__(self, name):
self.name = name
def __repr__(self):
return '<%s %r>' % (self.__class__.__name__, self.name)
mapper(Animal, animals, properties={
'facts': relationship(
AnimalFact, backref='animal',
collection_class=attribute_mapped_collection('key')),
})
mapper(AnimalFact, facts)
engine = create_engine("sqlite://")
metadata.create_all(engine)
session = Session(bind=engine)
stoat = Animal(u'stoat')
stoat[u'color'] = u'reddish'
stoat[u'cuteness'] = u'somewhat'
# dict-like assignment transparently creates entries in the
# stoat.facts collection:
print stoat.facts[u'color']
session.add(stoat)
session.commit()
critter = session.query(Animal).filter(Animal.name == u'stoat').one()
print critter[u'color']
print critter[u'cuteness']
critter[u'cuteness'] = u'very'
print 'changing cuteness:'
engine.echo = True
session.commit()
engine.echo = False
marten = Animal(u'marten')
marten[u'color'] = u'brown'
marten[u'cuteness'] = u'somewhat'
session.add(marten)
shrew = Animal(u'shrew')
shrew[u'cuteness'] = u'somewhat'
shrew[u'poisonous-part'] = u'saliva'
session.add(shrew)
loris = Animal(u'slow loris')
loris[u'cuteness'] = u'fairly'
loris[u'poisonous-part'] = u'elbows'
session.add(loris)
session.commit()
q = (session.query(Animal).
filter(Animal.facts.any(
and_(AnimalFact.key == u'color',
AnimalFact.value == u'reddish'))))
print 'reddish animals', q.all()
# Save some typing by wrapping that up in a function:
with_characteristic = lambda key, value: and_(AnimalFact.key == key,
AnimalFact.value == value)
q = (session.query(Animal).
filter(Animal.facts.any(
with_characteristic(u'color', u'brown'))))
print 'brown animals', q.all()
q = (session.query(Animal).
filter(not_(Animal.facts.any(
with_characteristic(u'poisonous-part', u'elbows')))))
print 'animals without poisonous-part == elbows', q.all()
q = (session.query(Animal).
filter(Animal.facts.any(AnimalFact.value == u'somewhat')))
print 'any animal with any .value of "somewhat"', q.all()
# Facts can be queried as well.
q = (session.query(AnimalFact).
filter(with_characteristic(u'cuteness', u'very')))
print 'just the facts', q.all()
|
mit
| -6,652,649,003,940,161,000
| 30.284553
| 80
| 0.618243
| false
| 3.826952
| false
| false
| false
|
OpenAcademy-OpenStack/nova-scheduler
|
nova/api/openstack/compute/plugins/v3/evacuate.py
|
1
|
3721
|
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from webob import exc
from nova.api.openstack import common
from nova.api.openstack.compute.schemas.v3 import evacuate
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import strutils
from nova import utils
LOG = logging.getLogger(__name__)
ALIAS = "os-evacuate"
authorize = extensions.extension_authorizer('compute', 'v3:' + ALIAS)
class EvacuateController(wsgi.Controller):
def __init__(self, *args, **kwargs):
super(EvacuateController, self).__init__(*args, **kwargs)
self.compute_api = compute.API()
self.host_api = compute.HostAPI()
@extensions.expected_errors((400, 404, 409))
@wsgi.action('evacuate')
@validation.schema(evacuate.evacuate)
def _evacuate(self, req, id, body):
"""
Permit admins to evacuate a server from a failed host
to a new one.
"""
context = req.environ["nova.context"]
authorize(context)
evacuate_body = body["evacuate"]
host = evacuate_body["host"]
on_shared_storage = strutils.bool_from_string(
evacuate_body["on_shared_storage"])
password = None
if 'admin_password' in evacuate_body:
# check that if requested to evacuate server on shared storage
# password not specified
if on_shared_storage:
msg = _("admin password can't be changed on existing disk")
raise exc.HTTPBadRequest(explanation=msg)
password = evacuate_body['admin_password']
elif not on_shared_storage:
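            # No admin password was supplied and the disk is not shared, so generate one for the rebuilt instance.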
password = utils.generate_password()
try:
self.host_api.service_get_by_compute_host(context, host)
except exception.NotFound:
msg = _("Compute host %s not found.") % host
raise exc.HTTPNotFound(explanation=msg)
try:
instance = self.compute_api.get(context, id)
self.compute_api.evacuate(context, instance, host,
on_shared_storage, password)
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'evacuate')
except exception.InstanceNotFound as e:
raise exc.HTTPNotFound(explanation=e.format_message())
except exception.ComputeServiceInUse as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
return {'admin_password': password}
class Evacuate(extensions.V3APIExtensionBase):
"""Enables server evacuation."""
name = "Evacuate"
alias = ALIAS
version = 1
def get_resources(self):
return []
def get_controller_extensions(self):
controller = EvacuateController()
extension = extensions.ControllerExtension(self, 'servers', controller)
return [extension]
|
apache-2.0
| 1,858,886,254,443,595,000
| 35.126214
| 79
| 0.658425
| false
| 4.185602
| false
| false
| false
|
bmihelac/django-cruds
|
tests/test_integration.py
|
1
|
1639
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test.testcases import TestCase
from tests.testapp.models import (
Author,
)
class TestIntegration(TestCase):
def setUp(self):
self.author = Author.objects.create(name='Foo')
def test_list(self):
response = self.client.get('/testapp/author/')
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Foo')
def test_create(self):
response = self.client.get('/testapp/author/new/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/testapp/author/new/', {
'name': 'Bar',
})
instance = Author.objects.filter(name='Bar').get()
self.assertRedirects(response, '/testapp/author/%s/' % instance.pk)
def test_detail(self):
response = self.client.get('/testapp/author/%s/' %
self.author.pk)
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Foo')
def test_update(self):
url = '/testapp/author/%s/edit/' % self.author.pk
response = self.client.get(url)
self.assertEqual(response.status_code, 200)
response = self.client.post(url, {
'name': 'Fooz',
})
self.assertRedirects(response, '/testapp/author/%s/' % self.author.pk)
def test_delete(self):
url = '/testapp/author/%s/remove/' % self.author.pk
response = self.client.post(url)
self.assertEqual(Author.objects.count(), 0)
self.assertRedirects(response, '/testapp/author/')
|
bsd-3-clause
| 394,007,786,364,623,360
| 31.137255
| 78
| 0.610738
| false
| 3.893112
| true
| false
| false
|
googleads/google-ads-python
|
google/ads/googleads/v8/resources/types/detail_placement_view.py
|
1
|
2839
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.ads.googleads.v8.enums.types import (
placement_type as gage_placement_type,
)
__protobuf__ = proto.module(
package="google.ads.googleads.v8.resources",
marshal="google.ads.googleads.v8",
manifest={"DetailPlacementView",},
)
class DetailPlacementView(proto.Message):
r"""A view with metrics aggregated by ad group and URL or YouTube
video.
Attributes:
resource_name (str):
Output only. The resource name of the detail placement view.
Detail placement view resource names have the form:
``customers/{customer_id}/detailPlacementViews/{ad_group_id}~{base64_placement}``
placement (str):
Output only. The automatic placement string
at detail level, e. g. website URL, mobile
application ID, or a YouTube video ID.
display_name (str):
Output only. The display name is URL name for
websites, YouTube video name for YouTube videos,
and translated mobile app name for mobile apps.
group_placement_target_url (str):
Output only. URL of the group placement, e.g.
domain, link to the mobile application in app
store, or a YouTube channel URL.
target_url (str):
Output only. URL of the placement, e.g.
website, link to the mobile application in app
store, or a YouTube video URL.
placement_type (google.ads.googleads.v8.enums.types.PlacementTypeEnum.PlacementType):
Output only. Type of the placement, e.g.
Website, YouTube Video, and Mobile Application.
"""
resource_name = proto.Field(proto.STRING, number=1,)
placement = proto.Field(proto.STRING, number=7, optional=True,)
display_name = proto.Field(proto.STRING, number=8, optional=True,)
group_placement_target_url = proto.Field(
proto.STRING, number=9, optional=True,
)
target_url = proto.Field(proto.STRING, number=10, optional=True,)
placement_type = proto.Field(
proto.ENUM,
number=6,
enum=gage_placement_type.PlacementTypeEnum.PlacementType,
)
__all__ = tuple(sorted(__protobuf__.manifest))
|
apache-2.0
| -1,632,002,444,791,074,800
| 36.853333
| 93
| 0.66925
| false
| 4.120464
| false
| false
| false
|
heltonbiker/MapComplete
|
PyQt/FeatureDemos/helloQtDrawing.py
|
1
|
9092
|
#!/usr/bin/env python
# __author__ = 'helton'
import sip
sip.setapi('QVariant', 2)
from math import cos, pi, sin
from PyQt4 import QtCore, QtGui
class RenderArea(QtGui.QWidget):
def __init__(self, path, parent=None):
super(RenderArea, self).__init__(parent)
self.path = path
self.penWidth = 1
self.rotationAngle = 0
self.setBackgroundRole(QtGui.QPalette.Base)
def minimumSizeHint(self):
return QtCore.QSize(50, 50)
def sizeHint(self):
return QtCore.QSize(100, 100)
def setFillRule(self, rule):
self.path.setFillRule(rule)
self.update()
def setFillGradient(self, color1, color2):
self.fillColor1 = color1
self.fillColor2 = color2
self.update()
def setPenWidth(self, width):
self.penWidth = width
self.update()
def setPenColor(self, color):
self.penColor = color
self.update()
def setRotationAngle(self, degrees):
self.rotationAngle = degrees
self.update()
def paintEvent(self, event):
painter = QtGui.QPainter(self)
painter.setRenderHint(QtGui.QPainter.Antialiasing)
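        # Scale to a 100x100 logical coordinate system and rotate about its centre before drawing.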
painter.scale(self.width() / 100.0, self.height() / 100.0)
painter.translate(50.0, 50.0)
painter.rotate(-self.rotationAngle)
painter.translate(-50.0, -50.0)
painter.setPen(QtGui.QPen(self.penColor, self.penWidth,
QtCore.Qt.SolidLine, QtCore.Qt.RoundCap, QtCore.Qt.RoundJoin))
gradient = QtGui.QLinearGradient(0, 0, 0, 100)
gradient.setColorAt(0.0, self.fillColor1)
gradient.setColorAt(1.0, self.fillColor2)
painter.setBrush(QtGui.QBrush(gradient))
painter.drawPath(self.path)
class Window(QtGui.QWidget):
NumRenderAreas = 9
def __init__(self):
super(Window, self).__init__()
rectPath = QtGui.QPainterPath()
rectPath.moveTo(20.0, 30.0)
rectPath.lineTo(80.0, 30.0)
rectPath.lineTo(80.0, 70.0)
rectPath.lineTo(20.0, 70.0)
rectPath.closeSubpath()
roundRectPath = QtGui.QPainterPath()
roundRectPath.moveTo(80.0, 35.0)
roundRectPath.arcTo(70.0, 30.0, 10.0, 10.0, 0.0, 90.0)
roundRectPath.lineTo(25.0, 30.0)
roundRectPath.arcTo(20.0, 30.0, 10.0, 10.0, 90.0, 90.0)
roundRectPath.lineTo(20.0, 65.0)
roundRectPath.arcTo(20.0, 60.0, 10.0, 10.0, 180.0, 90.0)
roundRectPath.lineTo(75.0, 70.0)
roundRectPath.arcTo(70.0, 60.0, 10.0, 10.0, 270.0, 90.0)
roundRectPath.closeSubpath()
ellipsePath = QtGui.QPainterPath()
ellipsePath.moveTo(80.0, 50.0)
ellipsePath.arcTo(20.0, 30.0, 60.0, 40.0, 0.0, 360.0)
piePath = QtGui.QPainterPath()
piePath.moveTo(50.0, 50.0)
piePath.lineTo(65.0, 32.6795)
piePath.arcTo(20.0, 30.0, 60.0, 40.0, 60.0, 240.0)
piePath.closeSubpath()
polygonPath = QtGui.QPainterPath()
polygonPath.moveTo(10.0, 80.0)
polygonPath.lineTo(20.0, 10.0)
polygonPath.lineTo(80.0, 30.0)
polygonPath.lineTo(90.0, 70.0)
polygonPath.closeSubpath()
groupPath = QtGui.QPainterPath()
groupPath.moveTo(60.0, 40.0)
groupPath.arcTo(20.0, 20.0, 40.0, 40.0, 0.0, 360.0)
groupPath.moveTo(40.0, 40.0)
groupPath.lineTo(40.0, 80.0)
groupPath.lineTo(80.0, 80.0)
groupPath.lineTo(80.0, 40.0)
groupPath.closeSubpath()
textPath = QtGui.QPainterPath()
timesFont = QtGui.QFont("Times", 50)
timesFont.setStyleStrategy(QtGui.QFont.ForceOutline)
textPath.addText(10, 70, timesFont, "Qt")
bezierPath = QtGui.QPainterPath()
bezierPath.moveTo(20, 30)
bezierPath.cubicTo(80, 0, 50, 50, 80, 80)
starPath = QtGui.QPainterPath()
starPath.moveTo(90, 50)
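        # Stepping 0.8*pi (144 degrees) per vertex traces a five-pointed star.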
for i in range(1, 5):
starPath.lineTo(50 + 40 * cos(0.8 * i * pi),
50 + 40 * sin(0.8 * i * pi))
starPath.closeSubpath()
self.renderAreas = [RenderArea(rectPath), RenderArea(roundRectPath),
RenderArea(ellipsePath), RenderArea(piePath),
RenderArea(polygonPath), RenderArea(groupPath),
RenderArea(textPath), RenderArea(bezierPath),
RenderArea(starPath)]
assert len(self.renderAreas) == 9
self.fillRuleComboBox = QtGui.QComboBox()
self.fillRuleComboBox.addItem("Odd Even", QtCore.Qt.OddEvenFill)
self.fillRuleComboBox.addItem("Winding", QtCore.Qt.WindingFill)
fillRuleLabel = QtGui.QLabel("Fill &Rule:")
fillRuleLabel.setBuddy(self.fillRuleComboBox)
self.fillColor1ComboBox = QtGui.QComboBox()
self.populateWithColors(self.fillColor1ComboBox)
self.fillColor1ComboBox.setCurrentIndex(
self.fillColor1ComboBox.findText("mediumslateblue"))
self.fillColor2ComboBox = QtGui.QComboBox()
self.populateWithColors(self.fillColor2ComboBox)
self.fillColor2ComboBox.setCurrentIndex(
self.fillColor2ComboBox.findText("cornsilk"))
fillGradientLabel = QtGui.QLabel("&Fill Gradient:")
fillGradientLabel.setBuddy(self.fillColor1ComboBox)
fillToLabel = QtGui.QLabel("to")
fillToLabel.setSizePolicy(QtGui.QSizePolicy.Fixed,
QtGui.QSizePolicy.Fixed)
self.penWidthSpinBox = QtGui.QSpinBox()
self.penWidthSpinBox.setRange(0, 20)
penWidthLabel = QtGui.QLabel("&Pen Width:")
penWidthLabel.setBuddy(self.penWidthSpinBox)
self.penColorComboBox = QtGui.QComboBox()
self.populateWithColors(self.penColorComboBox)
self.penColorComboBox.setCurrentIndex(
self.penColorComboBox.findText('darkslateblue'))
penColorLabel = QtGui.QLabel("Pen &Color:")
penColorLabel.setBuddy(self.penColorComboBox)
self.rotationAngleSpinBox = QtGui.QSpinBox()
self.rotationAngleSpinBox.setRange(0, 359)
self.rotationAngleSpinBox.setWrapping(True)
self.rotationAngleSpinBox.setSuffix('\xB0')
rotationAngleLabel = QtGui.QLabel("&Rotation Angle:")
rotationAngleLabel.setBuddy(self.rotationAngleSpinBox)
self.fillRuleComboBox.activated.connect(self.fillRuleChanged)
self.fillColor1ComboBox.activated.connect(self.fillGradientChanged)
self.fillColor2ComboBox.activated.connect(self.fillGradientChanged)
self.penColorComboBox.activated.connect(self.penColorChanged)
for i in range(Window.NumRenderAreas):
self.penWidthSpinBox.valueChanged.connect(self.renderAreas[i].setPenWidth)
self.rotationAngleSpinBox.valueChanged.connect(self.renderAreas[i].setRotationAngle)
topLayout = QtGui.QGridLayout()
for i in range(Window.NumRenderAreas):
topLayout.addWidget(self.renderAreas[i], i / 3, i % 3)
mainLayout = QtGui.QGridLayout()
mainLayout.addLayout(topLayout, 0, 0, 1, 4)
mainLayout.addWidget(fillRuleLabel, 1, 0)
mainLayout.addWidget(self.fillRuleComboBox, 1, 1, 1, 3)
mainLayout.addWidget(fillGradientLabel, 2, 0)
mainLayout.addWidget(self.fillColor1ComboBox, 2, 1)
mainLayout.addWidget(fillToLabel, 2, 2)
mainLayout.addWidget(self.fillColor2ComboBox, 2, 3)
mainLayout.addWidget(penWidthLabel, 3, 0)
mainLayout.addWidget(self.penWidthSpinBox, 3, 1, 1, 3)
mainLayout.addWidget(penColorLabel, 4, 0)
mainLayout.addWidget(self.penColorComboBox, 4, 1, 1, 3)
mainLayout.addWidget(rotationAngleLabel, 5, 0)
mainLayout.addWidget(self.rotationAngleSpinBox, 5, 1, 1, 3)
self.setLayout(mainLayout)
self.fillRuleChanged()
self.fillGradientChanged()
self.penColorChanged()
self.penWidthSpinBox.setValue(2)
self.setWindowTitle("Painter Paths")
def fillRuleChanged(self):
rule = QtCore.Qt.FillRule(self.currentItemData(self.fillRuleComboBox))
for i in range(Window.NumRenderAreas):
self.renderAreas[i].setFillRule(rule)
def fillGradientChanged(self):
color1 = QtGui.QColor(self.currentItemData(self.fillColor1ComboBox))
color2 = QtGui.QColor(self.currentItemData(self.fillColor2ComboBox))
for i in range(Window.NumRenderAreas):
self.renderAreas[i].setFillGradient(color1, color2)
def penColorChanged(self):
color = QtGui.QColor(self.currentItemData(self.penColorComboBox))
for i in range(Window.NumRenderAreas):
self.renderAreas[i].setPenColor(color)
def populateWithColors(self, comboBox):
colorNames = QtGui.QColor.colorNames()
for name in colorNames:
comboBox.addItem(name, name)
def currentItemData(self, comboBox):
return comboBox.itemData(comboBox.currentIndex())
if __name__ == '__main__':
import sys
app = QtGui.QApplication(sys.argv)
window = Window()
window.show()
sys.exit(app.exec_())
|
mit
| -1,600,070,330,208,654,300
| 34.24031
| 96
| 0.648152
| false
| 3.463619
| false
| false
| false
|
Irrialite/YouTune
|
youtune/api/resources.py
|
1
|
18770
|
from datetime import date, datetime
import hashlib, inspect
from django.db.models import Q
from django.contrib.auth import authenticate, login, logout, models as auth_models
from django.contrib.auth.hashers import make_password
from django.conf.urls import url
from django.utils import timezone
from tastypie import resources, fields
from tastypie.authentication import Authentication
from tastypie.authorization import Authorization
from tastypie.constants import ALL, ALL_WITH_RELATIONS
from tastypie.serializers import Serializer
from tastypie.utils import trailing_slash
from tastypie.http import HttpUnauthorized, HttpForbidden
from tastypie.exceptions import BadRequest
from youtune.account import models, forms
from youtune.api.helpers import FieldsValidation
from youtune.api.authorization import UserObjectsOnlyAuthorization
from youtune.fileupload import models as file_models
class CommentDateSerializer(Serializer):
def format_datetime(self, data):
if self.datetime_formatting == 'rfc-2822':
return super(CommentDateSerializer, self).format_datetime(data)
return data.isoformat()
class UserProfileResource(resources.ModelResource):
id = fields.IntegerField(attribute="id", null=True)
class Meta:
queryset = models.UserProfile.objects.all()
resource_name = 'userprofile'
# TODO:
# Add custom Authorization (important)
authentication = Authentication()
authorization = Authorization()
# excludes = ['email', 'is_staff', 'is_superuser']
filtering = {
'username': ALL
}
def dehydrate_password(self, bundle):
return ''
def dehydrate(self, bundle):
if bundle.request.user.pk == bundle.obj.pk:
bundle.data['email'] = bundle.obj.email
bundle.data['is_staff'] = bundle.obj.is_staff
bundle.data['is_superuser'] = bundle.obj.is_superuser
model = bundle.obj.channel
ret = {}
for f in sorted(model._meta.fields + model._meta.many_to_many):
ret[f.name] = getattr(model, f.name)
bundle.data['channel'] = ret
return bundle
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/login%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('login'), name="api_login"),
url(r'^(?P<resource_name>%s)/logout%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('logout'), name='api_logout'),
url(r'^(?P<resource_name>%s)/loggedin%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('loggedin'), name='api_loggedin'),
url(r'^(?P<resource_name>%s)/checkfordupe%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('checkfordupe'), name='api_checkfordupe'),
url(r'^(?P<resource_name>%s)/update%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('update'), name='api_update'),
url(r'^(?P<resource_name>%s)/count%s$' %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('count'), name='api_count'),
]
def login(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
username = data.get('username', '')
password = data.get('password', '')
user = authenticate(username=username, password=password)
if user:
if user.is_active:
login(request, user)
return self.create_response(request, {
'success': True
})
else:
return self.create_response(request, {
'success': False,
'reason': 'disabled',
}, HttpForbidden)
else:
return self.create_response(request, {
'success': False,
'reason': 'incorrect',
}, HttpUnauthorized)
def logout(self, request, **kwargs):
self.method_check(request, allowed=['get'])
if request.user and request.user.is_authenticated():
logout(request)
return self.create_response(request, {'success': True})
else:
return self.create_response(request, {'success': False}, HttpUnauthorized)
def hydrate(self, bundle):
        # On PATCH keep the stored password hash; otherwise hash the submitted plaintext password.
if bundle.request.method == 'PATCH':
bundle.data['password'] = models.UserProfile.objects.get(pk=int(bundle.data['id'])).password
else:
bundle.data['password'] = make_password(bundle.data['password'])
if bundle.data['birthdate']:
birthdate = bundle.data['birthdate'].split("-")
birthdate = date(year=int(birthdate[0]), month=int(
birthdate[1]), day=int(birthdate[2]))
bundle.data['birthdate'] = birthdate
        bundle.data['avatar'] = "http://www.gravatar.com/avatar/" + hashlib.md5(bundle.data['email'].lower()).hexdigest()
return bundle
def loggedin(self, request, **kwargs):
self.method_check(request, allowed=['get'])
if request.user.is_authenticated():
return self.create_response(request, {
'success': True,
'id': request.user.id,
})
else:
return self.create_response(request, {
'success': False
})
def checkfordupe(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
username = data.get('username', '')
        user = None
try:
user = models.UserProfile.objects.get(username__iexact=username)
except models.UserProfile.DoesNotExist:
return self.create_response(request, {
'success': True,
})
else:
return self.create_response(request, {
'success': False,
'id': user.id,
})
def update(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
player_volume = data.get('player_volume', '')
player_autoplay = data.get('player_autoplay', '')
player_repeat = data.get('player_repeat', '')
player_format = data.get('player_format', '')
if request.user:
if request.user.is_authenticated():
user = request.user
user.player_volume = player_volume
user.player_autoplay = player_autoplay
user.player_repeat = player_repeat
user.player_format = player_format
user.save(update_fields=['player_volume',
'player_autoplay',
'player_repeat',
'player_format'])
return self.create_response(request, {
'success': True
})
else:
return self.create_response(request, {
'success': False,
}, HttpForbidden)
else:
return self.create_response(request, {
'success': False,
'reason': 'incorrect',
}, HttpUnauthorized)
def count(self, request, **kwargs):
self.method_check(request, allowed=['get'])
count = models.UserProfile.objects.count()
return self.create_response(request, {
'count': count,
})
def save(self, bundle, skip_errors=False):
bundle = super(UserProfileResource, self).save(bundle, skip_errors)
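        # Every newly saved profile gets its own Channel with a default description.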
desc = bundle.obj.username + "'s channel description."
channel = models.Channel(description=desc, owner=bundle.obj)
channel.save()
return bundle
class FileResource(resources.ModelResource):
objects_returned = 0
owner = fields.ForeignKey(UserProfileResource, 'owner')
class Meta:
allowed_methods = ['get']
queryset = file_models.File.objects.all()
resource_name = 'music'
filtering = {
'base64id': ALL,
'upload_date': ALL,
'owner': ALL_WITH_RELATIONS,
'views': ALL,
'lastview_date': ALL,
'query': ['icontains',],
}
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/vote%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('vote'), name="api_vote"),
]
# to sort by descending insert '-' (i.e. '-title')
def apply_sorting(self, objects, options=None):
if options:
if 'sortby' in options:
return objects.order_by(options['sortby'])
return super(FileResource, self).apply_sorting(objects, options)
def vote(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
vote = data.get('vote', '')
base64id = data.get('base64id', '')
userid = data.get('userid', '')
track = None
try:
track = file_models.File.objects.get(base64id__exact=base64id)
user = models.UserProfile.objects.get(pk=userid)
exists = False
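            # A user already in track.votes is switching sides, so remove the opposite vote; first-time voters are added to votes.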
if user in track.votes.all():
exists = True
if vote == "like":
track.likes.add(user)
if exists:
track.dislikes.remove(user)
else:
track.dislikes.add(user)
if exists:
track.likes.remove(user)
if not exists:
track.votes.add(user)
        except (file_models.File.DoesNotExist, models.UserProfile.DoesNotExist):
return self.create_response(request, {
'success': False,
})
else:
return self.create_response(request, {
'success': True,
'dislikes': track.votes.count() - track.likes.count(),
'likes': track.likes.count(),
})
def build_filters(self, filters=None):
if filters is None:
filters = {}
orm_filters = super(FileResource, self).build_filters(filters)
if('query' in filters):
query = filters['query']
query = query.split(' ')
qset = Q()
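            # AND together a per-word filter that matches title, tags, or artist (case-insensitive).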
for q in query:
if len(q.strip()) > 1:
qset &= (
Q(title__icontains=q) |
Q(tags__icontains=q) |
Q(artist__icontains=q)
)
orm_filters.update({'custom': qset})
return orm_filters
def apply_filters(self, request, applicable_filters):
if 'custom' in applicable_filters:
custom = applicable_filters.pop('custom')
else:
custom = None
semi_filtered = super(FileResource, self).apply_filters(request, applicable_filters)
return semi_filtered.filter(custom) if custom else semi_filtered
def dehydrate(self, bundle):
track = file_models.File.objects.get(pk=bundle.data['id'])
bundle.data['likes'] = track.likes.count()
bundle.data['dislikes'] = track.dislikes.count()
if self.objects_returned == 1:
bundle.data['owner'] = bundle.obj.owner.username
bundle.data['avatar'] = bundle.obj.owner.avatar + "?s=64"
if bundle.request.user and bundle.request.user.is_authenticated():
if bundle.request.user in track.likes.all():
bundle.data['voted'] = "like"
elif bundle.request.user in track.dislikes.all():
bundle.data['voted'] = "dislike"
else:
bundle.data['voted'] = "none"
else:
bundle.data['voted'] = "disallowed"
return bundle
def obj_get_list(self, bundle, **kwargs):
"""
        An ORM-specific implementation of ``obj_get_list``.
Takes an optional ``request`` object, whose ``GET`` dictionary can be
used to narrow the query.
"""
filters = {}
if hasattr(bundle.request, 'GET'):
# Grab a mutable copy.
filters = bundle.request.GET.copy()
# Update with the provided kwargs.
filters.update(kwargs)
channel = False
if 'owner' in filters:
channel = True
applicable_filters = self.build_filters(filters=filters)
try:
objects = self.apply_filters(bundle.request, applicable_filters)
self.objects_returned = len(objects)
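            # Exactly one match for a specific (non-channel) filter counts as a track view, so bump its counters.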
if len(objects) == 1 and applicable_filters and not channel:
obj = objects[0]
obj.views = obj.views + 1
obj.lastview_date = timezone.now()
obj.save(update_fields=['views', 'lastview_date'])
return self.authorized_read_list(objects, bundle)
except ValueError:
raise BadRequest("Invalid resource lookup data provided (mismatched type).")
class ChannelResource(resources.ModelResource):
class Meta:
allowed_methods = []
queryset = models.Channel.objects.all()
resource_name = 'channel'
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/update%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('update'), name="api_update"),
]
def update(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
desc = data.get('description', '')
if request.user:
if request.user.is_authenticated():
                channel = request.user.channel
                channel.description = desc
channel.save(update_fields=['description'])
return self.create_response(request, {
'success': True
})
else:
return self.create_response(request, {
'success': False,
}, HttpForbidden)
else:
return self.create_response(request, {
'success': False,
'reason': 'incorrect',
}, HttpUnauthorized)
class CommentResource(resources.ModelResource):
class Meta:
allowed_methods = ['get']
queryset = file_models.Comment.objects.all()
resource_name = 'comment'
serializer = CommentDateSerializer()
filtering = {
'base64id': ALL,
}
def override_urls(self):
return [
url(r"^(?P<resource_name>%s)/post%s$" %
(self._meta.resource_name, trailing_slash()),
self.wrap_view('post'), name="api_post"),
]
def post(self, request, **kwargs):
self.method_check(request, allowed=['post'])
data = self.deserialize(request, request.raw_post_data,
format=request.META.get('CONTENT_TYPE', 'application/json'))
body = data.get('commenttext', '')
fileid = data.get('fileid', '')
if request.user:
if request.user.is_authenticated():
try:
file = file_models.File.objects.get(pk=fileid)
except file_models.File.DoesNotExist:
return self.create_response(request, {
'success': False,
}, HttpForbidden)
else:
comment = file_models.Comment(owner=request.user, body=body, file=file)
comment.save()
file.comments.add(comment)
return self.create_response(request, {
'success': True,
'date': comment.post_date,
})
else:
return self.create_response(request, {
'success': False,
}, HttpForbidden)
else:
return self.create_response(request, {
'success': False,
'reason': 'incorrect',
}, HttpUnauthorized)
def apply_sorting(self, objects, options=None):
if options:
if 'sortby' in options:
return objects.order_by(options['sortby'])
return super(CommentResource, self).apply_sorting(objects, options)
def dehydrate(self, bundle):
bundle.data['owner'] = bundle.obj.owner.username
bundle.data['avatar'] = bundle.obj.owner.avatar + "?s=64"
return bundle
class UserValidation(FieldsValidation):
def __init__(self):
super(
UserValidation, self).__init__(required=['username', 'first_name', 'last_name'],
validated=['username'],
required_post=[
'email', 'password'],
validated_post=['password'],
)
@staticmethod
def password_is_valid(password, bundle):
if len(password) < 6:
return False, 'Password is too short.'
return True, ""
@staticmethod
def username_is_valid(username, bundle):
try:
            user = auth_models.User.objects.get(username=username)
if user is not None and str(user.id) != str(bundle.data.get('id', 0)):
return False, "The username is already taken."
        except auth_models.User.DoesNotExist:
return True, ""
return True, ""
|
bsd-3-clause
| -6,080,168,689,550,056,000
| 37.150407
| 122
| 0.535962
| false
| 4.495808
| false
| false
| false
|
ver228/tierpsy-tracker
|
tierpsy/debugging/check_default_attrs.py
|
1
|
3346
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 30 16:51:07 2019
@author: lferiani
"""
import glob
import os
import tables
from tierpsy.helper.misc import RESERVED_EXT
from tierpsy.helper.params import set_unit_conversions, read_unit_conversions
from tierpsy import DFLT_PARAMS_PATH, DFLT_PARAMS_FILES
from tierpsy.helper.params import TrackerParams
#script to correct a previous bug in how the expected_fps, microns_per_pixel are saved.
# actually let's first check if files have gone bad!
#%%
params = TrackerParams(os.path.join(DFLT_PARAMS_PATH, '_AEX_RIG.json'))
expected_fps = params.p_dict['expected_fps']
microns_per_pixel = params.p_dict['microns_per_pixel']
#%%
#main_dir = '/Volumes/behavgenom_archive$/Adam/screening'
#fnames = glob.glob(os.path.join(main_dir, '**', '*.hdf5'), recursive=True)
#dname = '/Volumes/behavgenom_archive$/Ida/test_3/**/*.hdf5'
#dname = '/Volumes/behavgenom_archive$/Ida/LoopBio_rig/180222_blue_light/3/**/*.hdf5'
#dname = '/Volumes/behavgenom$/Bertie/singleplatequiescence/**/*.hdf5'
#fnames = glob.glob(dname, recursive=True)
#
#masked_files = [x for x in fnames if not any(x.endswith(ext) for ext in RESERVED_EXT)]
#skeletons_files = [x for x in fnames if x.endswith('_skeletons.hdf5')]
mv_dname = '/Volumes/behavgenom$/Bertie/singleplatequiescence/MaskedVideos/**/*.hdf5'
fnames = glob.glob(mv_dname, recursive=True)
masked_files = [x for x in fnames if not any(x.endswith(ext) for ext in RESERVED_EXT)]
r_dname = '/Volumes/behavgenom$/Bertie/singleplatequiescence/Results/**/*.hdf5'
r_fnames = glob.glob(r_dname, recursive=True)
skeletons_files = [x for x in r_fnames if x.endswith('_skeletons.hdf5')]
#%% check inconsistencies
print('MaskedVideos without skeletons:')
for f in masked_files:
foo = f.replace('MaskedVideos','Results')
foo = foo.replace('.hdf5','_skeletons.hdf5')
if foo not in skeletons_files:
print(f)
print('skeletons without MaskedVideos:')
for f in skeletons_files:
foo = f.replace('Results','MaskedVideos')
foo = foo.replace('_skeletons.hdf5','.hdf5')
if foo not in masked_files:
print(f)
#%%
def check_attrs(fname):
fps_out, microns_per_pixel_out, is_light_background = read_unit_conversions(fname)
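    # Flag files whose stored attributes differ from the rig defaults: 25 fps (seconds) and 10 micrometers per pixel.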
if fps_out != (25.0, 25.0, 'seconds') or \
microns_per_pixel_out != (10.0, 'micrometers'):
print('Fix %s' % os.path.basename(fname))
return
for i,fname in enumerate(masked_files):
if i<900:
continue
if i%100==0:
print(i)
try:
check_attrs(fname)
except:
print('Failed to check %s' % fname)
for i,fname in enumerate(skeletons_files):
if i%100==0:
print(i)
try:
check_attrs(fname)
except:
print('Failed to check %s' % fname)
#%%
def change_attrs(fname, field_name):
print(os.path.basename(fname))
read_unit_conversions(fname)
with tables.File(fname, 'r+') as fid:
group_to_save = fid.get_node(field_name)
set_unit_conversions(group_to_save,
expected_fps=expected_fps,
microns_per_pixel=microns_per_pixel)
read_unit_conversions(fname)
#for fname in masked_files:
# change_attrs(fname, '/mask')
#for fname in skeletons_files:
# change_attrs(fname, '/trajectories_data')
|
mit
| -8,318,422,337,788,364,000
| 29.153153
| 87
| 0.668261
| false
| 2.901995
| false
| false
| false
|
kelvinguu/lang2program
|
strongsup/embeddings.py
|
1
|
8329
|
import os
from collections import namedtuple
from os.path import join
import numpy as np
from dependency.data_directory import DataDirectory
from gtd.chrono import verboserate
from gtd.ml.vocab import SimpleVocab, SimpleEmbeddings
from gtd.utils import random_seed, cached_property, ComparableMixin
from strongsup.tables.predicate import WikiTablePredicateType, WikiTablePredicate
from strongsup.tables.world import TableWorld
def emulate_distribution(shape, target_samples, seed=None):
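    # Draw normal samples whose mean and standard deviation match the target sample distribution.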
m = np.mean(target_samples)
s = np.std(target_samples)
with random_seed(seed):
samples = np.random.normal(m, s, size=shape)
return samples
class StaticPredicateEmbeddings(SimpleEmbeddings):
"""All base predicate embeddings are initialized with zero vectors."""
def __init__(self, embed_dim, fixed_predicates):
vocab = ContextualPredicateVocab([ContextualPredicate(pred, None) for pred in fixed_predicates])
array = emulate_distribution((len(vocab), embed_dim), GloveEmbeddings(5000).array, seed=0)
super(StaticPredicateEmbeddings, self).__init__(array, vocab)
class TypeEmbeddings(SimpleEmbeddings):
"""All type embeddings are initialized with zero vectors."""
def __init__(self, embed_dim, all_types):
vocab = SimpleVocab(all_types)
array = emulate_distribution((len(vocab), embed_dim), GloveEmbeddings(5000).array, seed=1)
super(TypeEmbeddings, self).__init__(array, vocab)
class RLongPrimitiveEmbeddings(SimpleEmbeddings):
def __init__(self, embed_dim):
OBJECT = 'object'
LIST = 'list'
tokens = [
OBJECT, LIST,
'r', 'y', 'g', 'o', 'p', 'b', 'e', # 7 colors
'color-na', # if an Alchemy beaker is empty or has multiple colors
# TODO(kelvin): change the behavior of RLongAlchemyObject.color to return `color-na`
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, # 0 index is used to represent things that are not visible
-1,
'X1/1',
'0', '1', '2', '3', '4', # Shapes!
]
vocab = SimpleVocab(tokens)
vocab.OBJECT = OBJECT
vocab.LIST = LIST
array = emulate_distribution((len(vocab), embed_dim), GloveEmbeddings(5000).array, seed=3)
super(RLongPrimitiveEmbeddings, self).__init__(array, vocab)
class UtteranceVocab(SimpleVocab):
"""Vocab for input utterances.
IMPORTANT NOTE: UtteranceVocab is blind to casing! All words are converted to lower-case.
An UtteranceVocab is required to have the following special tokens: UNK, PAD
See class attributes for more info.
"""
UNK = u"<unk>"
PAD = u"<pad>"
SPECIAL_TOKENS = (UNK, PAD)
def __init__(self, tokens):
tokens = [t.lower() for t in tokens]
super(UtteranceVocab, self).__init__(tokens)
# check that all special tokens present
for special in self.SPECIAL_TOKENS:
if special not in self._word2index:
raise ValueError('All special tokens must be present in tokens. Missing {}'.format(special))
def word2index(self, w):
"""Map a word to an integer.
If the word is not known to the vocab, return the index for UNK.
"""
sup = super(UtteranceVocab, self)
try:
return sup.word2index(w.lower())
except KeyError:
return sup.word2index(self.UNK)
class GloveEmbeddings(SimpleEmbeddings):
def __init__(self, vocab_size=400000):
"""Load GloveEmbeddings.
Args:
            vocab_size (int): max # of words in the vocab. If not specified, uses all available GloVe vectors.
        Returns:
            (np.array, UtteranceVocab)
"""
embed_dim = 100
if vocab_size < 5000:
raise ValueError('Need to at least use 5000 words.')
glove_path = join(DataDirectory.glove, 'glove.6B.100d.txt')
download_path = 'http://nlp.stanford.edu/data/glove.6B.zip'
if not os.path.exists(glove_path):
raise RuntimeError('Missing file: {}. Download it here: {}'.format(glove_path, download_path))
# embeddings for special words
words = list(UtteranceVocab.SPECIAL_TOKENS)
num_special = len(words)
embeds = [np.zeros(embed_dim, dtype=np.float32) for _ in words] # zeros are just placeholders for now
with open(glove_path, 'r') as f:
lines = verboserate(f, desc='Loading GloVe embeddings', total=vocab_size, initial=num_special)
for i, line in enumerate(lines, start=num_special):
if i == vocab_size: break
tokens = line.split()
word, embed = tokens[0], np.array([float(tok) for tok in tokens[1:]])
words.append(word)
embeds.append(embed)
vocab = UtteranceVocab(words)
embed_matrix = np.stack(embeds)
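        # Replace the zero placeholders for special tokens with samples matching the GloVe vector distribution.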
special_embeds = emulate_distribution((num_special, embed_dim), embed_matrix[:5000, :], seed=2)
embed_matrix[:num_special, :] = special_embeds
assert embed_matrix.shape[1] == 100
super(GloveEmbeddings, self).__init__(embed_matrix, vocab)
ContextualPredicate = namedtuple('ContextualPredicate', ['predicate', 'utterance'])
# A predicate paired with the utterance it may be mentioned in.
#
# Args:
# predicate (Predicate)
# utterance (Utterance)
class ContextualPredicateVocab(SimpleVocab):
def __init__(self, tokens):
"""Create Vocab.
Args:
tokens (list[ContextualPredicate]): each token is a (Predicate, Context) pair.
"""
for tok in tokens:
if not isinstance(tok, ContextualPredicate):
raise ValueError("Every token must be a ContextualPredicate.")
super(ContextualPredicateVocab, self).__init__(tokens)
class Vocabs(object):
def __init__(self, utterances, domain):
"""Construct Vocabs.
Args:
utterances (frozenset[Utterance]): a frozenset of Utterance objects
"""
assert isinstance(utterances, frozenset)
self._utterance_set = utterances
self._fixed_predicates = domain.fixed_predicates
self._fixed_predicates_set = set(self._fixed_predicates)
def __hash__(self):
return hash(self._utterance_set)
def __eq__(self, other):
if not isinstance(other, Vocabs):
return False
return self._utterance_set == other._utterance_set
@cached_property
def utterances(self):
tokens = sorted(list(self._utterance_set))
return SimpleVocab(tokens)
def as_contextual_pred(self, pred, utterance):
if self.is_static_pred(pred):
utterance = None
return ContextualPredicate(pred, utterance)
def is_static_pred(self, pred):
return pred in self._fixed_predicates_set
@cached_property
def static_preds(self):
return ContextualPredicateVocab([self.as_contextual_pred(pred, None) for pred in self._fixed_predicates])
@cached_property
def dynamic_preds(self):
tokens = set()
for utterance in self._utterance_set:
for pred in utterance.context.predicates:
if not self.is_static_pred(pred):
tokens.add(self.as_contextual_pred(pred, utterance))
# include all entities in the corresponding table
# TODO(kelvin): improve this hack
world = utterance.context.world
if isinstance(world, TableWorld):
graph = world.graph
rows = graph.all_rows
ent_strs = set()
for col_str in graph.all_columns:
ent_strs.update(graph.reversed_join(col_str, rows))
ents = [WikiTablePredicate(s) for s in ent_strs]
tokens.update([self.as_contextual_pred(e, utterance) for e in ents])
# necessary to ensure a deterministic result
tokens = sorted(list(tokens))
return ContextualPredicateVocab(tokens)
@cached_property
def all_preds(self):
static = self.static_preds
dynamic = self.dynamic_preds
joint_tokens = []
joint_tokens.extend(static.tokens)
joint_tokens.extend(dynamic.tokens)
return ContextualPredicateVocab(joint_tokens)
|
apache-2.0
| -25,720,945,844,418,384
| 35.213043
| 115
| 0.629848
| false
| 3.811899
| false
| false
| false
|
rhyolight/nupic.son
|
tests/app/soc/modules/seeder/logic/ndb_models.py
|
1
|
1269
|
# Copyright 2013 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ndb model classes for seeder testing."""
from google.appengine.ext import ndb
from melange.appengine import db as db_util
class NdbDummyModel(ndb.Model):
"""A ndb dummy model class for seeder testing."""
boolean = ndb.BooleanProperty(required=True)
name = ndb.StringProperty(required=True)
link = ndb.StringProperty(required=True, validator=db_util.link_validator)
email = ndb.StringProperty(required=True, validator=db_util.email_validator)
numbers = ndb.IntegerProperty(repeated=True)
class NdbKeyProperty(ndb.Model):
"""A ndb model class with KeyProperty for seeder testing."""
name = ndb.StringProperty(required=True)
key = ndb.KeyProperty(required=True)
|
apache-2.0
| -8,367,067,377,884,833,000
| 36.323529
| 78
| 0.764381
| false
| 3.857143
| false
| false
| false
|
seagatesoft/dateparser
|
tests/test_freshness_date_parser.py
|
1
|
19130
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import six
import unittest
from datetime import datetime, timedelta, date, time
from functools import wraps
from dateutil.relativedelta import relativedelta
from mock import Mock, patch
from nose_parameterized import parameterized, param
from dateparser.date import DateDataParser, freshness_date_parser
from tests import BaseTestCase
class TestFreshnessDateDataParser(BaseTestCase):
def setUp(self):
super(TestFreshnessDateDataParser, self).setUp()
self.now = datetime(2014, 9, 1, 10, 30)
self.date_string = NotImplemented
self.parser = NotImplemented
self.result = NotImplemented
self.freshness_parser = NotImplemented
self.freshness_result = NotImplemented
self.date = NotImplemented
self.time = NotImplemented
@parameterized.expand([
# English dates
param('yesterday', ago={'days': 1}, period='day'),
param('the day before yesterday', ago={'days': 2}, period='day'),
param('today', ago={'days': 0}, period='day'),
param('an hour ago', ago={'hours': 1}, period='day'),
param('about an hour ago', ago={'hours': 1}, period='day'),
param('a day ago', ago={'days': 1}, period='day'),
param('a week ago', ago={'weeks': 1}, period='week'),
param('one week ago', ago={'weeks': 1}, period='week'),
param('2 hours ago', ago={'hours': 2}, period='day'),
param('about 23 hours ago', ago={'hours': 23}, period='day'),
param('1 year 2 months', ago={'years': 1, 'months': 2}, period='month'),
param('1 year, 09 months,01 weeks', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 year 11 months', ago={'years': 1, 'months': 11}, period='month'),
param('1 year 12 months', ago={'years': 1, 'months': 12}, period='month'),
param('15 hr', ago={'hours': 15}, period='day'),
param('15 hrs', ago={'hours': 15}, period='day'),
param('2 min', ago={'minutes': 2}, period='day'),
param('2 mins', ago={'minutes': 2}, period='day'),
param('3 sec', ago={'seconds': 3}, period='day'),
param('1000 years ago', ago={'years': 1000}, period='year'),
param('2013 years ago', ago={'years': 2013}, period='year'), # We've fixed .now in setUp
param('5000 months ago', ago={'years': 416, 'months': 8}, period='month'),
param('{} months ago'.format(2013 * 12 + 8), ago={'years': 2013, 'months': 8}, period='month'),
param('1 year, 1 month, 1 week, 1 day, 1 hour and 1 minute ago',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
param('just now', ago={'seconds':0}, period='day'),
# French dates
param("Aujourd'hui", ago={'days': 0}, period='day'),
param("Hier", ago={'days': 1}, period='day'),
param("Avant-hier", ago={'days': 2}, period='day'),
param('Il ya un jour', ago={'days': 1}, period='day'),
param('Il ya une heure', ago={'hours': 1}, period='day'),
param('Il ya 2 heures', ago={'hours': 2}, period='day'),
param('Il ya environ 23 heures', ago={'hours': 23}, period='day'),
param('1 an 2 mois', ago={'years': 1, 'months': 2}, period='month'),
param('1 année, 09 mois, 01 semaines', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 an 11 mois', ago={'years': 1, 'months': 11}, period='month'),
param('Il ya 1 an, 1 mois, 1 semaine, 1 jour, 1 heure et 1 minute',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
param('Il y a 40 min', ago={'minutes': 40}, period='day'),
# German dates
param('Heute', ago={'days': 0}, period='day'),
param('Gestern', ago={'days': 1}, period='day'),
param('vorgestern', ago={'days': 2}, period='day'),
param('vor einem Tag', ago={'days': 1}, period='day'),
param('vor einer Stunden', ago={'hours': 1}, period='day'),
param('Vor 2 Stunden', ago={'hours': 2}, period='day'),
param('Vor 2 Stunden', ago={'hours': 2}, period='day'),
param('vor etwa 23 Stunden', ago={'hours': 23}, period='day'),
param('1 Jahr 2 Monate', ago={'years': 1, 'months': 2}, period='month'),
param('1 Jahr, 09 Monate, 01 Wochen', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 Jahr 11 Monate', ago={'years': 1, 'months': 11}, period='month'),
param('vor 29h', ago={'hours': 29}, period='day'),
param('vor 29m', ago={'minutes': 29}, period='day'),
param('1 Jahr, 1 Monat, 1 Woche, 1 Tag, 1 Stunde und 1 Minute',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Italian dates
param('oggi', ago={'days': 0}, period='day'),
param('ieri', ago={'days': 1}, period='day'),
param('2 ore fa', ago={'hours': 2}, period='day'),
param('circa 23 ore fa', ago={'hours': 23}, period='day'),
param('1 anno 2 mesi', ago={'years': 1, 'months': 2}, period='month'),
param('1 anno, 09 mesi, 01 settimane', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 anno 11 mesi', ago={'years': 1, 'months': 11}, period='month'),
param('1 anno, 1 mese, 1 settimana, 1 giorno, 1 ora e 1 minuto fa',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Portuguese dates
param('ontem', ago={'days': 1}, period='day'),
param('anteontem', ago={'days': 2}, period='day'),
param('hoje', ago={'days': 0}, period='day'),
param('uma hora atrás', ago={'hours': 1}, period='day'),
param('um dia atrás', ago={'days': 1}, period='day'),
param('uma semana atrás', ago={'weeks': 1}, period='week'),
param('2 horas atrás', ago={'hours': 2}, period='day'),
param('cerca de 23 horas atrás', ago={'hours': 23}, period='day'),
param('1 ano 2 meses', ago={'years': 1, 'months': 2}, period='month'),
param('1 ano, 09 meses, 01 semanas', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 ano 11 meses', ago={'years': 1, 'months': 11}, period='month'),
param('1 ano, 1 mês, 1 semana, 1 dia, 1 hora e 1 minuto atrás',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Turkish dates
param('Dün', ago={'days': 1}, period='day'),
param('Bugün', ago={'days': 0}, period='day'),
param('2 saat önce', ago={'hours': 2}, period='day'),
param('yaklaşık 23 saat önce', ago={'hours': 23}, period='day'),
param('1 yıl 2 ay', ago={'years': 1, 'months': 2}, period='month'),
param('1 yıl, 09 ay, 01 hafta', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 yıl 11 ay', ago={'years': 1, 'months': 11}, period='month'),
param('1 yıl, 1 ay, 1 hafta, 1 gün, 1 saat ve 1 dakika önce',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Russian dates
param('сегодня', ago={'days': 0}, period='day'),
param('Вчера в', ago={'days': 1}, period='day'),
param('вчера', ago={'days': 1}, period='day'),
param('2 часа назад', ago={'hours': 2}, period='day'),
param('час назад', ago={'hours': 1}, period='day'),
param('минуту назад', ago={'minutes': 1}, period='day'),
param('2 ч. 21 мин. назад', ago={'hours': 2, 'minutes': 21}, period='day'),
param('около 23 часов назад', ago={'hours': 23}, period='day'),
param('1 год 2 месяца', ago={'years': 1, 'months': 2}, period='month'),
param('1 год, 09 месяцев, 01 недель', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 год 11 месяцев', ago={'years': 1, 'months': 11}, period='month'),
param('1 год, 1 месяц, 1 неделя, 1 день, 1 час и 1 минуту назад',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Czech dates
param('Dnes', ago={'days': 0}, period='day'),
param('Včera', ago={'days': 1}, period='day'),
param('Předevčírem', ago={'days': 2}, period='day'),
param('Před 2 hodinami', ago={'hours': 2}, period='day'),
param('před přibližně 23 hodin', ago={'hours': 23}, period='day'),
param('1 rok 2 měsíce', ago={'years': 1, 'months': 2}, period='month'),
param('1 rok, 09 měsíců, 01 týdnů', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 rok 11 měsíců', ago={'years': 1, 'months': 11}, period='month'),
param('3 dny', ago={'days': 3}, period='day'),
param('3 hodiny', ago={'hours': 3}, period='day'),
param('1 rok, 1 měsíc, 1 týden, 1 den, 1 hodina, 1 minuta před',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Spanish dates
param('anteayer', ago={'days': 2}, period='day'),
param('ayer', ago={'days': 1}, period='day'),
param('hoy', ago={'days': 0}, period='day'),
param('hace una hora', ago={'hours': 1}, period='day'),
param('Hace un día', ago={'days': 1}, period='day'),
param('Hace una semana', ago={'weeks': 1}, period='week'),
param('Hace 2 horas', ago={'hours': 2}, period='day'),
param('Hace cerca de 23 horas', ago={'hours': 23}, period='day'),
param('1 año 2 meses', ago={'years': 1, 'months': 2}, period='month'),
param('1 año, 09 meses, 01 semanas', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1 año 11 meses', ago={'years': 1, 'months': 11}, period='month'),
param('Hace 1 año, 1 mes, 1 semana, 1 día, 1 hora y 1 minuto',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Chinese dates
param('昨天', ago={'days': 1}, period='day'),
param('前天', ago={'days': 2}, period='day'),
param('2小时前', ago={'hours': 2}, period='day'),
param('约23小时前', ago={'hours': 23}, period='day'),
param('1年2个月', ago={'years': 1, 'months': 2}, period='month'),
param('1年09月,01周', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('1年11个月', ago={'years': 1, 'months': 11}, period='month'),
param('1年,1月,1周,1天,1小时,1分钟前',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Arabic dates
param('اليوم', ago={'days': 0}, period='day'),
param('يوم أمس', ago={'days': 1}, period='day'),
param('منذ يومين', ago={'days': 2}, period='day'),
param('منذ 3 أيام', ago={'days': 3}, period='day'),
param('منذ 21 أيام', ago={'days': 21}, period='day'),
param('1 عام, 1 شهر, 1 أسبوع, 1 يوم, 1 ساعة, 1 دقيقة',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Thai dates
param('วันนี้', ago={'days': 0}, period='day'),
param('เมื่อวานนี้', ago={'days': 1}, period='day'),
param('2 วัน', ago={'days': 2}, period='day'),
param('2 ชั่วโมง', ago={'hours': 2}, period='day'),
param('23 ชม.', ago={'hours': 23}, period='day'),
param('2 สัปดาห์ 3 วัน', ago={'weeks': 2, 'days': 3}, period='day'),
param('1 ปี 9 เดือน 1 สัปดาห์', ago={'years': 1, 'months': 9, 'weeks': 1},
period='week'),
param('1 ปี 1 เดือน 1 สัปดาห์ 1 วัน 1 ชั่วโมง 1 นาที',
ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
period='day'),
# Vietnamese dates
param('Hôm nay', ago={'days': 0}, period='day'),
param('Hôm qua', ago={'days': 1}, period='day'),
param('2 giờ', ago={'hours': 2}, period='day'),
param('2 tuần 3 ngày', ago={'weeks': 2, 'days': 3}, period='day'),
# following test unsupported, refer to discussion at:
# http://github.com/scrapinghub/dateparser/issues/33
#param('1 năm 1 tháng 1 tuần 1 ngày 1 giờ 1 chút',
# ago={'years': 1, 'months': 1, 'weeks': 1, 'days': 1, 'hours': 1, 'minutes': 1},
# period='day'),
# Belarusian dates
param('сёння', ago={'days': 0}, period='day'),
param('учора ў', ago={'days': 1}, period='day'),
param('ўчора', ago={'days': 1}, period='day'),
param('пазаўчора', ago={'days': 2}, period='day'),
param('2 гадзіны таму назад', ago={'hours': 2}, period='day'),
param('2 гадзіны таму', ago={'hours': 2}, period='day'),
param('гадзіну назад', ago={'hours': 1}, period='day'),
param('хвіліну таму', ago={'minutes': 1}, period='day'),
param('2 гадзіны 21 хвіл. назад', ago={'hours': 2, 'minutes': 21}, period='day'),
param('каля 23 гадзін назад', ago={'hours': 23}, period='day'),
param('1 год 2 месяцы', ago={'years': 1, 'months': 2}, period='month'),
param('1 год, 09 месяцаў, 01 тыдзень', ago={'years': 1, 'months': 9, 'weeks': 1}, period='week'),
param('2 гады 3 месяцы', ago={'years': 2, 'months': 3}, period='month'),
param('5 гадоў, 1 месяц, 6 тыдняў, 3 дні, 5 гадзін 1 хвіліну і 3 секунды таму назад',
ago={'years': 5, 'months': 1, 'weeks': 6, 'days': 3, 'hours': 5, 'minutes': 1, 'seconds': 3},
period='day'),
])
def test_relative_dates(self, date_string, ago, period):
self.given_parser()
self.given_date_string(date_string)
self.when_date_is_parsed()
self.then_error_was_not_raised()
self.then_date_was_parsed_by_freshness_parser()
self.then_date_obj_is_exactly_this_time_ago(ago)
self.then_period_is(period)
@parameterized.expand([
param('15th of Aug, 2014 Diane Bennett'),
])
def test_insane_dates(self, date_string):
self.given_parser()
self.given_date_string(date_string)
self.when_date_is_parsed()
self.then_error_was_not_raised()
self.then_date_was_not_parsed()
@parameterized.expand([
param('5000 years ago'),
param('2014 years ago'), # We've fixed .now in setUp
param('{} months ago'.format(2013 * 12 + 9)),
])
def test_dates_not_supported_by_date_time(self, date_string):
self.given_parser()
self.given_date_string(date_string)
self.when_date_is_parsed()
self.then_error_was_raised(ValueError, ['year is out of range',
"('year must be in 1..9999'"])
@parameterized.expand([
param('несколько секунд назад', boundary={'seconds': 45}, period='day'),
param('há alguns segundos', boundary={'seconds': 45}, period='day'),
])
def test_inexplicit_dates(self, date_string, boundary, period):
self.given_parser()
self.given_date_string(date_string)
self.when_date_is_parsed()
self.then_error_was_not_raised()
self.then_date_was_parsed_by_freshness_parser()
self.then_period_is(period)
self.then_date_obj_is_between(self.now - timedelta(**boundary), self.now)
@parameterized.expand([
param('Today at 9 pm', date(2014, 9, 1), time(21, 0)),
param('Today at 11:20 am', date(2014, 9, 1), time(11, 20)),
param('Yesterday 1:20 pm', date(2014, 8, 31), time(13, 20)),
param('the day before yesterday 16:50', date(2014, 8, 30), time(16, 50)),
param('2 Tage 18:50', date(2014, 8, 30), time(18, 50)),
param('1 day ago at 2 PM', date(2014, 8, 31), time(14, 0)),
param('Dnes v 12:40', date(2014, 9, 1), time(12, 40)),
])
def test_freshness_date_with_time(self, date_string, date, time):
self.given_parser()
self.given_date_string(date_string)
self.when_date_is_parsed()
self.then_date_is(date)
self.then_time_is(time)
def given_date_string(self, date_string):
self.date_string = date_string
def given_parser(self):
self.add_patch(patch.object(freshness_date_parser, 'now', self.now))
def collecting_get_date_data(get_date_data):
@wraps(get_date_data)
def wrapped(date_string):
self.freshness_result = get_date_data(date_string)
return self.freshness_result
return wrapped
self.add_patch(patch.object(freshness_date_parser,
'get_date_data',
collecting_get_date_data(freshness_date_parser.get_date_data)))
self.freshness_parser = Mock(wraps=freshness_date_parser)
self.add_patch(patch('dateparser.date.freshness_date_parser', new=self.freshness_parser))
self.parser = DateDataParser()
def when_date_is_parsed(self):
try:
self.result = self.parser.get_date_data(self.date_string)
except Exception as error:
self.error = error
def then_date_is(self, date):
self.assertEqual(date, self.result['date_obj'].date())
def then_time_is(self, time):
self.assertEqual(time, self.result['date_obj'].time())
def then_period_is(self, period):
self.assertEqual(period, self.result['period'])
def then_date_obj_is_between(self, low_boundary, high_boundary):
self.assertGreater(self.result['date_obj'], low_boundary)
self.assertLess(self.result['date_obj'], high_boundary)
def then_date_obj_is_exactly_this_time_ago(self, ago):
self.assertEqual(self.now - relativedelta(**ago), self.result['date_obj'])
def then_date_was_not_parsed(self):
self.assertIsNone(self.result['date_obj'], '"%s" should not be parsed' % self.date_string)
def then_date_was_parsed_by_freshness_parser(self):
self.assertEqual(self.result, self.freshness_result)
def then_error_was_not_raised(self):
self.assertEqual(NotImplemented, self.error)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
| -581,672,396,525,162,600
| 50.619048
| 107
| 0.548567
| false
| 3.017521
| true
| false
| false
|
jasondunsmore/heat
|
heat/engine/resources/openstack/nova/flavor.py
|
1
|
5017
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.i18n import _
from heat.engine import attributes
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class NovaFlavor(resource.Resource):
"""A resource for creating OpenStack virtual hardware templates.
    Due to the default nova security policy, usage of this resource is
    limited to administrators only. The rights may also be delegated to
    other users by redefining the access controls on the nova-api server.
Note that the current implementation of the Nova Flavor resource does not
allow specifying the name and flavorid properties for the resource.
    This is done to avoid potential naming collisions upon flavor creation, as
    all flavors have a global scope.
"""
support_status = support.SupportStatus(version='2014.2')
default_client_name = 'nova'
required_service_extension = 'os-flavor-manage'
entity = 'flavors'
PROPERTIES = (
RAM, VCPUS, DISK, SWAP, EPHEMERAL,
RXTX_FACTOR, EXTRA_SPECS, IS_PUBLIC
) = (
'ram', 'vcpus', 'disk', 'swap', 'ephemeral',
'rxtx_factor', 'extra_specs', 'is_public',
)
ATTRIBUTES = (
IS_PUBLIC_ATTR,
) = (
'is_public',
)
properties_schema = {
RAM: properties.Schema(
properties.Schema.INTEGER,
_('Memory in MB for the flavor.'),
required=True
),
VCPUS: properties.Schema(
properties.Schema.INTEGER,
_('Number of VCPUs for the flavor.'),
required=True
),
DISK: properties.Schema(
properties.Schema.INTEGER,
_('Size of local disk in GB. The "0" size is a special case that '
'uses the native base image size as the size of the ephemeral '
'root volume.'),
default=0
),
SWAP: properties.Schema(
properties.Schema.INTEGER,
_('Swap space in MB.'),
default=0
),
EPHEMERAL: properties.Schema(
properties.Schema.INTEGER,
_('Size of a secondary ephemeral data disk in GB.'),
default=0
),
RXTX_FACTOR: properties.Schema(
properties.Schema.NUMBER,
_('RX/TX factor.'),
default=1.0
),
EXTRA_SPECS: properties.Schema(
properties.Schema.MAP,
_('Key/Value pairs to extend the capabilities of the flavor.'),
update_allowed=True,
),
IS_PUBLIC: properties.Schema(
properties.Schema.BOOLEAN,
_('Scope of flavor accessibility. Public or private. '
'Default value is True, means public, shared '
'across all projects.'),
default=True,
support_status=support.SupportStatus(version='6.0.0'),
),
}
attributes_schema = {
IS_PUBLIC_ATTR: attributes.Schema(
_('Whether the flavor is shared across all projects.'),
support_status=support.SupportStatus(version='6.0.0'),
type=attributes.Schema.BOOLEAN
),
}
def handle_create(self):
args = dict(self.properties)
args['flavorid'] = 'auto'
args['name'] = self.physical_resource_name()
flavor_keys = args.pop(self.EXTRA_SPECS)
flavor = self.client().flavors.create(**args)
self.resource_id_set(flavor.id)
if flavor_keys:
flavor.set_keys(flavor_keys)
tenant = self.stack.context.tenant_id
if not args['is_public']:
# grant access only to the active project(private flavor)
self.client().flavor_access.add_tenant_access(flavor, tenant)
def handle_update(self, json_snippet, tmpl_diff, prop_diff):
"""Update nova flavor."""
if self.EXTRA_SPECS in prop_diff:
flavor = self.client().flavors.get(self.resource_id)
old_keys = flavor.get_keys()
flavor.unset_keys(old_keys)
new_keys = prop_diff.get(self.EXTRA_SPECS)
if new_keys is not None:
flavor.set_keys(new_keys)
def _resolve_attribute(self, name):
flavor = self.client().flavors.get(self.resource_id)
if name == self.IS_PUBLIC_ATTR:
return getattr(flavor, name)
def resource_mapping():
return {
'OS::Nova::Flavor': NovaFlavor
}
|
apache-2.0
| 4,630,381,481,356,611,000
| 32.898649
| 78
| 0.606936
| false
| 4.233755
| false
| false
| false
|
Lokke/eden
|
modules/s3db/cap.py
|
1
|
110403
|
# -*- coding: utf-8 -*-
""" Sahana Eden Common Alerting Protocol (CAP) Model
@copyright: 2009-2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3CAPModel",
"S3CAPAreaNameModel",
"cap_info_labels",
"cap_alert_is_template",
"cap_rheader",
"cap_alert_list_layout",
"add_area_from_template",
"cap_AssignArea",
"cap_AreaRepresent",
#"cap_gis_location_xml_post_parse",
#"cap_gis_location_xml_post_render",
)
import datetime
import urllib2 # Needed for quoting & error handling on fetch
try:
from cStringIO import StringIO # Faster, where available
except:
from StringIO import StringIO
from gluon import *
from gluon.storage import Storage
from gluon.tools import fetch
from ..s3 import *
# =============================================================================
class S3CAPModel(S3Model):
"""
CAP: Common Alerting Protocol
- this module is a non-functional stub
http://eden.sahanafoundation.org/wiki/BluePrint/Messaging#CAP
"""
names = ("cap_alert",
"cap_alert_represent",
"cap_warning_priority",
"cap_info",
"cap_info_represent",
"cap_resource",
"cap_area",
"cap_area_id",
"cap_area_represent",
"cap_area_location",
"cap_area_tag",
"cap_info_category_opts",
"cap_template_represent",
)
def model(self):
T = current.T
db = current.db
settings = current.deployment_settings
add_components = self.add_components
configure = self.configure
crud_strings = current.response.s3.crud_strings
define_table = self.define_table
UNKNOWN_OPT = current.messages.UNKNOWN_OPT
# ---------------------------------------------------------------------
# List of Incident Categories -- copied from irs module <--
# @ToDo: Switch to using event_incident_type
#
# The keys are based on the Canadian ems.incident hierarchy, with a
# few extra general versions added to 'other'
# The values are meant for end-users, so can be customised as-required
# NB It is important that the meaning of these entries is not changed
# as otherwise this hurts our ability to do synchronisation
# Entries can be hidden from user view in the controller.
# Additional sets of 'translations' can be added to the tuples.
cap_incident_type_opts = {
"animalHealth.animalDieOff": T("Animal Die Off"),
"animalHealth.animalFeed": T("Animal Feed"),
"aviation.aircraftCrash": T("Aircraft Crash"),
"aviation.aircraftHijacking": T("Aircraft Hijacking"),
"aviation.airportClosure": T("Airport Closure"),
"aviation.airspaceClosure": T("Airspace Closure"),
"aviation.noticeToAirmen": T("Notice to Airmen"),
"aviation.spaceDebris": T("Space Debris"),
"civil.demonstrations": T("Demonstrations"),
"civil.dignitaryVisit": T("Dignitary Visit"),
"civil.displacedPopulations": T("Displaced Populations"),
"civil.emergency": T("Civil Emergency"),
"civil.looting": T("Looting"),
"civil.publicEvent": T("Public Event"),
"civil.riot": T("Riot"),
"civil.volunteerRequest": T("Volunteer Request"),
"crime": T("Crime"),
"crime.bomb": T("Bomb"),
"crime.bombExplosion": T("Bomb Explosion"),
"crime.bombThreat": T("Bomb Threat"),
"crime.dangerousPerson": T("Dangerous Person"),
"crime.drugs": T("Drugs"),
"crime.homeCrime": T("Home Crime"),
"crime.illegalImmigrant": T("Illegal Immigrant"),
"crime.industrialCrime": T("Industrial Crime"),
"crime.poisoning": T("Poisoning"),
"crime.retailCrime": T("Retail Crime"),
"crime.shooting": T("Shooting"),
"crime.stowaway": T("Stowaway"),
"crime.terrorism": T("Terrorism"),
"crime.vehicleCrime": T("Vehicle Crime"),
"fire": T("Fire"),
"fire.forestFire": T("Forest Fire"),
"fire.hotSpot": T("Hot Spot"),
"fire.industryFire": T("Industry Fire"),
"fire.smoke": T("Smoke"),
"fire.urbanFire": T("Urban Fire"),
"fire.wildFire": T("Wild Fire"),
"flood": T("Flood"),
"flood.damOverflow": T("Dam Overflow"),
"flood.flashFlood": T("Flash Flood"),
"flood.highWater": T("High Water"),
"flood.overlandFlowFlood": T("Overland Flow Flood"),
"flood.tsunami": T("Tsunami"),
"geophysical.avalanche": T("Avalanche"),
"geophysical.earthquake": T("Earthquake"),
"geophysical.lahar": T("Lahar"),
"geophysical.landslide": T("Landslide"),
"geophysical.magneticStorm": T("Magnetic Storm"),
"geophysical.meteorite": T("Meteorite"),
"geophysical.pyroclasticFlow": T("Pyroclastic Flow"),
"geophysical.pyroclasticSurge": T("Pyroclastic Surge"),
"geophysical.volcanicAshCloud": T("Volcanic Ash Cloud"),
"geophysical.volcanicEvent": T("Volcanic Event"),
"hazardousMaterial": T("Hazardous Material"),
"hazardousMaterial.biologicalHazard": T("Biological Hazard"),
"hazardousMaterial.chemicalHazard": T("Chemical Hazard"),
"hazardousMaterial.explosiveHazard": T("Explosive Hazard"),
"hazardousMaterial.fallingObjectHazard": T("Falling Object Hazard"),
"hazardousMaterial.infectiousDisease": T("Infectious Disease (Hazardous Material)"),
"hazardousMaterial.poisonousGas": T("Poisonous Gas"),
"hazardousMaterial.radiologicalHazard": T("Radiological Hazard"),
"health.infectiousDisease": T("Infectious Disease"),
"health.infestation": T("Infestation"),
"ice.iceberg": T("Iceberg"),
"ice.icePressure": T("Ice Pressure"),
"ice.rapidCloseLead": T("Rapid Close Lead"),
"ice.specialIce": T("Special Ice"),
"marine.marineSecurity": T("Marine Security"),
"marine.nauticalAccident": T("Nautical Accident"),
"marine.nauticalHijacking": T("Nautical Hijacking"),
"marine.portClosure": T("Port Closure"),
"marine.specialMarine": T("Special Marine"),
"meteorological.blizzard": T("Blizzard"),
"meteorological.blowingSnow": T("Blowing Snow"),
"meteorological.drought": T("Drought"),
"meteorological.dustStorm": T("Dust Storm"),
"meteorological.fog": T("Fog"),
"meteorological.freezingDrizzle": T("Freezing Drizzle"),
"meteorological.freezingRain": T("Freezing Rain"),
"meteorological.freezingSpray": T("Freezing Spray"),
"meteorological.hail": T("Hail"),
"meteorological.hurricane": T("Hurricane"),
"meteorological.rainFall": T("Rain Fall"),
"meteorological.snowFall": T("Snow Fall"),
"meteorological.snowSquall": T("Snow Squall"),
"meteorological.squall": T("Squall"),
"meteorological.stormSurge": T("Storm Surge"),
"meteorological.thunderstorm": T("Thunderstorm"),
"meteorological.tornado": T("Tornado"),
"meteorological.tropicalStorm": T("Tropical Storm"),
"meteorological.waterspout": T("Waterspout"),
"meteorological.winterStorm": T("Winter Storm"),
"missingPerson": T("Missing Person"),
# http://en.wikipedia.org/wiki/Amber_Alert
"missingPerson.amberAlert": T("Child Abduction Emergency"),
"missingPerson.missingVulnerablePerson": T("Missing Vulnerable Person"),
# http://en.wikipedia.org/wiki/Silver_Alert
"missingPerson.silver": T("Missing Senior Citizen"),
"publicService.emergencySupportFacility": T("Emergency Support Facility"),
"publicService.emergencySupportService": T("Emergency Support Service"),
"publicService.schoolClosure": T("School Closure"),
"publicService.schoolLockdown": T("School Lockdown"),
"publicService.serviceOrFacility": T("Service or Facility"),
"publicService.transit": T("Transit"),
"railway.railwayAccident": T("Railway Accident"),
"railway.railwayHijacking": T("Railway Hijacking"),
"roadway.bridgeClosure": T("Bridge Closed"),
"roadway.hazardousRoadConditions": T("Hazardous Road Conditions"),
"roadway.roadwayAccident": T("Road Accident"),
"roadway.roadwayClosure": T("Road Closed"),
"roadway.roadwayDelay": T("Road Delay"),
"roadway.roadwayHijacking": T("Road Hijacking"),
"roadway.roadwayUsageCondition": T("Road Usage Condition"),
"roadway.trafficReport": T("Traffic Report"),
"temperature.arcticOutflow": T("Arctic Outflow"),
"temperature.coldWave": T("Cold Wave"),
"temperature.flashFreeze": T("Flash Freeze"),
"temperature.frost": T("Frost"),
"temperature.heatAndHumidity": T("Heat and Humidity"),
"temperature.heatWave": T("Heat Wave"),
"temperature.windChill": T("Wind Chill"),
"wind.galeWind": T("Gale Wind"),
"wind.hurricaneForceWind": T("Hurricane Force Wind"),
"wind.stormForceWind": T("Storm Force Wind"),
"wind.strongWind": T("Strong Wind"),
"other.buildingCollapsed": T("Building Collapsed"),
"other.peopleTrapped": T("People Trapped"),
"other.powerFailure": T("Power Failure"),
}
# ---------------------------------------------------------------------
# CAP alerts
#
# CAP alert Status Code (status)
cap_alert_status_code_opts = OrderedDict([
("Actual", T("Actual - actionable by all targeted recipients")),
("Exercise", T("Exercise - only for designated participants (decribed in note)")),
("System", T("System - for internal functions")),
("Test", T("Test - testing, all recipients disregard")),
("Draft", T("Draft - not actionable in its current form")),
])
# CAP alert message type (msgType)
cap_alert_msgType_code_opts = OrderedDict([
("Alert", T("Alert: Initial information requiring attention by targeted recipients")),
("Update", T("Update: Update and supercede earlier message(s)")),
("Cancel", T("Cancel: Cancel earlier message(s)")),
("Ack", T("Ack: Acknowledge receipt and acceptance of the message(s)")),
("Error", T("Error: Indicate rejection of the message(s)")),
])
# CAP alert scope
cap_alert_scope_code_opts = OrderedDict([
("Public", T("Public - unrestricted audiences")),
("Restricted", T("Restricted - to users with a known operational requirement (described in restriction)")),
("Private", T("Private - only to specified addresses (mentioned as recipients)"))
])
# CAP info categories
cap_info_category_opts = OrderedDict([
("Geo", T("Geophysical (inc. landslide)")),
("Met", T("Meteorological (inc. flood)")),
("Safety", T("General emergency and public safety")),
("Security", T("Law enforcement, military, homeland and local/private security")),
("Rescue", T("Rescue and recovery")),
("Fire", T("Fire suppression and rescue")),
("Health", T("Medical and public health")),
("Env", T("Pollution and other environmental")),
("Transport", T("Public and private transportation")),
("Infra", T("Utility, telecommunication, other non-transport infrastructure")),
("CBRNE", T("Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack")),
("Other", T("Other events")),
])
tablename = "cap_alert"
define_table(tablename,
Field("is_template", "boolean",
readable = False,
writable = True,
),
Field("template_id", "reference cap_alert",
label = T("Template"),
ondelete = "RESTRICT",
represent = self.cap_template_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_alert.id",
self.cap_template_represent,
filterby="is_template",
filter_opts=(True,)
)),
comment = T("Apply a template"),
),
Field("template_title",
label = T("Template Title"),
),
Field("template_settings", "text",
default = "{}",
readable = False,
),
Field("identifier", unique=True, length=128,
default = self.generate_identifier,
label = T("Identifier"),
),
Field("sender",
label = T("Sender"),
default = self.generate_sender,
# @todo: can not be empty in alerts (validator!)
),
s3_datetime("sent",
default = "now",
writable = False,
),
Field("status",
default = "Draft",
label = T("Status"),
represent = lambda opt: \
cap_alert_status_code_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(cap_alert_status_code_opts),
),
Field("msg_type",
label = T("Message Type"),
represent = lambda opt: \
cap_alert_msgType_code_opts.get(opt, UNKNOWN_OPT),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_alert_msgType_code_opts)
),
),
Field("source",
label = T("Source"),
default = self.generate_source,
),
Field("scope",
label = T("Scope"),
represent = lambda opt: \
cap_alert_scope_code_opts.get(opt, UNKNOWN_OPT),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_alert_scope_code_opts)
),
),
# Text describing the restriction for scope=restricted
Field("restriction", "text",
label = T("Restriction"),
),
Field("addresses", "list:string",
label = T("Recipients"),
represent = self.list_string_represent,
#@ToDo: provide a better way to add multiple addresses,
# do not ask the user to delimit it themselves
# this should eventually use the CAP contacts
#widget = S3CAPAddressesWidget,
),
Field("codes", "list:string",
default = settings.get_cap_codes(),
label = T("Codes"),
represent = self.list_string_represent,
),
Field("note", "text",
label = T("Note"),
),
Field("reference", "list:reference cap_alert",
label = T("Reference"),
represent = S3Represent(lookup = tablename,
fields = ["msg_type", "sent", "sender"],
field_sep = " - ",
multiple = True,
),
# @ToDo: This should not be manually entered,
# needs a widget
#widget = S3ReferenceWidget(table,
# one_to_many=True,
# allow_create=False),
),
# @ToDo: Switch to using event_incident_type_id
Field("incidents", "list:string",
label = T("Incidents"),
represent = S3Represent(options = cap_incident_type_opts,
multiple = True),
requires = IS_EMPTY_OR(
IS_IN_SET(cap_incident_type_opts,
multiple = True,
sort = True,
)),
widget = S3MultiSelectWidget(selectedList = 10),
),
# approved_on field for recording when the alert was approved
s3_datetime("approved_on",
readable = False,
writable = False,
),
*s3_meta_fields())
list_fields = [(T("Sent"), "sent"),
"scope",
"info.priority",
"info.event_type_id",
"info.sender_name",
"area.name",
]
notify_fields = [(T("Identifier"), "identifier"),
(T("Date"), "sent"),
(T("Status"), "status"),
(T("Message Type"), "msg_type"),
(T("Source"), "source"),
(T("Scope"), "scope"),
(T("Restriction"), "restriction"),
(T("Category"), "info.category"),
(T("Event"), "info.event_type_id"),
(T("Response type"), "info.response_type"),
(T("Priority"), "info.priority"),
(T("Urgency"), "info.urgency"),
(T("Severity"), "info.severity"),
(T("Certainty"), "info.certainty"),
(T("Effective"), "info.effective"),
(T("Expires at"), "info.expires"),
(T("Sender's name"), "info.sender_name"),
(T("Headline"), "info.headline"),
(T("Description"), "info.description"),
(T("Instruction"), "info.instruction"),
(T("Contact information"), "info.contact"),
(T("URL"), "info.web"),
(T("Area Description"), "area.name"),
]
filter_widgets = [
# @ToDo: Radio Button to choose between alert expired, unexpired and all
S3TextFilter(["identifier",
"sender",
"incidents",
"cap_info.headline",
"cap_info.event",
],
label = T("Search"),
comment = T("Search for an Alert by sender, incident, headline or event."),
),
S3OptionsFilter("info.category",
label = T("Category"),
options = cap_info_category_opts,
),
S3OptionsFilter("info.event_type_id",
),
S3OptionsFilter("info.priority",
),
S3LocationFilter("location.location_id",
label = T("Location(s)"),
# options = gis.get_countries().keys(),
),
S3OptionsFilter("info.language",
label = T("Language"),
),
]
configure(tablename,
context = {"location": "location.location_id",
},
filter_widgets = filter_widgets,
list_fields = list_fields,
list_layout = cap_alert_list_layout,
list_orderby = "cap_info.expires desc",
notify_fields = notify_fields,
onvalidation = self.cap_alert_form_validation,
# update the approved_on field on approve of the alert
onapprove = self.cap_alert_approve,
orderby = "cap_info.expires desc",
)
# Components
add_components(tablename,
cap_area = "alert_id",
cap_area_location = {"name": "location",
"joinby": "alert_id",
},
cap_area_tag = {"name": "tag",
"joinby": "alert_id",
},
cap_info = "alert_id",
cap_resource = "alert_id",
)
self.set_method("cap", "alert",
method = "import_feed",
action = CAPImportFeed())
self.set_method("cap", "alert",
method = "assign",
action = self.cap_AssignArea())
if crud_strings["cap_template"]:
crud_strings[tablename] = crud_strings["cap_template"]
else:
ADD_ALERT = T("Create Alert")
crud_strings[tablename] = Storage(
label_create = ADD_ALERT,
title_display = T("Alert Details"),
title_list = T("Alerts"),
# If already-published, this should create a new "Update"
# alert instead of modifying the original
title_update = T("Edit Alert"),
title_upload = T("Import Alerts"),
label_list_button = T("List Alerts"),
label_delete_button = T("Delete Alert"),
msg_record_created = T("Alert created"),
msg_record_modified = T("Alert modified"),
msg_record_deleted = T("Alert deleted"),
msg_list_empty = T("No alerts to show"))
alert_represent = S3Represent(lookup = tablename,
fields = ["msg_type", "sent", "sender"],
field_sep = " - ")
alert_id = S3ReusableField("alert_id", "reference %s" % tablename,
comment = T("The alert message containing this information"),
label = T("Alert"),
ondelete = "CASCADE",
represent = alert_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_alert.id",
alert_represent)),
)
# ---------------------------------------------------------------------
# CAP info segments
#
cap_info_responseType_opts = OrderedDict([
("Shelter", T("Shelter - Take shelter in place or per instruction")),
("Evacuate", T("Evacuate - Relocate as instructed in the instruction")),
("Prepare", T("Prepare - Make preparations per the instruction")),
("Execute", T("Execute - Execute a pre-planned activity identified in instruction")),
("Avoid", T("Avoid - Avoid the subject event as per the instruction")),
("Monitor", T("Monitor - Attend to information sources as described in instruction")),
("Assess", T("Assess - Evaluate the information in this message.")),
("AllClear", T("AllClear - The subject event no longer poses a threat")),
("None", T("None - No action recommended")),
])
cap_info_urgency_opts = OrderedDict([
("Immediate", T("Response action should be taken immediately")),
("Expected", T("Response action should be taken soon (within next hour)")),
("Future", T("Responsive action should be taken in the near future")),
("Past", T("Responsive action is no longer required")),
("Unknown", T("Unknown")),
])
cap_info_severity_opts = OrderedDict([
("Extreme", T("Extraordinary threat to life or property")),
("Severe", T("Significant threat to life or property")),
("Moderate", T("Possible threat to life or property")),
("Minor", T("Minimal to no known threat to life or property")),
("Unknown", T("Severity unknown")),
])
cap_info_certainty_opts = OrderedDict([
("Observed", T("Observed: determined to have occurred or to be ongoing")),
("Likely", T("Likely (p > ~50%)")),
("Possible", T("Possible but not likely (p <= ~50%)")),
("Unlikely", T("Not expected to occur (p ~ 0)")),
("Unknown", T("Certainty unknown")),
])
# ---------------------------------------------------------------------
# Warning Priorities for CAP
tablename = "cap_warning_priority"
define_table(tablename,
Field("priority_rank", "integer",
label = T("Priority Rank"),
length = 2,
),
Field("event_code",
label = T("Event Code"),
),
Field("name", notnull=True, length=64,
label = T("Name"),
),
Field("event_type",
label = T("Event Type"),
),
Field("urgency",
label = T("Urgency"),
requires = IS_IN_SET(cap_info_urgency_opts),
),
Field("severity",
label = T("Severity"),
requires = IS_IN_SET(cap_info_severity_opts),
),
Field("certainty",
label = T("Certainty"),
requires = IS_IN_SET(cap_info_certainty_opts),
),
Field("color_code",
label = T("Color Code"),
),
*s3_meta_fields())
priority_represent = S3Represent(lookup=tablename)
crud_strings[tablename] = Storage(
label_create = T("Create Warning Priority"),
title_display = T("Warning Priority Details"),
title_list = T("Warning Priorities"),
title_update = T("Edit Warning Priority"),
title_upload = T("Import Warning Priorities"),
label_list_button = T("List Warning Priorities"),
label_delete_button = T("Delete Warning Priority"),
msg_record_created = T("Warning Priority added"),
msg_record_modified = T("Warning Priority updated"),
msg_record_deleted = T("Warning Priority removed"),
msg_list_empty = T("No Warning Priorities currently registered")
)
configure(tablename,
deduplicate = S3Duplicate(primary=("event_type", "name")),
)
# ---------------------------------------------------------------------
# CAP info priority
# @ToDo: i18n: Need label=T("")
languages = settings.get_cap_languages()
tablename = "cap_info"
define_table(tablename,
alert_id(),
Field("is_template", "boolean",
default = False,
readable = False,
writable = False,
),
Field("template_info_id", "reference cap_info",
ondelete = "RESTRICT",
readable = False,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_info.id",
self.cap_template_represent,
filterby="is_template",
filter_opts=(True,)
)),
widget = S3HiddenWidget(),
),
Field("template_settings", "text",
readable = False,
),
Field("language",
default = "en-US",
represent = lambda opt: languages.get(opt,
UNKNOWN_OPT),
requires = IS_EMPTY_OR(
IS_IN_SET(languages)
),
),
Field("category", "list:string", # 1 or more allowed
represent = S3Represent(options = cap_info_category_opts,
multiple = True,
),
required = True,
requires = IS_IN_SET(cap_info_category_opts,
multiple = True,
),
widget = S3MultiSelectWidget(selectedList = 10),
),
Field("event", "text"),
self.event_type_id(empty = False,
script = '''
$.filterOptionsS3({
'trigger':'event_type_id',
'target':'priority',
'lookupURL':S3.Ap.concat('/cap/priority_get/'),
'lookupResource':'event_type'
})'''
),
Field("response_type", "list:string", # 0 or more allowed
represent = S3Represent(options = cap_info_responseType_opts,
multiple = True,
),
requires = IS_IN_SET(cap_info_responseType_opts,
multiple = True),
widget = S3MultiSelectWidget(selectedList = 10),
),
Field("priority", "reference cap_warning_priority",
represent = priority_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_warning_priority.id",
priority_represent
),
),
),
Field("urgency",
represent = lambda opt: \
cap_info_urgency_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(cap_info_urgency_opts),
),
Field("severity",
represent = lambda opt: \
cap_info_severity_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(cap_info_severity_opts),
),
Field("certainty",
represent = lambda opt: \
cap_info_certainty_opts.get(opt, UNKNOWN_OPT),
requires = IS_IN_SET(cap_info_certainty_opts),
),
Field("audience", "text"),
Field("event_code", "text",
default = settings.get_cap_event_codes(),
represent = S3KeyValueWidget.represent,
widget = S3KeyValueWidget(),
),
s3_datetime("effective",
default = "now",
),
s3_datetime("onset"),
s3_datetime("expires",
past = 0,
default = self.get_expirydate,
),
Field("sender_name"),
Field("headline"),
Field("description", "text"),
Field("instruction", "text"),
Field("contact", "text"),
Field("web",
requires = IS_EMPTY_OR(IS_URL()),
),
Field("parameter", "text",
default = settings.get_cap_parameters(),
label = T("Parameters"),
represent = S3KeyValueWidget.represent,
widget = S3KeyValueWidget(),
),
*s3_meta_fields())
# @ToDo: Move labels into main define_table (can then be lazy & performs better anyway)
info_labels = cap_info_labels()
for field in info_labels:
db.cap_info[field].label = info_labels[field]
if crud_strings["cap_template_info"]:
crud_strings[tablename] = crud_strings["cap_template_info"]
else:
ADD_INFO = T("Add alert information")
crud_strings[tablename] = Storage(
label_create = ADD_INFO,
title_display = T("Alert information"),
title_list = T("Information entries"),
title_update = T("Update alert information"), # this will create a new "Update" alert?
title_upload = T("Import alert information"),
subtitle_list = T("Listing of alert information items"),
label_list_button = T("List information entries"),
label_delete_button = T("Delete Information"),
msg_record_created = T("Alert information created"),
msg_record_modified = T("Alert information modified"),
msg_record_deleted = T("Alert information deleted"),
msg_list_empty = T("No alert information to show"))
info_represent = S3Represent(lookup = tablename,
fields = ["language", "headline"],
field_sep = " - ")
info_id = S3ReusableField("info_id", "reference %s" % tablename,
label = T("Information Segment"),
ondelete = "CASCADE",
represent = info_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(db, "cap_info.id",
info_represent)
),
sortby = "identifier",
)
configure(tablename,
#create_next = URL(f="info", args=["[id]", "area"]),
onaccept = self.info_onaccept,
)
# Components
add_components(tablename,
cap_resource = "info_id",
cap_area = "info_id",
)
# ---------------------------------------------------------------------
# CAP Resource segments
#
# Resource elements sit inside the Info segment of the export XML
# - however in most cases these would be common across all Infos, so in
# our internal UI we link these primarily to the Alert but still
# allow the option to differentiate by Info
#
tablename = "cap_resource"
define_table(tablename,
alert_id(writable = False,
),
info_id(),
self.super_link("doc_id", "doc_entity"),
Field("resource_desc",
requires = IS_NOT_EMPTY(),
),
Field("mime_type",
requires = IS_NOT_EMPTY(),
),
Field("size", "integer",
writable = False,
),
Field("uri",
# needs a special validation
writable = False,
),
#Field("file", "upload"),
Field("deref_uri", "text",
readable = False,
writable = False,
),
Field("digest",
writable = False,
),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Resource"),
title_display = T("Alert Resource"),
title_list = T("Resources"),
title_update = T("Edit Resource"),
subtitle_list = T("List Resources"),
label_list_button = T("List Resources"),
label_delete_button = T("Delete Resource"),
msg_record_created = T("Resource added"),
msg_record_modified = T("Resource updated"),
msg_record_deleted = T("Resource deleted"),
msg_list_empty = T("No resources currently defined for this alert"))
# @todo: complete custom form
crud_form = S3SQLCustomForm(#"name",
"info_id",
"resource_desc",
S3SQLInlineComponent("image",
label=T("Image"),
fields=["file",
],
),
S3SQLInlineComponent("document",
label=T("Document"),
fields=["file",
],
),
)
configure(tablename,
super_entity = "doc_entity",
crud_form = crud_form,
# Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
)
# ---------------------------------------------------------------------
# CAP Area segments
#
# Area elements sit inside the Info segment of the export XML
# - however in most cases these would be common across all Infos, so in
# our internal UI we link these primarily to the Alert but still
# allow the option to differentiate by Info
#
# Each <area> can have multiple elements which are one of <polygon>,
# <circle>, or <geocode>.
# <polygon> and <circle> are explicit geometry elements.
# <geocode> is a key-value pair in which the key is a standard
# geocoding system like SAME, FIPS, ZIP, and the value is a defined
# value in that system. The region described by the <area> is the
# union of the areas described by the individual elements, but the
# CAP spec advises that, if geocodes are included, the concrete
# geometry elements should outline the area specified by the geocodes,
# as not all recipients will have access to the meanings of the
# geocodes. However, since geocodes are a compact way to describe an
# area, it may be that they will be used without accompanying geometry,
# so we should not count on having <polygon> or <circle>.
#
# Geometry elements are each represented by a gis_location record, and
# linked to the cap_area record via the cap_area_location link table.
# For the moment, <circle> objects are stored with the center in the
# gis_location's lat, lon, and radius (in km) as a tag "radius" and
# value. ToDo: Later, we will add CIRCLESTRING WKT.
#
# Geocode elements are currently stored as key value pairs in the
# cap_area record.
#
# <area> can also specify a minimum altitude and maximum altitude
# ("ceiling"). These are stored in explicit fields for now, but could
# be replaced by key value pairs, if it is found that they are rarely
# used.
#
# (An alternative would be to have cap_area link to a gis_location_group
# record. In that case, the geocode tags could be stored in the
# gis_location_group's overall gis_location element's tags. The altitude
# could be stored in the overall gis_location's elevation, with ceiling
# stored in a tag. We could consider adding a maximum elevation field.)
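        # For example (illustrative values only): a CAP <circle> of
        # "26.9,75.8 30.0" would be stored as a gis_location with
        # lat=26.9 and lon=75.8, linked to the cap_area via
        # cap_area_location, with a gis_location tag "radius" = 30 (km).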
tablename = "cap_area"
define_table(tablename,
alert_id(),
info_id(),
Field("is_template", "boolean",
default = False,
readable = False,
writable = False,
),
Field("name",
label = T("Area Description"),
required = True,
),
Field("altitude", "integer", # Feet above Sea-level in WGS84 (Specific or Minimum is using a range)
label = T("Altitude"),
),
Field("ceiling", "integer", # Feet above Sea-level in WGS84 (Maximum)
label = T("Ceiling"),
),
# Only used for Templates
self.event_type_id(script = '''
$.filterOptionsS3({
'trigger':'event_type_id',
'target':'priority',
'lookupURL':S3.Ap.concat('/cap/priority_get/'),
'lookupResource':'event_type'
})'''
),
# Only used for Templates
Field("priority",
label = T("Priority"),
represent = priority_represent,
requires = IS_EMPTY_OR(
IS_ONE_OF(
db, "cap_warning_priority.id",
priority_represent
),
),
),
*s3_meta_fields())
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Area"),
title_display = T("Alert Area"),
title_list = T("Areas"),
title_update = T("Edit Area"),
subtitle_list = T("List Areas"),
label_list_button = T("List Areas"),
label_delete_button = T("Delete Area"),
msg_record_created = T("Area added"),
msg_record_modified = T("Area updated"),
msg_record_deleted = T("Area deleted"),
msg_list_empty = T("No areas currently defined for this alert"))
crud_form = S3SQLCustomForm("alert_id",
"info_id",
"is_template",
"name",
"info_id",
S3SQLInlineComponent("location",
name = "location",
label = "",
multiple = False,
fields = [("", "location_id")],
),
S3SQLInlineComponent("tag",
name = "tag",
label = "",
fields = ["tag",
"value",
],
),
"altitude",
"ceiling",
"event_type_id",
"priority",
)
area_represent = cap_AreaRepresent(show_link=True)
configure(tablename,
#create_next = URL(f="area", args=["[id]", "location"]),
# Old: Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
crud_form = crud_form,
)
# Components
add_components(tablename,
cap_area_location = {"name": "location",
"joinby": "area_id",
},
cap_area_tag = {"name": "tag",
"joinby": "area_id",
},
# Names
cap_area_name = {"name": "name",
"joinby": "area_id",
},
)
area_id = S3ReusableField("area_id", "reference %s" % tablename,
label = T("Area"),
ondelete = "CASCADE",
represent = area_represent,
requires = IS_ONE_OF(db, "cap_area.id",
area_represent),
)
# ToDo: Use a widget tailored to entering <polygon> and <circle>.
# Want to be able to enter them by drawing on the map.
# Also want to allow selecting existing locations that have
# geometry, maybe with some filtering so the list isn't cluttered
# with irrelevant locations.
tablename = "cap_area_location"
define_table(tablename,
alert_id(readable = False,
writable = False,
),
area_id(),
self.gis_location_id(
widget = S3LocationSelector(points = False,
polygons = True,
show_map = True,
catalog_layers = True,
show_address = False,
show_postcode = False,
),
),
)
# CRUD Strings
crud_strings[tablename] = Storage(
label_create = T("Add Location"),
title_display = T("Alert Location"),
title_list = T("Locations"),
title_update = T("Edit Location"),
subtitle_list = T("List Locations"),
label_list_button = T("List Locations"),
label_delete_button = T("Delete Location"),
msg_record_created = T("Location added"),
msg_record_modified = T("Location updated"),
msg_record_deleted = T("Location deleted"),
msg_list_empty = T("No locations currently defined for this alert"))
configure(tablename,
# Shouldn't be required if all UI actions go through alert controller & XSLT configured appropriately
create_onaccept = update_alert_id(tablename),
)
# ---------------------------------------------------------------------
# Area Tags
# - Key-Value extensions
        # - Used to hold geocodes: the key is the geocode system name, and
# value is the specific value for this area.
# - Could store other values here as well, to avoid dedicated fields
# in cap_area for rarely-used items like altitude and ceiling, but
# would have to distinguish those from geocodes.
#
# ToDo: Provide a mechanism for pre-loading geocodes that are not tied
# to individual areas.
# ToDo: Allow sharing the key-value pairs. Cf. Ruby on Rails tagging
# systems such as acts-as-taggable-on, which has a single table of tags
# used by all classes. Each tag record has the class and field that the
# tag belongs to, as well as the tag string. We'd want tag and value,
# but the idea is the same: There would be a table with tag / value
# pairs, and individual cap_area, event_event, org_whatever records
# would link to records in the tag table. So we actually would not have
# duplicate tag value records as we do now.
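        # For example (illustrative values only): a <geocode> with
        # valueName "SAME" and value "006113" would be stored as a single
        # cap_area_tag row with tag="SAME" and value="006113".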
tablename = "cap_area_tag"
define_table(tablename,
alert_id(readable = False,
writable = False,
),
area_id(),
# ToDo: Allow selecting from a dropdown list of pre-defined
# geocode system names.
Field("tag",
label = T("Geocode Name"),
),
# ToDo: Once the geocode system is selected, fetch a list
# of current values for that geocode system. Allow adding
# new values, e.g. with combo box menu.
Field("value",
label = T("Value"),
),
s3_comments(),
*s3_meta_fields())
configure(tablename,
create_onaccept = update_alert_id(tablename),
# deduplicate = self.cap_area_tag_deduplicate,
)
# ---------------------------------------------------------------------
# Pass names back to global scope (s3.*)
return dict(cap_alert_id = alert_id,
cap_alert_represent = alert_represent,
cap_area_id = area_id,
cap_area_represent = area_represent,
cap_info_represent = info_represent,
cap_info_category_opts = cap_info_category_opts,
cap_template_represent = self.cap_template_represent,
)
# -------------------------------------------------------------------------
@staticmethod
def generate_identifier():
"""
Generate an identifier for a new form
"""
db = current.db
table = db.cap_alert
r = db().select(table.id,
limitby=(0, 1),
orderby=~table.id).first()
_time = datetime.datetime.strftime(datetime.datetime.utcnow(), "%Y%m%d")
if r:
next_id = int(r.id) + 1
else:
next_id = 1
        # Format: prefix-oid-date-sequence(-suffix)
settings = current.deployment_settings
prefix = settings.get_cap_identifier_prefix() or current.xml.domain
oid = settings.get_cap_identifier_oid()
suffix = settings.get_cap_identifier_suffix()
return "%s-%s-%s-%03d%s%s" % \
(prefix, oid, _time, next_id, ["", "-"][bool(suffix)], suffix)
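        # e.g. (illustrative): "<prefix>-<oid>-20140901-001", with an
        # optional "-<suffix>" appended when a suffix is configured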
# -------------------------------------------------------------------------
@staticmethod
def generate_sender():
"""
Generate a sender for a new form
"""
try:
user_id = current.auth.user.id
except AttributeError:
return ""
return "%s/%d" % (current.xml.domain, user_id)
# -------------------------------------------------------------------------
@staticmethod
def generate_source():
"""
Generate a source for CAP alert
"""
return "%s@%s" % (current.xml.domain,
current.deployment_settings.get_base_public_url())
# -------------------------------------------------------------------------
@staticmethod
def get_expirydate():
"""
Default Expiry date based on the expire offset
"""
return current.request.utcnow + \
datetime.timedelta(days = current.deployment_settings.\
get_cap_expire_offset())
# -------------------------------------------------------------------------
@staticmethod
def cap_template_represent(id, row=None):
"""
Represent an alert template concisely
"""
if row:
id = row.id
elif not id:
return current.messages["NONE"]
else:
db = current.db
table = db.cap_alert
row = db(table.id == id).select(table.is_template,
table.template_title,
# left = table.on(table.id == table.parent_item_category_id), Doesn't work
limitby=(0, 1)).first()
try:
# @ToDo: Should get headline from "info"?
if row.is_template:
return row.template_title
else:
return s3db.cap_alert_represent(id)
except:
return current.messages.UNKNOWN_OPT
# -------------------------------------------------------------------------
@staticmethod
def list_string_represent(string, fmt=lambda v: v):
try:
if isinstance(string, list):
return ", ".join([fmt(i) for i in string])
elif isinstance(string, basestring):
return ", ".join([fmt(i) for i in string[1:-1].split("|")])
except IndexError:
return current.messages.UNKNOWN_OPT
return ""
# -------------------------------------------------------------------------
@staticmethod
def cap_alert_form_validation(form):
"""
On Validation for CAP alert form
"""
form_vars = form.vars
if form_vars.get("scope") == "Private" and not form_vars.get("addresses"):
form.errors["addresses"] = \
current.T("'Recipients' field mandatory in case of 'Private' scope")
return
# -------------------------------------------------------------------------
@staticmethod
def info_onaccept(form):
"""
After DB I/O
"""
if "vars" in form:
form_vars = form.vars
elif "id" in form:
form_vars = form
elif hasattr(form, "vars"):
form_vars = form.vars
else:
form_vars = form
info_id = form_vars.id
if not info_id:
return
db = current.db
atable = db.cap_alert
itable = db.cap_info
info = db(itable.id == info_id).select(itable.alert_id,
itable.event,
itable.event_type_id,
limitby=(0, 1)).first()
if info:
alert_id = info.alert_id
set_ = db(itable.id == info_id)
if alert_id and cap_alert_is_template(alert_id):
set_.update(is_template = True)
if not info.event:
set_.update(event = current.db.cap_info.event_type_id.\
represent(info.event_type_id))
# -------------------------------------------------------------------------
@staticmethod
def cap_alert_approve(record=None):
"""
Update the approved_on field when alert gets approved
"""
if not record:
return
alert_id = record["id"]
# Update approved_on at the time the alert is approved
if alert_id:
db = current.db
approved_on = record["approved_on"]
db(db.cap_alert.id == alert_id).update(approved_on = current.request.utcnow)
# =============================================================================
class S3CAPAreaNameModel(S3Model):
"""
CAP Name Model:
-local names for CAP Area
"""
names = ("cap_area_name",
)
def model(self):
T = current.T
l10n_languages = current.deployment_settings.get_L10n_languages()
# ---------------------------------------------------------------------
# Local Names
#
tablename = "cap_area_name"
self.define_table(tablename,
self.cap_area_id(empty = False,
ondelete = "CASCADE",
),
Field("language",
label = T("Language"),
represent = lambda opt: l10n_languages.get(opt,
current.messages.UNKNOWN_OPT),
requires = IS_ISO639_2_LANGUAGE_CODE(),
),
Field("name_l10n",
label = T("Local Name"),
),
s3_comments(),
*s3_meta_fields())
self.configure(tablename,
deduplicate = S3Duplicate(primary=("area_id", "language")),
)
# Pass names back to global scope (s3.*)
return {}
# =============================================================================
def cap_info_labels():
"""
Labels for CAP info segments
"""
T = current.T
return dict(language=T("Language"),
category=T("Category"),
event=T("Event"),
response_type=T("Response type"),
urgency=T("Urgency"),
severity=T("Severity"),
certainty=T("Certainty"),
audience=T("Audience"),
event_code=T("Event code"),
effective=T("Effective"),
onset=T("Onset"),
expires=T("Expires at"),
sender_name=T("Sender's name"),
headline=T("Headline"),
description=T("Description"),
instruction=T("Instruction"),
web=T("URL"),
contact=T("Contact information"),
parameter=T("Parameters")
)
# =============================================================================
def cap_alert_is_template(alert_id):
"""
Tell whether an alert entry is a template
"""
if not alert_id:
return False
table = current.s3db.cap_alert
query = (table.id == alert_id)
r = current.db(query).select(table.is_template,
limitby=(0, 1)).first()
return r and r.is_template
# =============================================================================
def cap_rheader(r):
""" Resource Header for CAP module """
rheader = None
if r.representation == "html":
record = r.record
if record:
T = current.T
s3db = current.s3db
tablename = r.tablename
if tablename == "cap_alert":
alert_id = record.id
itable = s3db.cap_info
row = current.db(itable.alert_id == alert_id).\
select(itable.id,
limitby=(0, 1)).first()
if record.is_template:
if not (row and row.id):
error = DIV(T("An alert needs to contain at least one info item."),
_class="error")
else:
error = ""
tabs = [(T("Template"), None),
(T("Information template"), "info"),
#(T("Area"), "area"),
#(T("Resource Files"), "resource"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Template")),
TD(A(s3db.cap_template_represent(alert_id, record),
_href=URL(c="cap", f="template",
args=[alert_id, "update"]))),
),
),
rheader_tabs,
error
)
else:
if not (row and row.id):
error = DIV(T("You need to create at least one alert information item in order to be able to broadcast this alert!"),
_class="error")
export_btn = ""
submit_btn = None
else:
error = ""
export_btn = A(DIV(_class="export_cap_large"),
_href=URL(c="cap", f="alert", args=["%s.cap" % alert_id]),
_target="_blank",
)
# Display 'Submit for Approval' based on permission
# and deployment settings
if not r.record.approved_by and \
current.deployment_settings.get_cap_authorisation() and \
current.auth.s3_has_permission("update", "cap_alert",
record_id=alert_id):
# Get the user ids for the role alert_approver
db = current.db
agtable = db.auth_group
group_rows = db(agtable.role == "Alert Approver").\
select(agtable.id)
if group_rows:
group_members = current.auth.s3_group_members
user_pe_id = current.auth.s3_user_pe_id
for group_row in group_rows:
group_id = group_row.id
user_ids = group_members(group_id) # List of user_ids
pe_ids = [] # List of pe_ids
pe_append = pe_ids.append
for user_id in user_ids:
pe_append(user_pe_id(int(user_id)))
submit_btn = A(T("Submit for Approval"),
_href = URL(f = "compose",
vars = {"cap_alert.id": record.id,
"pe_ids": pe_ids,
},
),
_class = "action-btn"
)
else:
submit_btn = None
else:
submit_btn = None
tabs = [(T("Alert Details"), None),
(T("Information"), "info"),
(T("Area"), "area"),
(T("Resource Files"), "resource"),
]
if r.representation == "html" and \
current.auth.s3_has_permission("update", "cap_alert",
record_id=alert_id):
# Check to see if 'Predefined Areas' tab need to be added
artable = s3db.cap_area
query = (artable.is_template == True) & \
(artable.deleted == False)
template_area_rows = current.db(query)._select(artable.id,
limitby=(0, 1))
if template_area_rows:
tabs.insert(2, (T("Predefined Areas"), "assign"))
# Display "Copy" Button to copy record from the opened info
if r.component_name == "info" and \
r.component_id:
copy_btn = A(T("Copy"),
_href = URL(f = "alert",
args = [r.id, "info", "create",],
vars = {"from_record": r.component_id,
},
),
_class = "action-btn"
)
else:
copy_btn = None
else:
copy_btn = None
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(alert_id, record),
_href=URL(c="cap", f="alert",
args=[alert_id, "update"]))),
),
TR(export_btn)
),
rheader_tabs,
error
)
if copy_btn:
rheader.insert(1, TR(TD(copy_btn)))
if submit_btn:
rheader.insert(1, TR(TD(submit_btn)))
elif tablename == "cap_area":
# Used only for Area Templates
tabs = [(T("Area"), None),
]
if current.deployment_settings.get_L10n_translate_cap_area():
tabs.insert(1, (T("Local Names"), "name"))
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(record.alert_id),
_href=URL(c="cap", f="alert",
args=[record.alert_id, "update"])))
),
TR(TH("%s: " % T("Information")),
TD(A(s3db.cap_info_represent(record.info_id),
_href=URL(c="cap", f="info",
args=[record.info_id, "update"]))),
),
TR(TH("%s: " % T("Area")),
TD(A(s3db.cap_area_represent(record.id, record),
_href=URL(c="cap", f="area",
args=[record.id, "update"]))),
),
),
rheader_tabs
)
elif tablename == "cap_info":
# Shouldn't ever be called
tabs = [(T("Information"), None),
(T("Resource Files"), "resource"),
]
if cap_alert_is_template(record.alert_id):
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % T("Template")),
TD(A(s3db.cap_template_represent(record.alert_id),
_href=URL(c="cap", f="template",
args=[record.alert_id, "update"]))),
),
TR(TH("%s: " % T("Info template")),
TD(A(s3db.cap_info_represent(record.id, record),
_href=URL(c="cap", f="info",
args=[record.id, "update"]))),
)
),
rheader_tabs,
_class="cap_info_template_form"
)
current.response.s3.js_global.append('''i18n.cap_locked="%s"''' % T("Locked"))
else:
tabs.insert(1, (T("Areas"), "area"))
rheader_tabs = s3_rheader_tabs(r, tabs)
table = r.table
rheader = DIV(TABLE(TR(TH("%s: " % T("Alert")),
TD(A(s3db.cap_alert_represent(record.alert_id),
_href=URL(c="cap", f="alert",
args=[record.alert_id, "update"]))),
),
TR(TH("%s: " % T("Information")),
TD(A(s3db.cap_info_represent(record.id, record),
_href=URL(c="cap", f="info",
args=[record.id, "update"]))),
)
),
rheader_tabs
)
return rheader
# =============================================================================
def update_alert_id(tablename):
""" On-accept for area and resource records """
def func(form):
if "vars" in form:
form_vars = form.vars
elif "id" in form:
form_vars = form
elif hasattr(form, "vars"):
form_vars = form.vars
else:
form_vars = form
if form_vars.get("alert_id", None):
# Nothing to do
return
# Look up from the info/area
_id = form_vars.id
if not _id:
return
db = current.db
table = db[tablename]
if tablename == "cap_area_location" or tablename == "cap_area_tag":
area_id = form_vars.get("area_id", None)
if not area_id:
# Get the full record
item = db(table.id == _id).select(table.alert_id,
table.area_id,
limitby=(0, 1)).first()
try:
alert_id = item.alert_id
area_id = item.area_id
except:
# Nothing we can do
return
if alert_id:
# Nothing to do
return
atable = db.cap_area
area = db(atable.id == area_id).select(atable.alert_id,
limitby=(0, 1)).first()
try:
alert_id = area.alert_id
except:
# Nothing we can do
return
else:
# cap_area or cap_resource
info_id = form_vars.get("info_id", None)
if not info_id:
# Get the full record
item = db(table.id == _id).select(table.alert_id,
table.info_id,
limitby=(0, 1)).first()
try:
alert_id = item.alert_id
info_id = item.info_id
except:
# Nothing we can do
return
if alert_id:
# Nothing to do
return
itable = db.cap_info
info = db(itable.id == info_id).select(itable.alert_id,
limitby=(0, 1)).first()
try:
alert_id = info.alert_id
except:
# Nothing we can do
return
if alert_id:
db(table.id == _id).update(alert_id = alert_id)
return func
# =============================================================================
def cap_gis_location_xml_post_parse(element, record):
"""
UNUSED - done in XSLT
Convert CAP polygon representation to WKT; extract circle lat lon.
Latitude and longitude in CAP are expressed as signed decimal values in
coordinate pairs:
latitude,longitude
The circle text consists of:
latitude,longitude radius
where the radius is in km.
Polygon text consists of a space separated sequence of at least 4
coordinate pairs where the first and last are the same.
lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
"""
# @ToDo: Extract altitude and ceiling from the enclosing <area>, and
# compute an elevation value to apply to all enclosed gis_locations.
cap_polygons = element.xpath("cap_polygon")
if cap_polygons:
cap_polygon_text = cap_polygons[0].text
# CAP polygons and WKT have opposite separator conventions:
# CAP has spaces between coordinate pairs and within pairs the
# coordinates are separated by comma, and vice versa for WKT.
# Unfortunately, CAP and WKT (as we use it) also have opposite
# orders of lat and lon. CAP has lat lon, WKT has lon lat.
# Both close the polygon by repeating the first point.
cap_points_text = cap_polygon_text.split()
cap_points = [cpoint.split(",") for cpoint in cap_points_text]
# @ToDo: Should we try interpreting all the points as decimal numbers,
# and failing validation if they're wrong?
wkt_points = ["%s %s" % (cpoint[1], cpoint[0]) for cpoint in cap_points]
wkt_polygon_text = "POLYGON ((%s))" % ", ".join(wkt_points)
record.wkt = wkt_polygon_text
return
cap_circle_values = element.xpath("resource[@name='gis_location_tag']/data[@field='tag' and text()='cap_circle']/../data[@field='value']")
if cap_circle_values:
cap_circle_text = cap_circle_values[0].text
coords, radius = cap_circle_text.split()
lat, lon = coords.split(",")
try:
# If any of these fail to interpret as numbers, the circle was
# badly formatted. For now, we don't try to fail validation,
# but just don't set the lat, lon.
lat = float(lat)
lon = float(lon)
radius = float(radius)
except ValueError:
return
record.lat = lat
record.lon = lon
# Add a bounding box for the given radius, if it is not zero.
if radius > 0.0:
bbox = current.gis.get_bounds_from_radius(lat, lon, radius)
record.lat_min = bbox["lat_min"]
record.lon_min = bbox["lon_min"]
record.lat_max = bbox["lat_max"]
record.lon_max = bbox["lon_max"]
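# Illustrative sketch only (not part of the Eden API; the function name is
# made up): the CAP-polygon-to-WKT conversion described in the docstring
# above, shown as a standalone helper independent of the s3xml element
# handling.
def _example_cap_polygon_to_wkt(cap_polygon_text):
    """
    Convert a CAP polygon string ("lat,lon lat,lon ...") into a WKT POLYGON,
    e.g. "42.0,-71.0 42.0,-70.0 41.0,-70.0 42.0,-71.0" becomes
    "POLYGON ((-71.0 42.0, -70.0 42.0, -70.0 41.0, -71.0 42.0))"
    """
    # CAP separates pairs with spaces and coordinates with commas (lat,lon);
    # WKT separates pairs with commas and coordinates with spaces (lon lat),
    # so both the separators and the coordinate order are swapped here.
    pairs = [p.split(",") for p in cap_polygon_text.split()]
    points = ["%s %s" % (lon, lat) for lat, lon in pairs]
    return "POLYGON ((%s))" % ", ".join(points)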
# =============================================================================
def cap_gis_location_xml_post_render(element, record):
"""
UNUSED - done in XSLT
Convert Eden WKT polygon (and eventually circle) representation to
CAP format and provide them in the rendered s3xml.
Not all internal formats have a parallel in CAP, but an effort is made
        to provide a reasonable substitute:
Polygons are supported.
Circles that were read in from CAP (and thus carry the original CAP
circle data) are supported.
Multipolygons are currently rendered as their bounding box.
Points are rendered as zero radius circles.
Latitude and longitude in CAP are expressed as signed decimal values in
coordinate pairs:
latitude,longitude
The circle text consists of:
latitude,longitude radius
where the radius is in km.
Polygon text consists of a space separated sequence of at least 4
coordinate pairs where the first and last are the same.
lat1,lon1 lat2,lon2 lat3,lon3 ... lat1,lon1
"""
# @ToDo: Can we rely on gis_feature_type == 3 to tell if the location is a
# polygon, or is it better to look for POLYGON in the wkt? For now, check
# both.
# @ToDo: CAP does not support multipolygons. Do we want to extract their
# outer polygon if passed MULTIPOLYGON wkt? For now, these are exported
# with their bounding box as the polygon.
# @ToDo: What if a point (gis_feature_type == 1) that is not a CAP circle
# has a non-point bounding box? Should it be rendered as a polygon for
# the bounding box?
try:
from lxml import etree
except:
# This won't fail, since we're in the middle of processing xml.
return
SubElement = etree.SubElement
s3xml = current.xml
TAG = s3xml.TAG
RESOURCE = TAG["resource"]
DATA = TAG["data"]
ATTRIBUTE = s3xml.ATTRIBUTE
NAME = ATTRIBUTE["name"]
FIELD = ATTRIBUTE["field"]
VALUE = ATTRIBUTE["value"]
loc_tablename = "gis_location"
tag_tablename = "gis_location_tag"
tag_fieldname = "tag"
val_fieldname = "value"
polygon_tag = "cap_polygon"
circle_tag = "cap_circle"
fallback_polygon_tag = "cap_polygon_fallback"
fallback_circle_tag = "cap_circle_fallback"
def __cap_gis_location_add_polygon(element, cap_polygon_text, fallback=False):
"""
Helper for cap_gis_location_xml_post_render that adds the CAP polygon
data to the current element in a gis_location_tag element.
"""
# Make a gis_location_tag.
tag_resource = SubElement(element, RESOURCE)
tag_resource.set(NAME, tag_tablename)
tag_field = SubElement(tag_resource, DATA)
# Add tag and value children.
tag_field.set(FIELD, tag_fieldname)
if fallback:
tag_field.text = fallback_polygon_tag
else:
tag_field.text = polygon_tag
val_field = SubElement(tag_resource, DATA)
val_field.set(FIELD, val_fieldname)
val_field.text = cap_polygon_text
def __cap_gis_location_add_circle(element, lat, lon, radius, fallback=False):
"""
Helper for cap_gis_location_xml_post_render that adds CAP circle
data to the current element in a gis_location_tag element.
"""
# Make a gis_location_tag.
tag_resource = SubElement(element, RESOURCE)
tag_resource.set(NAME, tag_tablename)
tag_field = SubElement(tag_resource, DATA)
# Add tag and value children.
tag_field.set(FIELD, tag_fieldname)
if fallback:
tag_field.text = fallback_circle_tag
else:
tag_field.text = circle_tag
val_field = SubElement(tag_resource, DATA)
val_field.set(FIELD, val_fieldname)
# Construct a CAP circle string: latitude,longitude radius
cap_circle_text = "%s,%s %s" % (lat, lon, radius)
val_field.text = cap_circle_text
# Sort out the geometry case by wkt, CAP tags, gis_feature_type, bounds,...
# Check the two cases for CAP-specific locations first, as those will have
# definite export values. For others, we'll attempt to produce either a
# circle or polygon: Locations with a bounding box will get a box polygon,
# points will get a zero-radius circle.
# Currently wkt is stripped out of gis_location records right here:
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1332
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L1426
# https://github.com/flavour/eden/blob/master/modules/s3/s3resource.py#L3152
# Until we provide a way to configure that choice, this will not work for
# polygons.
wkt = record.get("wkt", None)
# WKT POLYGON: Although there is no WKT spec, according to every reference
# that deals with nested polygons, the outer, enclosing, polygon must be
# listed first. Hence, we extract only the first polygon, as CAP has no
# provision for nesting.
if wkt and wkt.startswith("POLYGON"):
# ToDo: Is it sufficient to test for adjacent (( to find the start of
# the polygon, or might there be whitespace between them?
start = wkt.find("((")
end = wkt.find(")")
if start >=0 and end >=0:
polygon_text = wkt[start + 2 : end]
points_text = polygon_text.split(",")
points = [p.split() for p in points_text]
cap_points_text = ["%s,%s" % (point[1], point[0]) for point in points]
cap_polygon_text = " ".join(cap_points_text)
__cap_gis_location_add_polygon(element, cap_polygon_text)
return
# Fall through if the wkt string was mal-formed.
# CAP circle stored in a gis_location_tag with tag = cap_circle.
# If there is a cap_circle tag, we don't need to do anything further, as
# export.xsl will use it. However, we don't know if there is a cap_circle
# tag...
#
# @ToDo: The export calls xml_post_render after processing a resource's
# fields, but before its components are added as children in the xml tree.
# If this were delayed til after the components were added, we could look
# there for the cap_circle gis_location_tag record. Since xml_post_parse
# isn't in use yet (except for this), maybe we could look at moving it til
# after the components?
#
# For now, with the xml_post_render before components: We could do a db
# query to check for a real cap_circle tag record, and not bother with
# creating fallbacks from bounding box or point...but we don't have to.
# Instead, just go ahead and add the fallbacks under different tag names,
# and let the export.xsl sort them out. This only wastes a little time
# compared to a db query.
# ToDo: MULTIPOLYGON -- Can stitch together the outer polygons in the
# multipolygon, but would need to assure all were the same handedness.
# The remaining cases are for locations that don't have either polygon wkt
# or a cap_circle tag.
# Bounding box: Make a four-vertex polygon from the bounding box.
# This is a fallback, as if there is a circle tag, we'll use that.
lon_min = record.get("lon_min", None)
lon_max = record.get("lon_max", None)
lat_min = record.get("lat_min", None)
lat_max = record.get("lat_max", None)
if lon_min and lon_max and lat_min and lat_max and \
(lon_min != lon_max) and (lat_min != lat_max):
# Although there is no WKT requirement, arrange the points in
# counterclockwise order. Recall format is:
        # lat1,lon1 lat2,lon2 ... latN,lonN lat1,lon1
cap_polygon_text = \
"%(lat_min)s,%(lon_min)s %(lat_min)s,%(lon_max)s %(lat_max)s,%(lon_max)s %(lat_max)s,%(lon_min)s %(lat_min)s,%(lon_min)s" \
% {"lon_min": lon_min,
"lon_max": lon_max,
"lat_min": lat_min,
"lat_max": lat_max}
__cap_gis_location_add_polygon(element, cap_polygon_text, fallback=True)
return
# WKT POINT or location with lat, lon: This can be rendered as a
# zero-radius circle.
# Q: Do we put bounding boxes around POINT locations, and are they
# meaningful?
lat = record.get("lat", None)
lon = record.get("lon", None)
if not lat or not lon:
# Look for POINT.
if wkt and wkt.startswith("POINT"):
start = wkt.find("(")
end = wkt.find(")")
if start >=0 and end >=0:
                point_text = wkt[start + 1 : end]
point = point_text.split()
try:
lon = float(point[0])
lat = float(point[1])
except ValueError:
pass
if lat and lon:
# Add a (fallback) circle with zero radius.
__cap_gis_location_add_circle(element, lat, lon, 0, True)
return
# ToDo: Other WKT.
# Did not find anything to use. Presumably the area has a text description.
return
# =============================================================================
def cap_alert_list_layout(list_id, item_id, resource, rfields, record):
"""
Default dataList item renderer for CAP Alerts on the Home page.
@param list_id: the HTML ID of the list
@param item_id: the HTML ID of the item
@param resource: the S3Resource to render
@param rfields: the S3ResourceFields to render
@param record: the record as dict
"""
record_id = record["cap_alert.id"]
item_class = "thumbnail"
T = current.T
#raw = record._row
# @ToDo: handle the case where we have multiple info segments &/or areas
headline = record["cap_info.headline"]
location = record["cap_area.name"]
priority = record["cap_info.priority"]
status = record["cap_alert.status"]
scope = record["cap_alert.scope"]
event = record["cap_info.event_type_id"]
if current.auth.s3_logged_in():
_href = URL(c="cap", f="alert", args=[record_id, "profile"])
else:
_href = URL(c="cap", f="public", args=[record_id, "profile"])
headline = A(headline,
_href = _href,
_target = "_blank",
)
if list_id == "map_popup":
itable = current.s3db.cap_info
# Map popup
event = itable.event_type_id.represent(event)
if priority is None:
priority = T("Unknown")
else:
priority = itable.priority.represent(priority)
description = record["cap_info.description"]
response_type = record["cap_info.response_type"]
sender = record["cap_info.sender_name"]
last = TAG[""](BR(),
description,
BR(),
", ".join(response_type),
BR(),
sender,
BR(),
)
else:
if priority == current.messages["NONE"]:
priority = T("Unknown")
last = BR()
details = "%s %s %s" % (priority, status, scope)
more = A(T("Full Alert"),
_href = _href,
_target = "_blank",
)
item = DIV(headline,
BR(),
location,
BR(),
details,
BR(),
event,
last,
more,
_class=item_class,
_id=item_id,
)
return item
# =============================================================================
def add_area_from_template(area_id, alert_id):
"""
Add an area from a Template along with its components Location and Tag
"""
afieldnames = ("name",
"altitude",
"ceiling",
)
lfieldnames = ("location_id",
)
tfieldnames = ("tag",
"value",
"comments",
)
db = current.db
s3db = current.s3db
atable = s3db.cap_area
ltable = s3db.cap_area_location
ttable = s3db.cap_area_tag
# Create Area Record from Template
atemplate = db(atable.id == area_id).select(limitby=(0, 1),
*afieldnames).first()
adata = {"is_template": False,
"alert_id": alert_id
}
for field in afieldnames:
adata[field] = atemplate[field]
aid = atable.insert(**adata)
# Add Area Location Components of Template
ltemplate = db(ltable.area_id == area_id).select(*lfieldnames)
for rows in ltemplate:
ldata = {"area_id": aid,
"alert_id": alert_id}
for field in lfieldnames:
ldata[field] = rows[field]
lid = ltable.insert(**ldata)
# Add Area Tag Components of Template
ttemplate = db(ttable.area_id == area_id).select(*tfieldnames)
for row in ttemplate:
tdata = {"area_id": aid}
for field in tfieldnames:
tdata[field] = row[field]
tid = ttable.insert(**tdata)
return aid
# =============================================================================
class CAPImportFeed(S3Method):
"""
Import CAP alerts from a URL
"""
# -------------------------------------------------------------------------
@staticmethod
def apply_method(r, **attr):
"""
Apply method.
@param r: the S3Request
@param attr: controller options for this request
"""
if r.representation == "html":
T = current.T
request = current.request
response = current.response
title = T("Import from Feed URL")
# @ToDo: use Formstyle
form = FORM(
TABLE(
TR(TD(DIV(B("%s:" % T("URL")),
SPAN(" *", _class="req"))),
TD(INPUT(_type="text", _name="url",
_id="url", _value="")),
TD(),
),
TR(TD(B("%s: " % T("User"))),
TD(INPUT(_type="text", _name="user",
_id="user", _value="")),
TD(),
),
TR(TD(B("%s: " % T("Password"))),
TD(INPUT(_type="text", _name="password",
_id="password", _value="")),
TD(),
),
TR(TD(B("%s: " % T("Ignore Errors?"))),
TD(INPUT(_type="checkbox", _name="ignore_errors",
_id="ignore_errors")),
TD(),
),
TR(TD(),
TD(INPUT(_type="submit", _value=T("Import"))),
TD(),
)
)
)
response.view = "create.html"
output = dict(title=title,
form=form)
if form.accepts(request.vars, current.session):
form_vars = form.vars
url = form_vars.get("url", None)
if not url:
response.error = T("URL is required")
return output
# @ToDo:
username = form_vars.get("username", None)
password = form_vars.get("password", None)
try:
file = fetch(url)
except urllib2.URLError:
response.error = str(sys.exc_info()[1])
return output
except urllib2.HTTPError:
response.error = str(sys.exc_info()[1])
return output
File = StringIO(file)
stylesheet = os.path.join(request.folder, "static", "formats",
"cap", "import.xsl")
xml = current.xml
tree = xml.parse(File)
resource = current.s3db.resource("cap_alert")
s3xml = xml.transform(tree, stylesheet_path=stylesheet,
name=resource.name)
try:
resource.import_xml(s3xml,
ignore_errors=form_vars.get("ignore_errors", None))
except:
response.error = str(sys.exc_info()[1])
else:
import_count = resource.import_count
if import_count:
response.confirmation = "%s %s" % \
(import_count,
T("Alerts successfully imported."))
else:
response.information = T("No Alerts available.")
return output
else:
raise HTTP(501, current.ERROR.BAD_METHOD)
# -----------------------------------------------------------------------------
class cap_AssignArea(S3Method):
"""
Assign CAP area to an alert, allows (multi-)selection of Predefined areas
"""
def apply_method(self, r, **attr):
"""
Apply method.
@param r: the S3Request
@param attr: controller options for this request
"""
if not r.record:
# Must be called for a particular alert
r.error(404, current.ERROR.BAD_RECORD)
# The record ID of the alert the method is called for
alert_id = r.id
# Requires permission to update this alert
authorised = current.auth.s3_has_permission("update", "cap_alert",
record_id=alert_id)
if not authorised:
r.unauthorised()
T = current.T
s3db = current.s3db
response = current.response
# Filter to limit the selection of areas
area_filter = (FS("is_template") == True)
if r.http == "POST":
# Template areas have been selected
added = 0
post_vars = r.post_vars
if all([n in post_vars for n in ("assign", "selected", "mode")]):
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
# Handle exclusion filter
if post_vars.mode == "Exclusive":
# URL filters
if "filterURL" in post_vars:
filters = S3URLQuery.parse_url(post_vars.ajaxURL)
else:
filters = None
query = area_filter & (~(FS("id").belongs(selected)))
aresource = s3db.resource("cap_area",
filter = query,
vars = filters)
rows = aresource.select(["id"], as_rows=True)
selected = [str(row.id) for row in rows]
for area_id in selected:
area_id = int(area_id.strip())
add_area_from_template(area_id, alert_id)
added += 1
current.session.confirmation = T("%(number)s assigned") % \
{"number": added}
if added > 0:
# Redirect to the list of areas of this alert
redirect(URL(args=[r.id, "area"], vars={}))
else:
# Return to the "assign" page
redirect(URL(args=r.args, vars={}))
elif r.http == "GET":
# Filter widgets (@todo: lookup from cap_area resource config?)
filter_widgets = []
# List fields
list_fields = ["id",
"name",
"event_type_id",
"priority",
]
# Data table
aresource = s3db.resource("cap_area", filter=area_filter)
totalrows = aresource.count()
get_vars = r.get_vars
if "pageLength" in get_vars:
display_length = get_vars["pageLength"]
if display_length == "None":
display_length = None
else:
display_length = int(display_length)
else:
display_length = 25
if display_length:
limit = 4 * display_length
else:
limit = None
# Datatable filter and sorting
query, orderby, left = aresource.datatable_filter(list_fields,
get_vars,
)
aresource.add_filter(query)
# Extract the data
data = aresource.select(list_fields,
start = 0,
limit = limit,
orderby = orderby,
left = left,
count = True,
represent = True,
)
filteredrows = data.numrows
# Instantiate the datatable
dt = S3DataTable(data.rfields, data.rows)
dt_id = "datatable"
# Bulk actions
dt_bulk_actions = [(T("Assign"), "assign")]
if r.representation == "html":
# Page load
# Disallow deletion from this table, and link all open-buttons
# to the respective area read page
aresource.configure(deletable = False)
profile_url = URL(c = "cap",
f = "area",
args = ["[id]", "read"],
)
S3CRUD.action_buttons(r,
deletable = False,
read_url = profile_url,
update_url = profile_url,
)
# Hide export icons
response.s3.no_formats = True
# Render the datatable (will be "items" in the output dict)
items = dt.html(totalrows,
filteredrows,
dt_id,
dt_ajax_url = URL(args = r.args,
extension="aadata",
vars={},
),
dt_bulk_actions = dt_bulk_actions,
dt_pageLength = display_length,
dt_pagination = "true",
dt_searching = "false",
)
# Filter form
if filter_widgets:
# Where to retrieve filtered data from:
get_vars = aresource.crud._remove_filters(r.get_vars)
filter_submit_url = r.url(vars=get_vars)
# Where to retrieve updated filter options from:
filter_ajax_url = URL(f="cap_area",
args=["filter.options"],
vars={},
)
get_config = aresource.get_config
filter_clear = get_config("filter_clear", True)
filter_formstyle = get_config("filter_formstyle", None)
filter_submit = get_config("filter_submit", True)
filter_form = S3FilterForm(filter_widgets,
clear = filter_clear,
formstyle = filter_formstyle,
submit = filter_submit,
ajax = True,
url = filter_submit_url,
ajaxurl = filter_ajax_url,
_class = "filter-form",
_id = "datatable-filter-form",
)
fresource = s3db.resource("cap_area")
ff = filter_form.html(fresource,
r.get_vars,
target = "datatable",
)
else:
ff = ""
output = {"items": items, # the datatable
"title": T("Add Areas"),
"list_filter_form": ff,
}
response.view = "list_filter.html"
return output
elif r.representation == "aadata":
# Ajax refresh
if "draw" in get_vars:
echo = int(get_vars.draw)
else:
echo = None
items = dt.json(totalrows,
filteredrows,
dt_id,
echo,
dt_bulk_actions=dt_bulk_actions,
)
response.headers["Content-Type"] = "application/json"
return items
else:
r.error(501, current.ERROR.BAD_FORMAT)
else:
r.error(405, current.ERROR.BAD_METHOD)
# -----------------------------------------------------------------------------
class cap_AreaRepresent(S3Represent):
""" Representation of CAP Area """
def __init__(self,
show_link=False,
multiple=False):
settings = current.deployment_settings
# Translation using cap_area_name & not T()
translate = settings.get_L10n_translate_cap_area()
if translate:
language = current.session.s3.language
if language == settings.get_L10n_default_language():
translate = False
super(cap_AreaRepresent,
self).__init__(lookup="cap_area",
show_link=show_link,
translate=translate,
multiple=multiple
)
# -------------------------------------------------------------------------
def lookup_rows(self, key, values, fields=None):
"""
            Custom lookup method for Area (CAP) rows. Parameters
            key and fields are not used, but are kept for API
            compatibility reasons.
@param values: the cap_area IDs
"""
db = current.db
s3db = current.s3db
artable = s3db.cap_area
count = len(values)
if count == 1:
query = (artable.id == values[0])
else:
query = (artable.id.belongs(values))
fields = [artable.id,
artable.name,
]
if self.translate:
ltable = s3db.cap_area_name
fields += [ltable.name_l10n,
]
left = [ltable.on((ltable.area_id == artable.id) & \
(ltable.language == current.session.s3.language)),
]
else:
left = None
rows = current.db(query).select(left = left,
limitby = (0, count),
*fields)
return rows
# -------------------------------------------------------------------------
def represent_row(self, row):
"""
Represent a single Row
@param row: the cap_area Row
"""
if self.translate:
name = row["cap_area_name.name_l10n"] or row["cap_area.name"]
else:
name = row["cap_area.name"]
if not name:
return self.default
return s3_unicode(name)
# END =========================================================================
|
mit
| 8,098,246,358,597,634,000
| 43.661408
| 142
| 0.429789
| false
| 5.030208
| false
| false
| false
|
holtjma/msbwt
|
MUS/util.py
|
1
|
4898
|
'''
Created on Nov 1, 2013
@summary: this file mostly contains some auxiliary checks for the command line interface to make sure it's
handed correct file types
@author: holtjma
'''
import argparse as ap
import glob
import gzip
import os
#I see no need for the versions to be different as of now
DESC = "A multi-string BWT package for DNA and RNA."
VERSION = '0.3.0'
PKG_VERSION = VERSION
validCharacters = set(['$', 'A', 'C', 'G', 'N', 'T'])
def readableFastqFile(fileName):
'''
    @param fileName - must be an existing, readable FASTQ file; '.txt', '.gz', '.fastq', and '.fq' extensions are supported as of now
'''
if os.path.isfile(fileName) and os.access(fileName, os.R_OK):
if fileName.endswith('.txt') or fileName.endswith('.gz') or fileName.endswith('.fastq') or fileName.endswith('.fq'):
return fileName
else:
raise ap.ArgumentTypeError("Wrong file format ('.txt', '.gz', '.fastq', or '.fq' required): '%s'" % fileName)
else:
raise ap.ArgumentTypeError("Cannot read file '%s'." % fileName)
'''
TODO: REMOVE UNUSED FUNCTION
'''
def readableNpyFile(fileName):
if os.path.isfile(fileName) and os.access(fileName, os.R_OK):
if fileName.endswith('.npy'):
return fileName
else:
raise ap.ArgumentTypeError("Wrong file format ('.npy' required): '%s'" % fileName)
else:
raise ap.ArgumentTypeError("Cannot read file '%s'." % fileName)
'''
TODO: REMOVE UNUSED FUNCTION
'''
def writableNpyFile(fileName):
if os.access(os.path.dirname(fileName), os.W_OK):
if fileName.endswith('.npy'):
return fileName
else:
raise ap.ArgumentTypeError("Wrong file format ('.npy' required): '%s'." % fileName)
else:
raise ap.ArgumentTypeError("Cannot write file '%s'." % fileName)
def newDirectory(dirName):
'''
    @param dirName - a directory with this name will be created; it must not already exist, or must be empty if it does
'''
#strip any tail '/'
if dirName[-1] == '/':
dirName = dirName[0:-1]
if os.path.exists(dirName):
if len(glob.glob(dirName+'/*')) != 0:
raise ap.ArgumentTypeError("Non-empty directory already exists: '%s'" % dirName)
else:
        #this can raise its own exception
os.makedirs(dirName)
return dirName
def existingDirectory(dirName):
'''
@param dirName - checks to make sure this directory already exists
TODO: add checks for the bwt files?
'''
#strip any tail '/'
if dirName[-1] == '/':
dirName = dirName[0:-1]
if os.path.isdir(dirName):
return dirName
else:
raise ap.ArgumentTypeError("Directory does not exist: '%s'" % dirName)
def newOrExistingDirectory(dirName):
'''
@param dirName - the directory could be pre-existing, if not it's created
'''
if dirName[-1] == '/':
dirName = dirName[0:-1]
if os.path.isdir(dirName):
return dirName
elif os.path.exists(dirName):
        raise ap.ArgumentTypeError("'%s' exists but is not a directory" % dirName)
else:
os.makedirs(dirName)
return dirName
def validKmer(kmer):
'''
    @param kmer - must contain only the characters used in our sequences ($, A, C, G, N, T)
'''
for c in kmer:
if not (c in validCharacters):
raise ap.ArgumentTypeError("Invalid k-mer: All characters must be in ($, A, C, G, N, T)")
return kmer
def fastaIterator(fastaFN):
'''
Iterator that yields tuples containing a sequence label and the sequence itself
@param fastaFN - the FASTA filename to open and parse
@return - an iterator yielding tuples of the form (label, sequence) from the FASTA file
'''
if fastaFN[len(fastaFN)-3:] == '.gz':
fp = gzip.open(fastaFN, 'r')
else:
fp = open(fastaFN, 'r')
label = ''
segments = []
line = ''
for line in fp:
if line[0] == '>':
if label != '':
yield (label, ''.join(segments))
label = (line.strip('\n')[1:]).split(' ')[0]
segments = []
else:
segments.append(line.strip('\n'))
if label != '' and len(segments) > 0:
yield (label, ''.join(segments))
fp.close()
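# Example usage (illustrative; 'reads.fasta' is a hypothetical file):
#
#   for label, seq in fastaIterator('reads.fasta'):
#       print label, len(seq)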
def fastqIterator(fastqFN):
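    # Yields one (header, sequence, plus-line, qualities) tuple per FASTQ
    # record; records span four lines, and i & 0x3 tracks the position (0-3)
    # within the current record.  '.gz' input is handled as in fastaIterator.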
if fastqFN[len(fastqFN)-3:] == '.gz':
fp = gzip.open(fastqFN, 'r')
else:
fp = open(fastqFN, 'r')
l1 = ''
seq = ''
l2 = ''
quals = ''
i = 0
for line in fp:
if i & 0x3 == 0:
l1 = line.strip('\n')
elif i & 0x3 == 1:
seq = line.strip('\n')
elif i & 0x3 == 2:
l2 = line.strip('\n')
else:
quals = line.strip('\n')
yield (l1, seq, l2, quals)
l1 = ''
seq = ''
l2 = ''
quals = ''
i += 1
fp.close()
|
mit
| -1,118,803,294,601,152,300
| 28.506024
| 124
| 0.569212
| false
| 3.719058
| false
| false
| false
|
giuseppe/virt-manager
|
virtManager/uihelpers.py
|
1
|
44094
|
#
# Copyright (C) 2009, 2013, 2014 Red Hat, Inc.
# Copyright (C) 2009 Cole Robinson <crobinso@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
import logging
import os
import statvfs
import pwd
# pylint: disable=E0611
from gi.repository import GObject
from gi.repository import Gtk
# pylint: enable=E0611
import libvirt
import virtinst
from virtManager import config
OPTICAL_DEV_PATH = 0
OPTICAL_LABEL = 1
OPTICAL_IS_MEDIA_PRESENT = 2
OPTICAL_DEV_KEY = 3
OPTICAL_MEDIA_KEY = 4
OPTICAL_IS_VALID = 5
try:
import gi
gi.check_version("3.7.4")
can_set_row_none = True
except (ValueError, AttributeError):
can_set_row_none = False
vm_status_icons = {
libvirt.VIR_DOMAIN_BLOCKED: "state_running",
libvirt.VIR_DOMAIN_CRASHED: "state_shutoff",
libvirt.VIR_DOMAIN_PAUSED: "state_paused",
libvirt.VIR_DOMAIN_RUNNING: "state_running",
libvirt.VIR_DOMAIN_SHUTDOWN: "state_shutoff",
libvirt.VIR_DOMAIN_SHUTOFF: "state_shutoff",
libvirt.VIR_DOMAIN_NOSTATE: "state_running",
# VIR_DOMAIN_PMSUSPENDED
7: "state_paused",
}
############################################################
# Helpers for shared storage UI between create/addhardware #
############################################################
def set_sparse_tooltip(widget):
sparse_str = _("Fully allocating storage may take longer now, "
"but the OS install phase will be quicker. \n\n"
"Skipping allocation can also cause space issues on "
"the host machine, if the maximum image size exceeds "
"available storage space. \n\n"
"Tip: Storage format qcow2 and qed "
"do not support full allocation.")
widget.set_tooltip_text(sparse_str)
def host_disk_space(conn):
pool = get_default_pool(conn)
path = get_default_dir(conn)
avail = 0
if pool and pool.is_active():
# FIXME: make sure not inactive?
# FIXME: use a conn specific function after we send pool-added
pool.refresh()
avail = int(pool.get_available())
elif not conn.is_remote() and os.path.exists(path):
vfs = os.statvfs(os.path.dirname(path))
avail = vfs[statvfs.F_FRSIZE] * vfs[statvfs.F_BAVAIL]
return float(avail / 1024.0 / 1024.0 / 1024.0)
def update_host_space(conn, widget):
try:
max_storage = host_disk_space(conn)
except:
logging.exception("Error determining host disk space")
return
def pretty_storage(size):
return "%.1f GB" % float(size)
hd_label = ("%s available in the default location" %
pretty_storage(max_storage))
hd_label = ("<span color='#484848'>%s</span>" % hd_label)
widget.set_markup(hd_label)
def check_default_pool_active(err, conn):
default_pool = get_default_pool(conn)
if default_pool and not default_pool.is_active():
res = err.yes_no(_("Default pool is not active."),
_("Storage pool '%s' is not active. "
"Would you like to start the pool "
"now?") % default_pool.get_name())
if not res:
return False
# Try to start the pool
try:
default_pool.start()
logging.info("Started pool '%s'", default_pool.get_name())
except Exception, e:
            return err.show_err(_("Could not start storage pool "
"'%s': %s") %
(default_pool.get_name(), str(e)))
return True
#####################################################
# Hardware model list building (for details, addhw) #
#####################################################
def set_combo_text_column(combo, col):
if combo.get_has_entry():
combo.set_entry_text_column(col)
else:
text = Gtk.CellRendererText()
combo.pack_start(text, True)
combo.add_attribute(text, 'text', col)
def build_video_combo(vm, combo, no_default=None):
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
combo.get_model().set_sort_column_id(1, Gtk.SortType.ASCENDING)
populate_video_combo(vm, combo, no_default)
def populate_video_combo(vm, combo, no_default=None):
model = combo.get_model()
has_spice = bool([g for g in vm.get_graphics_devices()
if g.type == g.TYPE_SPICE])
has_qxl = bool([v for v in vm.get_video_devices()
if v.model == "qxl"])
model.clear()
tmpdev = virtinst.VirtualVideoDevice(vm.conn.get_backend())
for m in tmpdev.MODELS:
if vm.stable_defaults():
if m == "qxl" and not has_spice and not has_qxl:
# Only list QXL video option when VM has SPICE video
continue
if m == tmpdev.MODEL_DEFAULT and no_default:
continue
model.append([m, tmpdev.pretty_model(m)])
if len(model) > 0:
combo.set_active(0)
def build_sound_combo(vm, combo, no_default=False):
model = Gtk.ListStore(str)
combo.set_model(model)
set_combo_text_column(combo, 0)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
stable_defaults = vm.stable_defaults()
stable_soundmodels = ["ich6", "ac97"]
for m in virtinst.VirtualAudio.MODELS:
if m == virtinst.VirtualAudio.MODEL_DEFAULT and no_default:
continue
if (stable_defaults and m not in stable_soundmodels):
continue
model.append([m])
if len(model) > 0:
combo.set_active(0)
def build_watchdogmodel_combo(vm, combo, no_default=False):
ignore = vm
model = Gtk.ListStore(str)
combo.set_model(model)
set_combo_text_column(combo, 0)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
for m in virtinst.VirtualWatchdog.MODELS:
if m == virtinst.VirtualAudio.MODEL_DEFAULT and no_default:
continue
model.append([m])
if len(model) > 0:
combo.set_active(0)
def build_watchdogaction_combo(vm, combo, no_default=False):
ignore = vm
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
for m in virtinst.VirtualWatchdog.ACTIONS:
if m == virtinst.VirtualWatchdog.ACTION_DEFAULT and no_default:
continue
model.append([m, virtinst.VirtualWatchdog.get_action_desc(m)])
if len(model) > 0:
combo.set_active(0)
def build_source_mode_combo(vm, combo):
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
populate_source_mode_combo(vm, combo)
combo.set_active(0)
def populate_source_mode_combo(vm, combo):
ignore = vm
model = combo.get_model()
model.clear()
# [xml value, label]
model.append([None, "Default"])
model.append(["vepa", "VEPA"])
model.append(["bridge", "Bridge"])
model.append(["private", "Private"])
model.append(["passthrough", "Passthrough"])
def build_smartcard_mode_combo(vm, combo):
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
populate_smartcard_mode_combo(vm, combo)
idx = -1
for rowid in range(len(combo.get_model())):
idx = 0
row = combo.get_model()[rowid]
if row[0] == virtinst.VirtualSmartCardDevice.MODE_DEFAULT:
idx = rowid
break
combo.set_active(idx)
def populate_smartcard_mode_combo(vm, combo):
ignore = vm
model = combo.get_model()
model.clear()
# [xml value, label]
model.append(["passthrough", "Passthrough"])
model.append(["host", "Host"])
def build_redir_type_combo(vm, combo):
model = Gtk.ListStore(str, str, bool)
combo.set_model(model)
set_combo_text_column(combo, 1)
populate_redir_type_combo(vm, combo)
combo.set_active(0)
def populate_redir_type_combo(vm, combo):
ignore = vm
model = combo.get_model()
model.clear()
# [xml value, label, conn details]
model.append(["spicevmc", "Spice channel", False])
model.append(["tcp", "TCP", True])
def build_tpm_type_combo(vm, combo):
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
populate_tpm_type_combo(vm, combo)
idx = -1
for rowid in range(len(combo.get_model())):
idx = 0
row = combo.get_model()[rowid]
if row[0] == virtinst.VirtualTPMDevice.TYPE_DEFAULT:
idx = rowid
break
combo.set_active(idx)
def populate_tpm_type_combo(vm, combo):
ignore = vm
types = combo.get_model()
types.clear()
# [xml value, label]
for t in virtinst.VirtualTPMDevice.TYPES:
types.append([t, virtinst.VirtualTPMDevice.get_pretty_type(t)])
def build_netmodel_combo(vm, combo):
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
populate_netmodel_combo(vm, combo)
combo.set_active(0)
def populate_netmodel_combo(vm, combo):
model = combo.get_model()
model.clear()
# [xml value, label]
model.append([None, _("Hypervisor default")])
if vm.is_hvm():
mod_list = ["rtl8139", "ne2k_pci", "pcnet", "e1000"]
if vm.get_hv_type() in ["kvm", "qemu", "test"]:
mod_list.append("virtio")
if (vm.get_hv_type() == "kvm" and
vm.get_machtype() == "pseries"):
mod_list.append("spapr-vlan")
if vm.get_hv_type() in ["xen", "test"]:
mod_list.append("netfront")
mod_list.sort()
for m in mod_list:
model.append([m, m])
def build_cache_combo(vm, combo):
ignore = vm
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
combo.set_active(-1)
for m in virtinst.VirtualDisk.cache_types:
model.append([m, m])
_iter = model.insert(0, [None, "default"])
combo.set_active_iter(_iter)
def build_io_combo(vm, combo, no_default=False):
ignore = vm
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
combo.set_active(-1)
for m in virtinst.VirtualDisk.io_modes:
model.append([m, m])
if not no_default:
model.append([None, "default"])
combo.set_active(0)
def build_disk_bus_combo(vm, combo, no_default=False):
ignore = vm
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
model.set_sort_column_id(1, Gtk.SortType.ASCENDING)
if not no_default:
model.append([None, "default"])
combo.set_active(-1)
def build_vnc_keymap_combo(vm, combo, no_default=False):
ignore = vm
model = Gtk.ListStore(str, str)
combo.set_model(model)
set_combo_text_column(combo, 1)
if not no_default:
model.append([None, "default"])
else:
model.append([None, "Auto"])
model.append([virtinst.VirtualGraphics.KEYMAP_LOCAL,
"Copy local keymap"])
for k in virtinst.VirtualGraphics.valid_keymaps():
model.append([k, k])
combo.set_active(-1)
#####################################
# Storage format list/combo helpers #
#####################################
def update_storage_format_combo(vm, combo, create):
model = Gtk.ListStore(str)
combo.set_model(model)
set_combo_text_column(combo, 0)
formats = ["raw", "qcow2", "qed"]
no_create_formats = []
if not vm.stable_defaults():
formats.append("vmdk")
no_create_formats.append("vdi")
for m in formats:
model.append([m])
if not create:
for m in no_create_formats:
model.append([m])
if create:
combo.set_active(0)
#######################################################################
# Widgets for listing network device options (in create, addhardware) #
#######################################################################
def pretty_network_desc(nettype, source=None, netobj=None):
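    # Builds a human-readable label from the interface type and source,
    # e.g. (illustrative) TYPE_VIRTUAL with source "default" and no netobj
    # gives "Virtual network 'default'"; the forward mode is appended when a
    # netobj is passed.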
if nettype == virtinst.VirtualNetworkInterface.TYPE_USER:
return _("Usermode networking")
extra = None
if nettype == virtinst.VirtualNetworkInterface.TYPE_BRIDGE:
ret = _("Bridge")
elif nettype == virtinst.VirtualNetworkInterface.TYPE_VIRTUAL:
ret = _("Virtual network")
if netobj:
extra = ": %s" % netobj.pretty_forward_mode()
else:
ret = nettype.capitalize()
if source:
ret += " '%s'" % source
if extra:
ret += " %s" % extra
return ret
def init_network_list(net_list, bridge_box, source_mode_combo=None,
vport_expander=None):
# [ network type, source name, label, sensitive?, net is active,
# manual bridge, net instance]
net_model = Gtk.ListStore(str, str, str, bool, bool, bool, object)
net_list.set_model(net_model)
net_list.connect("changed", net_list_changed, bridge_box,
source_mode_combo, vport_expander)
text = Gtk.CellRendererText()
net_list.pack_start(text, True)
net_list.add_attribute(text, 'text', 2)
net_list.add_attribute(text, 'sensitive', 3)
def net_list_changed(net_list, bridge_box,
source_mode_combo, vport_expander):
active = net_list.get_active()
if active < 0:
return
if not bridge_box:
return
row = net_list.get_model()[active]
if source_mode_combo is not None:
doshow = (row[0] == virtinst.VirtualNetworkInterface.TYPE_DIRECT)
set_grid_row_visible(source_mode_combo, doshow)
vport_expander.set_visible(doshow)
show_bridge = row[5]
set_grid_row_visible(bridge_box, show_bridge)
def get_network_selection(net_list, bridge_entry):
idx = net_list.get_active()
if idx == -1:
return None, None
row = net_list.get_model()[net_list.get_active()]
net_type = row[0]
net_src = row[1]
net_check_bridge = row[5]
if net_check_bridge and bridge_entry:
net_type = virtinst.VirtualNetworkInterface.TYPE_BRIDGE
net_src = bridge_entry.get_text()
return net_type, net_src
def populate_network_list(net_list, conn, show_direct_interfaces=True):
model = net_list.get_model()
model.clear()
vnet_bridges = []
vnet_dict = {}
bridge_dict = {}
iface_dict = {}
def build_row(nettype, name, label, is_sensitive, is_running,
manual_bridge=False, key=None):
return [nettype, name, label,
is_sensitive, is_running, manual_bridge,
key]
def set_active(idx):
net_list.set_active(idx)
def add_dict(indict, model):
keylist = indict.keys()
keylist.sort()
rowlist = [indict[k] for k in keylist]
for row in rowlist:
model.append(row)
# For qemu:///session
if conn.is_qemu_session():
nettype = virtinst.VirtualNetworkInterface.TYPE_USER
r = build_row(nettype, None, pretty_network_desc(nettype), True, True)
model.append(r)
set_active(0)
return
hasNet = False
netIdxLabel = None
# Virtual Networks
for uuid in conn.list_net_uuids():
net = conn.get_net(uuid)
nettype = virtinst.VirtualNetworkInterface.TYPE_VIRTUAL
label = pretty_network_desc(nettype, net.get_name(), net)
if not net.is_active():
label += " (%s)" % _("Inactive")
hasNet = True
# FIXME: Should we use 'default' even if it's inactive?
# FIXME: This preference should be configurable
if net.get_name() == "default":
netIdxLabel = label
vnet_dict[label] = build_row(nettype, net.get_name(), label, True,
net.is_active(), key=net.get_uuid())
# Build a list of vnet bridges, so we know not to list them
# in the physical interface list
vnet_bridge = net.get_bridge_device()
if vnet_bridge:
vnet_bridges.append(vnet_bridge)
if not hasNet:
label = _("No virtual networks available")
vnet_dict[label] = build_row(None, None, label, False, False)
vnet_taps = []
for vm in conn.vms.values():
for nic in vm.get_network_devices(refresh_if_nec=False):
if nic.target_dev and nic.target_dev not in vnet_taps:
vnet_taps.append(nic.target_dev)
skip_ifaces = ["lo"]
# Physical devices
hasShared = False
brIdxLabel = None
for name in conn.list_net_device_paths():
br = conn.get_net_device(name)
bridge_name = br.get_bridge()
nettype = virtinst.VirtualNetworkInterface.TYPE_BRIDGE
if ((bridge_name in vnet_bridges) or
(br.get_name() in vnet_bridges) or
(br.get_name() in vnet_taps) or
(br.get_name() in [v + "-nic" for v in vnet_bridges]) or
(br.get_name() in skip_ifaces)):
# Don't list this, as it is basically duplicating virtual net info
continue
if br.is_shared():
sensitive = True
if br.get_bridge():
hasShared = True
brlabel = "(%s)" % pretty_network_desc(nettype, bridge_name)
else:
bridge_name = name
brlabel = _("(Empty bridge)")
else:
if (show_direct_interfaces and
conn.check_support(
conn.SUPPORT_CONN_DIRECT_INTERFACE)):
sensitive = True
nettype = virtinst.VirtualNetworkInterface.TYPE_DIRECT
bridge_name = name
brlabel = ": %s" % _("macvtap")
else:
sensitive = False
brlabel = "(%s)" % _("Not bridged")
label = _("Host device %s %s") % (br.get_name(), brlabel)
if hasShared and not brIdxLabel:
brIdxLabel = label
row = build_row(nettype, bridge_name, label, sensitive, True,
key=br.get_name())
if sensitive:
bridge_dict[label] = row
else:
iface_dict[label] = row
add_dict(bridge_dict, model)
add_dict(vnet_dict, model)
add_dict(iface_dict, model)
# If there is a bridge device, default to that
# If not, use 'default' network
# If not present, use first list entry
# If list empty, use no network devices
return_warn = False
label = brIdxLabel or netIdxLabel
for idx in range(len(model)):
row = model[idx]
is_inactive = not row[4]
if label:
if row[2] == label:
default = idx
return_warn = is_inactive
break
else:
if row[3] is True:
default = idx
return_warn = is_inactive
break
else:
return_warn = True
row = build_row(None, None, _("No networking"), True, False)
model.insert(0, row)
default = 0
# After all is said and done, add a manual bridge option
manual_row = build_row(None, None, _("Specify shared device name"),
True, False, manual_bridge=True)
model.append(manual_row)
set_active(default)
return return_warn
def validate_network(err, conn, nettype, devname, macaddr, model=None):
net = None
if nettype is None:
return None
# Make sure VirtualNetwork is running
netobj = None
if nettype == virtinst.VirtualNetworkInterface.TYPE_VIRTUAL:
for net in conn.nets.values():
if net.get_name() == devname:
netobj = net
break
if netobj and not netobj.is_active():
res = err.yes_no(_("Virtual Network is not active."),
_("Virtual Network '%s' is not active. "
"Would you like to start the network "
"now?") % devname)
if not res:
return False
# Try to start the network
try:
netobj.start()
netobj.tick()
logging.info("Started network '%s'", devname)
except Exception, e:
return err.show_err(_("Could not start virtual network "
"'%s': %s") % (devname, str(e)))
# Create network device
try:
net = virtinst.VirtualNetworkInterface(conn.get_backend())
net.type = nettype
net.source = devname
net.macaddr = macaddr
net.model = model
if net.model == "spapr-vlan":
net.address.set_addrstr("spapr-vio")
except Exception, e:
return err.val_err(_("Error with network parameters."), e)
# Make sure there is no mac address collision
isfatal, errmsg = net.is_conflict_net(conn.get_backend(), net.macaddr)
if isfatal:
return err.val_err(_("Mac address collision."), errmsg)
elif errmsg is not None:
retv = err.yes_no(_("Mac address collision."),
_("%s Are you sure you want to use this "
"address?") % errmsg)
if not retv:
return False
return net
############################################
# Populate media widget (choosecd, create) #
############################################
def init_mediadev_combo(widget):
# [Device path, pretty label, has_media?, device key, media key,
# vmmMediaDevice, is valid device]
model = Gtk.ListStore(str, str, bool, str, str, bool)
widget.set_model(model)
model.clear()
text = Gtk.CellRendererText()
widget.pack_start(text, True)
widget.add_attribute(text, 'text', OPTICAL_LABEL)
widget.add_attribute(text, 'sensitive', OPTICAL_IS_VALID)
def populate_mediadev_combo(conn, widget, devtype):
sigs = []
model = widget.get_model()
model.clear()
set_mediadev_default(model)
sigs.append(conn.connect("mediadev-added", mediadev_added, widget, devtype))
sigs.append(conn.connect("mediadev-removed", mediadev_removed, widget))
widget.set_active(-1)
mediadev_set_default_selection(widget)
return sigs
def set_mediadev_default(model):
if len(model) == 0:
model.append([None, _("No device present"), False, None, None, False])
def set_row_from_object(row, obj):
row[OPTICAL_DEV_PATH] = obj.get_path()
row[OPTICAL_LABEL] = obj.pretty_label()
row[OPTICAL_IS_MEDIA_PRESENT] = obj.has_media()
row[OPTICAL_DEV_KEY] = obj.get_key()
row[OPTICAL_MEDIA_KEY] = obj.get_media_key()
row[OPTICAL_IS_VALID] = True
def mediadev_removed(ignore_helper, key, widget):
model = widget.get_model()
active = widget.get_active()
idx = 0
for row in model:
if row[OPTICAL_DEV_KEY] == key:
# Whole device removed
del(model[idx])
if idx > active and active != -1:
widget.set_active(active - 1)
elif idx == active:
widget.set_active(-1)
idx += 1
set_mediadev_default(model)
mediadev_set_default_selection(widget)
def mediadev_added(ignore_helper, newobj, widget, devtype):
model = widget.get_model()
if newobj.get_media_type() != devtype:
return
if model is None:
return
if len(model) == 1 and model[0][OPTICAL_IS_VALID] is False:
# Only entry is the 'No device' entry
model.clear()
newobj.connect("media-added", mediadev_media_changed, widget)
newobj.connect("media-removed", mediadev_media_changed, widget)
# Brand new device
row = [None, None, None, None, None, None]
set_row_from_object(row, newobj)
model.append(row)
mediadev_set_default_selection(widget)
def mediadev_media_changed(newobj, widget):
model = widget.get_model()
active = widget.get_active()
idx = 0
# Search for the row with matching device node and
# fill in info about inserted media. If model has no current
# selection, select the new media.
for row in model:
if row[OPTICAL_DEV_PATH] == newobj.get_path():
set_row_from_object(row, newobj)
has_media = row[OPTICAL_IS_MEDIA_PRESENT]
if has_media and active == -1:
widget.set_active(idx)
elif not has_media and active == idx:
widget.set_active(-1)
idx = idx + 1
mediadev_set_default_selection(widget)
def mediadev_set_default_selection(widget):
# Set the first active cdrom device as selected, otherwise none
model = widget.get_model()
idx = 0
active = widget.get_active()
if active != -1:
# already a selection, don't change it
return
for row in model:
if row[OPTICAL_IS_MEDIA_PRESENT] is True:
widget.set_active(idx)
return
idx += 1
widget.set_active(-1)
####################################################################
# Build toolbar shutdown button menu (manager and details toolbar) #
####################################################################
class _VMMenu(Gtk.Menu):
# pylint: disable=E1101
    # pylint can't detect functions we inherit from Gtk, ex self.add
def __init__(self, src, current_vm_cb, show_open=True):
Gtk.Menu.__init__(self)
self._parent = src
self._current_vm_cb = current_vm_cb
self._show_open = show_open
self._init_state()
def _add_action(self, label, signal,
iconname="system-shutdown", addcb=True):
if label.startswith("gtk-"):
item = Gtk.ImageMenuItem.new_from_stock(label, None)
else:
item = Gtk.ImageMenuItem.new_with_mnemonic(label)
if iconname:
if iconname.startswith("gtk-"):
icon = Gtk.Image.new_from_stock(iconname, Gtk.IconSize.MENU)
else:
icon = Gtk.Image.new_from_icon_name(iconname,
Gtk.IconSize.MENU)
item.set_image(icon)
item.vmm_widget_name = signal
if addcb:
item.connect("activate", self._action_cb)
self.add(item)
return item
def _action_cb(self, src):
vm = self._current_vm_cb()
if not vm:
return
self._parent.emit("action-%s-domain" % src.vmm_widget_name,
vm.conn.get_uri(), vm.get_uuid())
def _init_state(self):
raise NotImplementedError()
def update_widget_states(self, vm):
raise NotImplementedError()
class VMShutdownMenu(_VMMenu):
# pylint: disable=E1101
    # pylint can't detect functions we inherit from Gtk, ex self.add
def _init_state(self):
self._add_action(_("_Reboot"), "reboot")
self._add_action(_("_Shut Down"), "shutdown")
self._add_action(_("F_orce Reset"), "reset")
self._add_action(_("_Force Off"), "destroy")
self.add(Gtk.SeparatorMenuItem())
self._add_action(_("Sa_ve"), "save", iconname=Gtk.STOCK_SAVE)
self.show_all()
def update_widget_states(self, vm):
statemap = {
"reboot": bool(vm and vm.is_stoppable()),
"shutdown": bool(vm and vm.is_stoppable()),
"reset": bool(vm and vm.is_stoppable()),
"save": bool(vm and vm.is_destroyable()),
"destroy": bool(vm and vm.is_destroyable()),
}
for child in self.get_children():
name = getattr(child, "vmm_widget_name", None)
if name in statemap:
child.set_sensitive(statemap[name])
class VMActionMenu(_VMMenu):
# pylint: disable=E1101
    # pylint can't detect functions we inherit from Gtk, ex self.add
def _init_state(self):
self._add_action(_("_Run"), "run", Gtk.STOCK_MEDIA_PLAY)
self._add_action(_("_Pause"), "suspend", Gtk.STOCK_MEDIA_PAUSE)
self._add_action(_("R_esume"), "resume", Gtk.STOCK_MEDIA_PAUSE)
s = self._add_action(_("_Shut Down"), "shutdown", addcb=False)
s.set_submenu(VMShutdownMenu(self._parent, self._current_vm_cb))
self.add(Gtk.SeparatorMenuItem())
self._add_action(_("Clone..."), "clone", None)
self._add_action(_("Migrate..."), "migrate", None)
self._add_action(_("_Delete"), "delete", Gtk.STOCK_DELETE)
if self._show_open:
self.add(Gtk.SeparatorMenuItem())
self._add_action(Gtk.STOCK_OPEN, "show", None)
self.show_all()
def update_widget_states(self, vm):
statemap = {
"run": bool(vm and vm.is_runable()),
"shutdown": bool(vm and vm.is_stoppable()),
"suspend": bool(vm and vm.is_stoppable()),
"resume": bool(vm and vm.is_paused()),
"migrate": bool(vm and vm.is_stoppable()),
"clone": bool(vm and not vm.is_read_only()),
}
vismap = {
"suspend": bool(vm and not vm.is_paused()),
"resume": bool(vm and vm.is_paused()),
}
for child in self.get_children():
name = getattr(child, "vmm_widget_name", None)
if hasattr(child, "update_widget_states"):
child.update_widget_states(vm)
if name in statemap:
child.set_sensitive(statemap[name])
if name in vismap:
child.set_visible(vismap[name])
def change_run_text(self, text):
for child in self.get_children():
if getattr(child, "vmm_widget_name", None) == "run":
child.get_child().set_label(text)
#####################################
# Path permissions checker for qemu #
#####################################
def check_path_search_for_qemu(err, conn, path):
if conn.is_remote() or not conn.is_qemu_system():
return
user = config.running_config.default_qemu_user
for i in conn.caps.host.secmodels:
if i.model == "dac":
label = i.baselabels.get("kvm") or i.baselabels.get("qemu")
if not label:
continue
pwuid = pwd.getpwuid(int(label.split(":")[0].replace("+", "")))
if pwuid:
user = pwuid[0]
skip_paths = config.running_config.get_perms_fix_ignore()
broken_paths = virtinst.VirtualDisk.check_path_search_for_user(
conn.get_backend(),
path, user)
    # Filter rather than removing entries while iterating over the same list.
    broken_paths = [p for p in broken_paths if p not in skip_paths]
if not broken_paths:
return
logging.debug("No search access for dirs: %s", broken_paths)
resp, chkres = err.warn_chkbox(
_("The emulator may not have search permissions "
"for the path '%s'.") % path,
_("Do you want to correct this now?"),
_("Don't ask about these directories again."),
buttons=Gtk.ButtonsType.YES_NO)
if chkres:
config.running_config.add_perms_fix_ignore(broken_paths)
if not resp:
return
logging.debug("Attempting to correct permission issues.")
errors = virtinst.VirtualDisk.fix_path_search_for_user(conn.get_backend(),
path, user)
if not errors:
return
errmsg = _("Errors were encountered changing permissions for the "
"following directories:")
details = ""
for path, error in errors.items():
if path not in broken_paths:
continue
details += "%s : %s\n" % (path, error)
logging.debug("Permission errors:\n%s", details)
ignore, chkres = err.err_chkbox(errmsg, details,
_("Don't ask about these directories again."))
if chkres:
config.running_config.add_perms_fix_ignore(errors.keys())
######################################
# Interface startmode widget builder #
######################################
def build_startmode_combo(combo):
model = Gtk.ListStore(str)
combo.set_model(model)
set_combo_text_column(combo, 0)
model.append(["none"])
model.append(["onboot"])
model.append(["hotplug"])
#########################
# Console keycombo menu #
#########################
def build_keycombo_menu(cb):
menu = Gtk.Menu()
def make_item(name, combo):
item = Gtk.MenuItem.new_with_mnemonic(name)
item.connect("activate", cb, combo)
menu.add(item)
make_item("Ctrl+Alt+_Backspace", ["Control_L", "Alt_L", "BackSpace"])
make_item("Ctrl+Alt+_Delete", ["Control_L", "Alt_L", "Delete"])
menu.add(Gtk.SeparatorMenuItem())
for i in range(1, 13):
make_item("Ctrl+Alt+F_%d" % i, ["Control_L", "Alt_L", "F%d" % i])
menu.add(Gtk.SeparatorMenuItem())
make_item("_Printscreen", ["Print"])
menu.show_all()
return menu
#############
# Misc bits #
#############
def spin_get_helper(widget):
adj = widget.get_adjustment()
txt = widget.get_text()
    try:
        ret = int(txt)
    except ValueError:
        ret = adj.get_value()
return ret
def get_ideal_path_info(conn, name):
path = get_default_dir(conn)
suffix = ".img"
return (path, name, suffix)
def get_ideal_path(conn, name):
target, name, suffix = get_ideal_path_info(conn, name)
return os.path.join(target, name) + suffix
def get_default_pool(conn):
pool = None
for uuid in conn.list_pool_uuids():
p = conn.get_pool(uuid)
if p.get_name() == "default":
pool = p
return pool
def get_default_dir(conn):
pool = get_default_pool(conn)
if pool:
return pool.get_target_path()
else:
return config.running_config.get_default_image_dir(conn)
def get_default_path(conn, name, collidelist=None):
collidelist = collidelist or []
pool = get_default_pool(conn)
default_dir = get_default_dir(conn)
def path_exists(p):
return os.path.exists(p) or p in collidelist
if not pool:
# Use old generating method
origf = os.path.join(default_dir, name + ".img")
f = origf
n = 1
while path_exists(f) and n < 100:
f = os.path.join(default_dir, name +
"-" + str(n) + ".img")
n += 1
if path_exists(f):
f = origf
path = f
else:
target, ignore, suffix = get_ideal_path_info(conn, name)
# Sanitize collidelist to work with the collision checker
newcollidelist = []
for c in collidelist:
if c and os.path.dirname(c) == pool.get_target_path():
newcollidelist.append(os.path.basename(c))
path = virtinst.StorageVolume.find_free_name(
pool.get_backend(), name,
suffix=suffix, collidelist=newcollidelist)
path = os.path.join(target, path)
return path
def browse_local(parent, dialog_name, conn, start_folder=None,
_type=None, dialog_type=None,
confirm_func=None, browse_reason=None,
choose_button=None, default_name=None):
"""
Helper function for launching a filechooser
@param parent: Parent window for the filechooser
@param dialog_name: String to use in the title bar of the filechooser.
@param conn: vmmConnection used by calling class
@param start_folder: Folder the filechooser is viewing at startup
@param _type: File extension to filter by (e.g. "iso", "png")
@param dialog_type: Maps to FileChooserDialog 'action'
@param confirm_func: Optional callback function if file is chosen.
@param browse_reason: The vmmConfig.CONFIG_DIR* reason we are browsing.
        If set, this will override the 'start_folder' parameter with the gconf
value, and store the user chosen path.
"""
# Initial setup
overwrite_confirm = False
if dialog_type is None:
dialog_type = Gtk.FileChooserAction.OPEN
if dialog_type == Gtk.FileChooserAction.SAVE:
if choose_button is None:
choose_button = Gtk.STOCK_SAVE
overwrite_confirm = True
if choose_button is None:
choose_button = Gtk.STOCK_OPEN
fcdialog = Gtk.FileChooserDialog(title=dialog_name,
parent=parent,
action=dialog_type,
buttons=(Gtk.STOCK_CANCEL,
Gtk.ResponseType.CANCEL,
choose_button,
Gtk.ResponseType.ACCEPT))
fcdialog.set_default_response(Gtk.ResponseType.ACCEPT)
if default_name:
fcdialog.set_current_name(default_name)
# If confirm is set, warn about a file overwrite
if confirm_func:
overwrite_confirm = True
fcdialog.connect("confirm-overwrite", confirm_func)
fcdialog.set_do_overwrite_confirmation(overwrite_confirm)
# Set file match pattern (ex. *.png)
if _type is not None:
pattern = _type
name = None
if type(_type) is tuple:
pattern = _type[0]
name = _type[1]
f = Gtk.FileFilter()
f.add_pattern("*." + pattern)
if name:
f.set_name(name)
fcdialog.set_filter(f)
# Set initial dialog folder
if browse_reason:
start_folder = config.running_config.get_default_directory(conn,
browse_reason)
if start_folder is not None:
if os.access(start_folder, os.R_OK):
fcdialog.set_current_folder(start_folder)
# Run the dialog and parse the response
ret = None
if fcdialog.run() == Gtk.ResponseType.ACCEPT:
ret = fcdialog.get_filename()
fcdialog.destroy()
# Store the chosen directory in gconf if necessary
if ret and browse_reason and not ret.startswith("/dev"):
config.running_config.set_default_directory(os.path.dirname(ret),
browse_reason)
return ret
def pretty_hv(gtype, domtype):
"""
Convert XML <domain type='foo'> and <os><type>bar</type>
into a more human relevant string.
"""
gtype = gtype.lower()
domtype = domtype.lower()
label = domtype
if domtype == "kvm":
if gtype == "xen":
label = "xenner"
elif domtype == "xen":
if gtype == "xen":
label = "xen (paravirt)"
elif gtype == "hvm":
label = "xen (fullvirt)"
elif domtype == "test":
if gtype == "xen":
label = "test (xen)"
elif gtype == "hvm":
label = "test (hvm)"
return label
def iface_in_use_by(conn, name):
use_str = ""
for i in conn.list_interface_names():
iface = conn.get_interface(i)
if name in iface.get_slave_names():
if use_str:
use_str += ", "
use_str += iface.get_name()
return use_str
def chkbox_helper(src, getcb, setcb, text1, text2=None,
alwaysrecord=False,
default=True,
chktext=_("Don't ask me again")):
"""
Helper to prompt user about proceeding with an operation
Returns True if the 'yes' or 'ok' button was selected, False otherwise
@alwaysrecord: Don't require user to select 'yes' to record chkbox value
@default: What value to return if getcb tells us not to prompt
"""
do_prompt = getcb()
if not do_prompt:
return default
res = src.err.warn_chkbox(text1=text1, text2=text2,
chktext=chktext,
buttons=Gtk.ButtonsType.YES_NO)
response, skip_prompt = res
if alwaysrecord or response:
setcb(not skip_prompt)
return response
def get_list_selection(widget):
selection = widget.get_selection()
active = selection.get_selected()
treestore, treeiter = active
if treeiter is not None:
return treestore[treeiter]
return None
def set_list_selection(widget, rownum):
path = str(rownum)
selection = widget.get_selection()
selection.unselect_all()
widget.set_cursor(path)
selection.select_path(path)
def set_row_selection(listwidget, prevkey):
model = listwidget.get_model()
_iter = None
if prevkey:
for row in model:
if row[0] == prevkey:
_iter = row.iter
break
if not _iter:
_iter = model.get_iter_first()
if hasattr(listwidget, "get_selection"):
selection = listwidget.get_selection()
cb = selection.select_iter
else:
selection = listwidget
cb = selection.set_active_iter
if _iter:
cb(_iter)
selection.emit("changed")
def child_get_property(parent, child, propname):
# Wrapper for child_get_property, which pygobject doesn't properly
# introspect
value = GObject.Value()
value.init(GObject.TYPE_INT)
parent.child_get_property(child, propname, value)
return value.get_int()
def set_grid_row_visible(child, visible):
# For the passed widget, find its parent GtkGrid, and hide/show all
# elements that are in the same row as it. Simplifies having to name
# every element in a row when we want to dynamically hide things
    # based on UI interaction
parent = child.get_parent()
if not type(parent) is Gtk.Grid:
raise RuntimeError("Programming error, parent must be grid, "
"not %s" % type(parent))
row = child_get_property(parent, child, "top-attach")
for child in parent.get_children():
if child_get_property(parent, child, "top-attach") == row:
child.set_visible(visible)
def default_uri(always_system=False):
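    # Heuristic: prefer a Xen URI when a running xend is detected, otherwise
    # fall back to a qemu URI (system-wide for root or when always_system is
    # set, per-session otherwise).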
if os.path.exists('/var/lib/xend'):
if (os.path.exists('/dev/xen/evtchn') or
os.path.exists("/proc/xen")):
return 'xen:///'
if (os.path.exists("/usr/bin/qemu") or
os.path.exists("/usr/bin/qemu-kvm") or
os.path.exists("/usr/bin/kvm") or
os.path.exists("/usr/libexec/qemu-kvm")):
if always_system or os.geteuid() == 0:
return "qemu:///system"
else:
return "qemu:///session"
return None
def exception_is_libvirt_error(e, error):
return (hasattr(libvirt, error) and
e.get_error_code() == getattr(libvirt, error))
def log_redefine_xml_diff(obj, origxml, newxml):
objname = "<%s name=%s>" % (obj.__class__.__name__, obj.get_name())
if origxml == newxml:
logging.debug("Redefine requested for %s, but XML didn't change!",
objname)
return
import difflib
diff = "".join(difflib.unified_diff(origxml.splitlines(1),
newxml.splitlines(1),
fromfile="Original XML",
tofile="New XML"))
logging.debug("Redefining %s with XML diff:\n%s", objname, diff)
|
gpl-2.0
| 6,635,527,054,526,621,000
| 29.472702
| 80
| 0.576995
| false
| 3.694202
| false
| false
| false
|
edubecks/vaidecaronaorg
|
caronasbrasilapp/djangoapp/apps/caronasbrasil/robots/crawler.py
|
1
|
3567
|
# coding: utf-8
from pprint import pprint
import unidecode
from djangoapp.apps.caronasbrasil.model.caronasbrasil.carona_post import CaronaPost
from djangoapp.apps.caronasbrasil.model.fb_groups.fb_groups_controller import FBGroupsController
from djangoapp.apps.caronasbrasil.persistence_controller import PersistenceController
__author__ = 'edubecks'
class Crawler(object):
    ## default time_interval: 1 week = 60 min * 24 h * 7 d = 10080 minutes
def __init__(self, time_interval=10080):
self.time_interval = time_interval
return
def log_not_parsed_post(self,carona_post):
PersistenceController().add_parser_error(carona_post.fb_group_id,
carona_post.fb_post_id, carona_post.content_clean)
return
def post_is_commented(self, message):
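        # A message starting with '//' is treated as a commented-out post and
        # is skipped by retrieve_posts() below.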
message_decoded = unidecode.unidecode(message)
return message_decoded[:2]=='//'
def retrieve_posts(self, fb_group_id):
## persistence
persistence = PersistenceController()
city1, city1_state, city1_list, city2, city2_state, city2_list = \
persistence.get_cities_by_fb_group_id(fb_group_id)
## getting feed
fb_manager = FBGroupsController(fb_group_id)
feed = fb_manager.get_posts(last_time_checked=self.time_interval)
for fb_post in feed:
## check if the post is not commented
if (not self.post_is_commented(fb_post['message'])
## check if it is already parsed
and not persistence.exists_post(fb_post['id'])):
# pprint(fb_post)
## create new carona post
carona_post = CaronaPost(fb_post)
pprint(carona_post.content_clean)
## setting origin and destiny
carona_post.city1 = city1
carona_post.city1_state = city1_state
carona_post.city2 = city2
carona_post.city2_state = city2_state
carona_post.city1_list = city1_list
carona_post.city2_list = city2_list
## date / time
has_date_tag = carona_post.retrieve_date_tags()
carona_post.retrieve_time_tags()
# has_time_interval = carona_post.retrieve_time_interval()
                has_time_tag = bool(carona_post.tag_time)
## origin_destiny
has_origin_destiny = carona_post.retrieve_origin_destiny()
## oferecer/ procurar
has_ofereco_procuro = carona_post.retrieve_ofereco_procuro_tag()
## [OPTIONAL] numero de vagas
has_vagas = carona_post.retrieve_vagas()
## check the tag requirements
# print(has_date_tag, has_time_tag, has_origin_destiny, has_ofereco_procuro)
if has_date_tag and has_time_tag and has_origin_destiny and has_ofereco_procuro:
## saving in the db
# pprint(str(carona_post))
# pprint('---------------------')
persistence.add_carona(carona_post)
else:
print('*************** wrong')
pprint(carona_post.content_clean)
pprint(str(carona_post))
print('*******************************************')
self.log_not_parsed_post(carona_post)
else:
## TODO: call logger
pass
return
|
mit
| -4,877,079,740,951,469,000
| 37.771739
| 99
| 0.552285
| false
| 3.852052
| false
| false
| false
|
therewillbecode/ichnaea
|
ichnaea/api/views.py
|
1
|
5002
|
"""
Implementation of a API specific HTTP service view.
"""
import colander
import simplejson as json
import six
from ichnaea.api.exceptions import (
DailyLimitExceeded,
InvalidAPIKey,
ParseError,
)
from ichnaea.api.rate_limit import rate_limit_exceeded
from ichnaea.models.api import ApiKey
from ichnaea import util
from ichnaea.webapp.view import BaseView
if six.PY2: # pragma: no cover
from ipaddr import IPAddress as ip_address # NOQA
else: # pragma: no cover
from ipaddress import ip_address
class BaseAPIView(BaseView):
"""Common base class for all API related views."""
check_api_key = True #: Should API keys be checked?
error_on_invalidkey = True #: Deny access for invalid API keys?
metric_path = None #: Dotted URL path, for example v1.submit.
schema = None #: An instance of a colander schema to validate the data.
view_type = None #: The type of view, for example submit or locate.
def __init__(self, request):
super(BaseAPIView, self).__init__(request)
self.raven_client = request.registry.raven_client
self.redis_client = request.registry.redis_client
self.stats_client = request.registry.stats_client
def log_unique_ip(self, apikey_shortname):
try:
ip = str(ip_address(self.request.client_addr))
except ValueError: # pragma: no cover
ip = None
if ip:
redis_key = 'apiuser:{api_type}:{api_name}:{date}'.format(
api_type=self.view_type,
api_name=apikey_shortname,
date=util.utcnow().date().strftime('%Y-%m-%d'),
)
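            # The key is a Redis HyperLogLog (pfadd), giving an approximate
            # count of distinct client IPs per view type, API key and day.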
with self.redis_client.pipeline() as pipe:
pipe.pfadd(redis_key, ip)
pipe.expire(redis_key, 691200) # 8 days
pipe.execute()
def log_count(self, apikey_shortname, apikey_log):
self.stats_client.incr(
self.view_type + '.request',
tags=['path:' + self.metric_path,
'key:' + apikey_shortname])
if self.request.client_addr and apikey_log:
try:
self.log_unique_ip(apikey_shortname)
except Exception: # pragma: no cover
self.raven_client.captureException()
def check(self):
api_key = None
api_key_text = self.request.GET.get('key', None)
if api_key_text is None:
self.log_count('none', False)
if self.error_on_invalidkey:
raise InvalidAPIKey()
if api_key_text is not None:
try:
session = self.request.db_ro_session
api_key = session.query(ApiKey).get(api_key_text)
except Exception: # pragma: no cover
# if we cannot connect to backend DB, skip api key check
self.raven_client.captureException()
if api_key is not None:
self.log_count(api_key.name, api_key.log)
rate_key = 'apilimit:{key}:{time}'.format(
key=api_key_text,
time=util.utcnow().strftime('%Y%m%d')
)
should_limit = rate_limit_exceeded(
self.redis_client,
rate_key,
maxreq=api_key.maxreq
)
if should_limit:
raise DailyLimitExceeded()
else:
if api_key_text is not None:
self.log_count('invalid', False)
if self.error_on_invalidkey:
raise InvalidAPIKey()
# If we failed to look up an ApiKey, create an empty one
# rather than passing None through
api_key = api_key or ApiKey(valid_key=None)
return self.view(api_key)
def preprocess_request(self):
errors = []
request_content = self.request.body
if self.request.headers.get('Content-Encoding') == 'gzip':
# handle gzip self.request bodies
try:
request_content = util.decode_gzip(self.request.body)
except OSError as exc:
errors.append({'name': None, 'description': repr(exc)})
request_data = {}
try:
request_data = json.loads(
request_content, encoding=self.request.charset)
except ValueError as exc:
errors.append({'name': None, 'description': repr(exc)})
validated_data = {}
try:
validated_data = self.schema.deserialize(request_data)
except colander.Invalid as exc:
errors.append({'name': None, 'description': exc.asdict()})
if request_content and errors:
raise ParseError()
return (validated_data, errors)
def __call__(self):
"""Execute the view and return a response."""
if self.check_api_key:
return self.check()
else:
api_key = ApiKey(valid_key=None, allow_fallback=False, log=False)
return self.view(api_key)
|
apache-2.0
| -380,044,028,456,352,960
| 33.027211
| 77
| 0.57597
| false
| 4.0016
| false
| false
| false
|
jrheling/pid_controller
|
pid_controller/tests/PeakCounterTest.py
|
1
|
2050
|
#!/usr/bin/python
import PeakCounter
import unittest
import random
import time
class PeakCounterTest(unittest.TestCase):
def setUp(self):
self.PC = PeakCounter.PeakCounter()
def test_construct(self):
self.assertIsInstance(self.PC, PeakCounter.PeakCounter)
def test_initnumpeaks(self):
self.assertEquals(self.PC.num_peaks,0)
def test_fivepeaks(self):
self.PC = PeakCounter.PeakCounter(5)
self.assertEquals(self.PC._max_peaks,5)
def test_add_value(self):
"""docstring for test_add_value"""
self.PC.add_value(random.randint(0,100))
def test_set_lookback_sizeNaN(self):
with self.assertRaises(ValueError):
self.PC.lookback_size = "foo"
    def test_set_lookback_sizeTooSmall(self):
with self.assertRaises(ValueError):
self.PC.lookback_size = 1
def test_set_lookback_size(self):
i = random.randint(2,85)
self.PC.lookback_size = i
self.assertEquals(self.PC.lookback_size,i)
    def test_justInflected(self):
# FIXME: implement
pass
def test_get_num_peaks(self):
# FIXME: implement
pass
def test_sequence1(self):
seq = [ 5, 1, 2, 4, 12, 8, 3, 6, 1.5, 4, 5.3, 8.7, 8.6, 0.7]
# peaks should be 5, 12, 8.7
for i in seq:
self.PC.add_value(i)
time.sleep(0.1)
self.assertEquals(self.PC.num_peaks,3)
self.assertEquals(self.PC.get_last_peaks(2),[12, 8.7])
self.assertEquals(self.PC.get_last_peaks(4),[5, 12, 8.7])
self.assertEquals(self.PC.get_last_peaks(3),[5, 12, 8.7])
self.assertEquals(self.PC.get_last_peaks(1),[8.7])
## last_peak_delta includes processing time, so we can't predict it precisely
self.assertTrue((self.PC.last_peak_delta - 0.7) < 0.005)
pass
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(PeakCounterTest)
unittest.TextTestRunner(verbosity=2).run(suite)
|
apache-2.0
| 9,204,944,022,240,669,000
| 29.61194
| 85
| 0.611707
| false
| 3.311793
| true
| false
| false
|
NorthernLightsDataLab/pyCFL
|
pyCFL/core.py
|
1
|
1309
|
import urllib.request, json, re
import pyCFL.config as cfg
class cflAPI(object):
def __init__(self):
self.base_url = 'http://api.cfl.ca/v1'
self._set_api_key()
def _get_games_data(self, season, game_id=None):
if game_id:
api_url = self.base_url + '/games/' + str(season) + '/game/' + str(game_id)
else:
api_url = self.base_url + '/games/' + str(season)
with urllib.request.urlopen(self._build_url(api_url)) as url:
data = json.loads(url.read().decode())
return(data)
def _get_play_by_play(self, season, game_id):
api_url = self.base_url + '/games/' + str(season) + '/game/' + str(game_id) + '?include=play_by_play'
with urllib.request.urlopen(self._build_url(api_url)) as url:
data = json.loads(url.read().decode())
return(data)
def _set_api_key(self):
self.api_key = cfg.Settings().api_key
print('api key is: {}'.format(self.api_key))
def _build_url(self, url):
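        # Append the API key as a query parameter: use '&key=' when the URL
        # already carries a query string, '?key=' otherwise.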
try:
            if re.search(r'\?', url):
url = url + '&key=' + self.api_key
else:
url = url + '?key=' + self.api_key
except:
print("API must be set first using _set_api_key('YOUR_API_KEY')")
return(url)
|
mit
| 3,525,122,950,929,481,700
| 34.378378
| 109
| 0.537051
| false
| 3.240099
| false
| false
| false
|
vroomfondle/podi
|
lib/podi/util/util.py
|
1
|
4084
|
"""
Podi, a command-line interface for Kodi.
Copyright (C) 2015 Peter Frost <slimeypete@gmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from ..rpc.library import list_episodes, list_tv_shows, list_movies
SORT_ASC = 1
SORT_DESC = 2
def retrieve_sorted_episodes(rpc, tv_show_id, sort_field='episodeid'):
"""
Sends a JSON RPC call to retrieve a list of episodes for the given show.
:param rpc A callable which will send the JSONRPC request to the Kodi server
:param tv_show_id The id of the target show
"""
episodes = rpc(list_episodes(tv_show_id)).get(
'episodes', [])
for episode in sorted(
episodes,
key=lambda episode: episode[sort_field]):
yield episode
def retrieve_sorted_movies(rpc, sort_field='movieid', filters=None):
"""
Sends a JSON RPC call to retrieve a list of movies.
:param rpc A callable which will send the JSONRPC request to the Kodi server
"""
movies = rpc(list_movies(filters=filters)).get('movies', [])
for movie in sorted(
movies,
key=lambda movie: movie[sort_field]):
yield movie
def retrieve_sorted_shows(rpc, tv_show_id=None, sort_field='tvshowid'):
"""
Sends a JSON RPC call to retrieve a list of TV shows.
:param rpc A callable which will send the JSONRPC request to the Kodi server.
:param tv_show_id If set, restrict the list to a single id.
"""
shows = rpc(list_tv_shows()).get('tvshows', [])
for show in sorted(shows, key=lambda show: show[sort_field]):
if (tv_show_id is None) or int(show['tvshowid']) == int(tv_show_id):
yield show
def list_to_dicts(key, input_list):
"""
Turns a list of values into a list of single-entry dicts, with the provided key,
so that the dicts can be used with pystache. The list is modified in-place.
"""
for index in range(len(input_list)):
input_list[index] = {key: input_list[index]}
def align_fields_for_display(items, fields):
"""
Pads/truncates fields in each item to the specified length and puts the result in index ('display'+field_name).
:param fields A list of tuples (str,int): (field_name, length).
:returns the input list with padded items.
"""
for item in items:
for (field_name, length) in fields:
            if isinstance(item[field_name], str):
field_value = item[field_name]
else:
field_value = str(item[field_name])
item['display{0}'.format(field_name)] = field_value[
0:length - 1].ljust(length)
return items
def format_runtime(video_item):
"""
Finds the longest video stream in a given item, and returns a dict:
{'total_seconds':n, 'hours':n, 'minutes':n, 'seconds':n, 'str':"{hours}:{minutes}:{seconds}"}.
If the 'streamdetails' sub-dict is entirely missing, expect to see an IndexError.
:param video_item An item as returned in response to the JSON defined by the lib.podi.rpc.library methods
Should include a sub-dict called 'streamdetails'.
"""
minutes, seconds = divmod(int(video_item['runtime']), 60)
hours, minutes = divmod(minutes, 60)
return {
'total_seconds': video_item['runtime'],
'hours': hours,
'minutes': minutes,
'seconds': seconds,
'str': "{0:02d}:{1:02d}:{2:02d}".format(hours, minutes, seconds),
}
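# Illustrative example (not part of the original module): for a video_item
# with {'runtime': 3725} this yields hours=1, minutes=2, seconds=5 and
# 'str' == "01:02:05".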
|
gpl-3.0
| -5,527,879,337,605,631,000
| 36.46789
| 115
| 0.652057
| false
| 3.813259
| false
| false
| false
|
h-mayorquin/camp_india_2016
|
tutorials/chemical switches/moose/neuroml/GranuleCell/Granule98.py
|
1
|
2209
|
## Aditya Gilra, NCBS, Bangalore, 2012
"""
Inside the .../moose-examples/GranuleCell/ directory supplied with MOOSE, run
python testNeuroML_Gran98.py
(other channels and morph xml files are already present in this same directory).
The soma name below is hard coded for gran98, else any other file can be used by modifying this script.
"""
#import os
#os.environ['NUMPTHREADS'] = '1'
import sys
#sys.path.append('../../../python')
import moose
from moose.utils import *
from moose.neuroml.NeuroML import NeuroML
from pylab import *
simdt = 1e-6 # s
plotdt = 10e-6 # s
runtime = 0.7 # s
def loadGran98NeuroML_L123(filename):
neuromlR = NeuroML()
populationDict, projectionDict = \
neuromlR.readNeuroMLFromFile(filename)
# readNeuroMLFromFile returns populationDict = { 'populationname1':(cellname,{int(instanceid1):moosecell, ... }) , ... }
# and projectionDict = { 'projectionname1':(source,target,[(syn_name1,pre_seg_path,post_seg_path),...]) , ... }
soma_path = populationDict['Gran'][1][0].path+'/Soma_0'
somaVm = setupTable('somaVm',moose.Compartment(soma_path),'Vm')
somaCa = setupTable('somaCa',moose.CaConc(soma_path+'/Gran_CaPool_98'),'Ca')
somaIKCa = setupTable('somaIKCa',moose.HHChannel(soma_path+'/Gran_KCa_98'),'Gk')
#KDrX = setupTable('ChanX',moose.HHChannel(soma_path+'/Gran_KDr_98'),'X')
soma = moose.Compartment(soma_path)
print "Reinit MOOSE ... "
resetSim(['/elec','/cells'],simdt,plotdt,simmethod='ee') # from moose.utils
print "Running ... "
moose.start(runtime)
# plotting
tvec = arange(0.0,runtime,plotdt)
plot(tvec,somaVm.vector[1:])
title('Soma Vm')
xlabel('time (s)')
ylabel('Voltage (V)')
figure()
plot(tvec,somaCa.vector[1:])
title('Soma Ca')
xlabel('time (s)')
ylabel('Ca conc (mol/m^3)')
figure()
plot(tvec,somaIKCa.vector[1:])
title('KCa current (A)')
xlabel('time (s)')
ylabel('')
print "Showing plots ..."
show()
filename = "GranuleCell.net.xml"
if __name__ == "__main__":
if len(sys.argv)<2:
filename = "GranuleCell.net.xml"
else:
filename = sys.argv[1]
loadGran98NeuroML_L123(filename)
|
mit
| -6,429,822,865,173,721,000
| 31.485294
| 124
| 0.650521
| false
| 2.832051
| false
| false
| false
|
jricardo27/travelhelper
|
old/core/lpparser.py
|
1
|
4319
|
__author__ = 'ricardo'
"""
Parse Lonely Planet pages
"""
import re
import urllib2
from collections import OrderedDict
from bs4 import BeautifulSoup
def get_text(elem):
"""
Return the element's text encoded in utf-8
"""
return '\n'.join(elem.stripped_strings).encode('utf8')
def parse_sight_index(index_url):
"""
Return all the links found in the sight page
"""
index = 1
sight_urls = []
while True:
url = '{0}.json?page={1}'.format(index_url, index)
print('Downloading page {0}'.format(url))
index += 1
try:
content = urllib2.urlopen(url)
sight_urls += _parse_sight_index_page(content)
except urllib2.HTTPError:
break
return sight_urls
def _parse_sight_index_page(html):
"""
Parse a country's sights page from Lonely Planet and return
and array of urls to the information
"""
soup = BeautifulSoup(html)
content = soup.find_all('div', class_='stack__content')[0]
cols = content.find_all('div', class_='col--one-whole')
return [
col.a.get('href')
for col in cols
if col.a.get('href', False)
]
def _parse_sight_info(soup):
"""
Parse the information that appears at the right of the page
"""
dd_list = soup.find_all('dd')
info = {}
for elem in dd_list:
key = get_text(elem.find_previous('dt'))
value = get_text(elem)
if key in info:
info[key] = '{0}<br/>{1}'.format(info[key], value)
else:
info[key] = value
return info
def get_country_city(url):
"""
Parse the given url and return the country and city name
"""
regex = r'\.com/([^/]*).*?/([^/]*)$'
if 'sights' in url:
regex = regex.replace(r'$', r'/sights.*')
try:
country, city = re.findall(regex, url)[0]
except IndexError:
city = None
country = None
return country, city
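# Illustrative examples (assumed URLs, not taken from the original source):
#   get_country_city('http://www.lonelyplanet.com/france/paris')
#       -> ('france', 'paris')
#   get_country_city('http://www.lonelyplanet.com/france/paris/sights/louvre')
#       -> ('france', 'paris')  # the '/sights' variant of the regex applies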
def parse_sight(url):
"""
Download and parse an individual sight page
Return a dictionary
"""
print('Parsing {0}'.format(url))
country, city = get_country_city(url)
soup = BeautifulSoup(urllib2.urlopen(url).read())
sight = OrderedDict()
sight['title'] = get_text(soup.h1)
sight['url'] = url
sight['city'] = city
sight['country'] = country
attributes = (
'Prices',
'Opening hours',
'More information',
'Address',
'Getting there',
)
info = _parse_sight_info(soup)
for attr_ in attributes:
if attr_ in info:
sight[attr_] = info[attr_]
sight['description'] = get_text(
soup.find_all('div', class_='ttd__section--description')[0]
)
try:
images = soup.find_all('div', class_='tab__content')[0].find_all('img')
prefix = 'http://'
img_url = images[0].get('src')
if img_url[:len(prefix)] != prefix:
try:
img_url = images[0].get('src').split(prefix)[1]
img_url = '{0}{1}'.format(prefix, img_url)
except IndexError:
pass
if 'maps.googleapis.com' not in img_url:
sight['image_src'] = img_url
except IndexError:
pass
return sight
def parse_price(price_string):
"""
Return the result of applying a regex over the string
"""
regex_exp = {
'currencies': u'\u20AC|Dh',
'price': u'[\d\.]+',
'exclude_years': u'(?!-?\d?yr)',
'hours': u'(?!am|pm|hr|\xBD)',
}
regex = u'({currencies})?({price}){exclude_years}{hours}'.format(
**regex_exp
)
return re.findall(regex, price_string)
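# Illustrative example (assumption, not verified against real listing data):
# parse_price(u'Dh35') should return [(u'Dh', u'35')] with the regex above.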
def parse_opening_hours(input_string):
"""
Return the result of applying a regex over the input string
9am-5pm
11am Mon-Fri
2.30pm Mon-Fri, 11am & 2.30pm Sat & Sun
9.30am-4pm Tue-Sun
9am-7pm Mon-Fri, to 4pm Sat
8pm-midnight Tue-Sun
06:00-18:00
24hr
1-6pm Tue-Sat
10.30am-7pm Tue-Sat, to 5.30pm Sun
6-10am & 4-8pm
9.30am-3.30pm Mon-Fri, to 1pm Sat & Sun
Mass 7.30pm Mon-Sat, 8am, 10am & 7.30pm Sun
10am-8pm May-Aug, shorter hours in winter
10am-1pm & 2-6pm Tue-Sun
9am-6pm Oct-Mar, 9am-7pm Apr, 9am-8pm May-Sep
closed to the public
"""
pass
|
bsd-3-clause
| -1,066,586,597,003,418,100
| 23
| 79
| 0.561704
| false
| 3.358476
| false
| false
| false
|
usccolumbia/CSCE206_Projects
|
WinnerTakeAllPokerGame/winnertakeall.py
|
1
|
4415
|
#University of South Carolina
#CSCE206 Scientific Application Programming
#Fall 2014 Final project
#Poker game
import Tkinter
from Tkinter import *
import random
def shuffledeck():
deck = []
for s in ['Clubs', 'Diamonds', 'Hearts', 'Spades']:
for n in range(2, 15):
deck.append([n, s])
random.shuffle(deck)
return deck
def cardnumber(card):
if card == 11:
return 'Jack'
elif card == 12:
return 'Queen'
elif card == 13:
return 'King'
elif card == 14:
return 'Ace'
else:
return str(card)
def deal(deck):
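    # Alternate the shuffled deck between the two players: even indices go to
    # player one, odd indices to player two.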
return deck[::2], deck[1::2]
def play(Jeffrey, siri):
if Jeffrey > siri:
return 'Jeffrey'
elif siri > Jeffrey:
return 'siri'
else:
return 'Tie'
def refill(cardswon):
random.shuffle(cardswon)
return cardswon
deck = shuffledeck()
Jeffreycards, siricards = deal(deck)
inplay = []
round = 0
Jeffreywon = []
siriwon = []
root = Tkinter.Tk()
canvas = Tkinter.Canvas(root)
canvas.grid(row = 0, column = 0)
def getImageName(card):
number=card[0]
suit=card[1]
map={"Diamonds":'d','Hearts':'h','Clubs':'c','Spades':'s'}
if number<10:
return 'Image/'+'0'+str(number)+map[suit]+'.gif'
elif number==14:
return 'Image/01' +map[suit]+'.gif'
else:
return 'Image/'+str(number)+map[suit]+'.gif'
def OnButtonClick():
global labelVariable
global Jeffreywon,siriwon,inplay,deck,round,Jeffreycards, siricards,Tkinter,root,photo1,photo
global canvas
if len(Jeffreycards) == 0 or len(siricards) == 0:
if len(Jeffreycards) > len(siricards):
labelVariable.set("Jeffrey has won the game!")
elif len(siricards) > len(Jeffreycards):
labelVariable.set("siri has won the game!")
# labelVariable.set("game over")
return
round += 1
labelVariable.set( "Time for Round %d" % round)
Jeffreycard = Jeffreycards.pop(0)
siricard = siricards.pop(0)
# print Jeffreycard, siricard
photo = Tkinter.PhotoImage(file = getImageName(Jeffreycard))
canvas.create_image(50,130, image=photo)
photo1=Tkinter.PhotoImage(file = getImageName(siricard))
canvas.create_image(200,130, image=photo1)
inplay.extend([Jeffreycard, siricard])
labelVariable.set( "Jeffrey flips the %s of %s." % (cardnumber(Jeffreycard[0]), Jeffreycard[1]))
labelVariable.set( "siri flips the %s of %s." % (cardnumber(siricard[0]), siricard[1]))
roundwinner = play(Jeffreycard[0], siricard[0])
if roundwinner == 'Jeffrey':
labelVariable1.set( "Jeffrey wins this round!")
Jeffreywon.extend(inplay)
inplay = []
elif roundwinner == 'siri':
labelVariable1.set( "siri wins this round!")
siriwon.extend(inplay)
inplay = []
elif roundwinner == 'Tie':
labelVariable1.set( "Jeffrey and siri have tied!")
labelVariable.set( " %s cards %s cards." % (len(Jeffreywon)+len(Jeffreycards), len(siriwon)+len(siricards)))
if len(Jeffreycards) == 0 and len(Jeffreywon) > 0:
Jeffreycards = refill(Jeffreywon)
Jeffreywon = []
if len(siricards) == 0 and len(siriwon) > 0:
siricards = refill(siriwon)
siriwon = []
photo = Tkinter.PhotoImage(file = 'Image/back111.gif')
canvas.create_image(50,130, image=photo)
photo1 = Tkinter.PhotoImage(file = 'Image/back111.gif')
canvas.create_image(200,130, image=photo1)
# photo1=Tkinter.PhotoImage(file = 'Image/01h.gif')
# canvas.create_image(150,100, image=photo1)
button = Tkinter.Button(root,text=u"Play another round",
command=OnButtonClick)
button.grid(column=1,row=0)
labelVariable = Tkinter.StringVar()
label = Tkinter.Label(root,textvariable=labelVariable,anchor="w",fg="black",bg="white")
label.grid(column=0,row=6,columnspan=2,sticky='EW')
labelVariable.set(u"Let's Play!")
labelVariable1 = Tkinter.StringVar()
label1 = Tkinter.Label(root,textvariable=labelVariable1,anchor="w",fg="black",bg="white")
label1.grid(column=0,row=5,columnspan=1,sticky='EW')
labelVariable1.set(u"Hello!")
labelVariable2 = Tkinter.StringVar()
label2 = Tkinter.Label(root,textvariable=labelVariable2,anchor='w',fg="black",bg="white")
label2.grid(column=0,row=1,columnspan=1,sticky='EW')
labelVariable2.set(u" Jeffrey Siri ")
root.mainloop()
|
mit
| 7,677,825,762,820,851,000
| 26.949367
| 123
| 0.6453
| false
| 2.997284
| false
| false
| false
|
bretthandrews/marvin
|
python/marvin/tools/spectrum.py
|
1
|
7931
|
#!/usr/bin/env python
# encoding: utf-8
#
# spectrum.py
#
# Licensed under a 3-clause BSD license.
# Revision history:
# 13 Apr 2016 J. Sánchez-Gallego
# Initial version
from __future__ import division
from __future__ import print_function
import sys
import numpy as np
import matplotlib.pyplot as plt
class Spectrum(object):
"""A class representing an spectrum with extra functionality.
Parameters:
flux (array-like):
            The 1-D array containing the spectrum.
units (str, optional):
The units of the flux spectrum.
wavelength (array-like, optional):
The wavelength solution for ``spectrum``. Must have the same number
of elements.
ivar (array-like, optional):
The inverse variance array for ``spectrum``. Must have the same
number of elements.
mask (array-like, optional):
The mask array for ``spectrum``. Must have the same number of
elements.
wavelength_unit (str, optional):
The units of the wavelength solution.
"""
def __init__(self, flux, units=None, wavelength_unit=None,
ivar=None, mask=None, wavelength=None):
self.flux = np.array(flux)
self.ivar = np.array(ivar) if ivar is not None else None
self.mask = np.array(mask) if mask is not None else None
self.wavelength = np.array(wavelength) if wavelength is not None else None
self.units = units
self.wavelength_unit = wavelength_unit
# Performs some checks.
assert len(self.flux.shape) == 1, 'spectrum must be 1-D'
if self.ivar is not None:
assert len(self.ivar.shape) == 1, 'ivar must be 1-D'
assert len(self.flux) == len(self.ivar), \
                'ivar must have the same length as the base spectrum'
if self.mask is not None:
assert len(self.mask.shape) == 1, 'mask must be 1-D'
assert len(self.flux) == len(self.mask), \
                'mask must have the same length as the base spectrum'
if self.wavelength is not None:
assert len(self.wavelength.shape) == 1, 'wavelength must be 1-D'
assert len(self.flux) == len(self.wavelength), \
                'wavelength must have the same length as the base spectrum'
def __repr__(self):
"""Representation for Spectrum."""
return '<Marvin Spectrum ({0!s})'.format(self.flux)
def plot(self, array='flux', xlim=None, ylim=(0, None), mask_color=None,
xlabel=None, ylabel=None, figure=None, return_figure=False, **kwargs):
"""Plots a spectrum using matplotlib.
Returns a |axes|_ object with a representation of this spectrum.
The returned ``axes`` object can then be showed, modified, or saved to
        a file. If running Marvin from an iPython console with
        `matplotlib.pyplot.ion()
        <http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.ion>`_
        enabled, the plot will be displayed interactively.
Parameters:
array ({'flux', 'ivar', 'mask'}):
The array to display, defaults to the internal spectrum with
which the object was initialised.
xlim,ylim (tuple-like or None):
The range to display for the x- and y-axis, respectively,
defined as a tuple of two elements ``[xmin, xmax]``. If
the range is ``None``, the range for the axis will be set
                automatically by matplotlib. If ``Spectrum.wavelength`` is
defined, the range in the x-axis must be defined as a
wavelength range. Default for ylim is (0, None), which cuts
off negative values but lets the maximum float.
xlabel,ylabel (str or None):
The axis labels to be passed to the plot. If ``xlabel=None``
and ``Spectrum.wavelength_unit`` is defined, those units will
be used, after being properly formatted for Latex display.
If ``ylabel=None``, the y-axis label will be automatically
                defined based on the type of input array.
mask_color (matplotlib valid color or None):
If set and ``Spectrum.mask`` is defined, the elements of
``array`` with ``mask`` will be coloured using that value.
More information about `matplotlib colours
<http://matplotlib.org/api/colors_api.html>`_.
figure (matplotlib Figure object or None):
The matplotlib figure object from which the axes must be
created. If ``figure=None``, a new figure will be created.
return_figure (bool):
If ``True``, the matplotlib Figure object used will be returned
along with the axes object.
kwargs (dict):
Any other keyword argument that will be passed to
`matplotlib.pyplot.plot
<http://matplotlib.org/api/pyplot_api.html#matplotlib.pyplot.plot>`_.
Returns:
ax:
The `matplotlib.axes <http://matplotlib.org/api/axes_api.html>`_
object containing the plot representing the spectrum. If
``return_figure=True``, a tuple will be returned of the form
``(ax, fig)``.
Example:
>>> spectrum = Spectrum(np.arange(100), wavelength=np.arange(100)*0.1)
            >>> ax = spectrum.plot(xlim=[5, 7])
>>> ax.show()
We can change the range of the axes after the object has been created.
>>> ax.set_xlim(3, 8)
>>> ax.show()
.. |axes| replace:: matplotlib.axes
.. _axes: http://matplotlib.org/api/axes_api.html
"""
array = array.lower()
validSpectrum = ['flux', 'ivar', 'mask']
assert array in validSpectrum, 'array must be one of {0!r}'.format(validSpectrum)
if array == 'flux':
data = self.flux
elif array == 'ivar':
data = self.ivar
elif array == 'mask':
data = self.mask
        xaxis = self.wavelength if self.wavelength is not None else np.arange(len(self.flux))
fig = plt.figure() if figure is None else figure
ax = fig.add_subplot(111)
ax.plot(xaxis, data, **kwargs)
# This does not work very well for small ranges of masked elements.
# Probably requires some rethinking.
if mask_color is not None:
mask_indices = np.where(self.mask > 0)
kwargs['color'] = mask_color
ax.plot(xaxis[mask_indices], data[mask_indices], **kwargs)
if xlim is not None:
assert len(xlim) == 2
ax.set_xlim(*xlim)
if ylim is not None:
assert len(ylim) == 2
ax.set_ylim(*ylim)
if xlabel is None:
if self.wavelength is not None:
xlabel = 'Wavelength'
if self.wavelength_unit == 'Angstrom':
xlabel += r' $[\rm\AA]$'
elif self.wavelength_unit is not None:
xlabel += r' [{0}]'.format(self.wavelength_unit)
else:
xlabel = ''
if ylabel is None:
if array == 'flux':
ylabel = 'Flux'
if self.units == '1e-17 erg/s/cm^2/Ang/spaxel':
ylabel += r' $[\rm 10^{-17}\,erg\,s^{-1}\,cm^{-2}\,\AA^{-1}\,spaxel^{-1}]$'
elif self.units is not None:
ylabel += r' [{0}]'.format(self.units)
elif array == 'ivar':
ylabel = 'Inverse variance'
else:
ylabel = ''
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)
if return_figure:
return (ax, fig)
else:
return ax
|
bsd-3-clause
| -7,553,470,793,640,075,000
| 37.495146
| 95
| 0.562926
| false
| 4.206897
| false
| false
| false
|
monome/ansible
|
tools/flash_tools/commands/extract/extract_presets.py
|
1
|
1236
|
import argparse
import json
from commands.extract.extractor import PresetExtractor
def extract(args):
extractor = PresetExtractor(args.firmware, args.version, args.hexfile)
presets, image = extractor.extract()
with open(args.out, 'w') as outf:
outf.write(json.dumps(
presets,
indent=4 if args.pretty else None,
))
print('{} preset written to {}'.format(extractor.target_version, args.out))
def command(parser):
parser.add_argument(
'hexfile',
type=str,
help='name of the hex dump file to inspect'
)
parser.add_argument(
'--version',
type=str,
help='firmware version of the ansible which saved the preset',
default='3.0.0'
)
parser.add_argument(
'--target_version',
type=str,
help='firmware version to target with the JSON output'
)
parser.add_argument(
'--out',
type=str,
help='JSON file to write the preset to',
default='ansible-preset.json'
)
parser.add_argument(
'--pretty',
action='store_true',
help='pretty-print the JSON output',
default=False,
)
parser.set_defaults(func=extract)
|
gpl-2.0
| -2,786,293,025,625,807,400
| 25.297872
| 79
| 0.599515
| false
| 3.974277
| false
| false
| false
|
hesamd/hazm
|
hazm/Lemmatizer.py
|
1
|
6668
|
# coding: utf-8
from __future__ import unicode_literals
import codecs
from .utils import default_words, default_verbs
from .Stemmer import Stemmer
from .WordTokenizer import WordTokenizer
class Lemmatizer(object):
"""
>>> lemmatizer = Lemmatizer()
>>> lemmatizer.lemmatize('کتابها')
'کتاب'
>>> lemmatizer.lemmatize('آتشفشان')
'آتشفشان'
>>> lemmatizer.lemmatize('میروم')
'رفت#رو'
>>> lemmatizer.lemmatize('گفته_شده_است')
'گفت#گو'
>>> lemmatizer.lemmatize('نچشیده_است')
'چشید#چش'
>>> lemmatizer.lemmatize('مردم', pos='N')
'مردم'
>>> lemmatizer.lemmatize('اجتماعی', pos='AJ')
'اجتماعی'
"""
def __init__(self, words_file=default_words, verbs_file=default_verbs, joined_verb_parts=True):
self.verbs = {}
self.words = set([])
self.stemmer = Stemmer()
if words_file:
with codecs.open(words_file, encoding='utf8') as words_file:
self.words = set(map(lambda w: w.strip(), words_file))
if verbs_file:
tokenizer = WordTokenizer(verbs_file=verbs_file)
self.verbs['است'] = '#است'
for verb in tokenizer.verbs:
for tense in self.conjugations(verb):
self.verbs[tense] = verb
if joined_verb_parts:
for verb in tokenizer.verbs:
bon = verb.split('#')[0]
for after_verb in tokenizer.after_verbs:
self.verbs[bon + 'ه_' + after_verb] = verb
self.verbs['ن' + bon + 'ه_' + after_verb] = verb
for before_verb in tokenizer.before_verbs:
self.verbs[before_verb + '_' + bon] = verb
def lemmatize(self, word, pos=''):
if not pos and word in self.words:
return word
if (not pos or pos == 'V') and word in self.verbs:
return self.verbs[word]
if pos.startswith('AJ') and word[-1] == 'ی':
return word
if pos == 'PRO':
return word
if word in self.words:
return word
stem = self.stemmer.stem(word)
if stem and stem in self.words:
return stem
return word
def conjugations(self, verb):
"""
>>> lemmatizer = Lemmatizer()
>>> lemmatizer.conjugations('خورد#خور')
['خوردم', 'خوردی', 'خورد', 'خوردیم', 'خوردید', 'خوردند', 'نخوردم', 'نخوردی', 'نخورد', 'نخوردیم', 'نخوردید', 'نخوردند', 'خورم', 'خوری', 'خورد', 'خوریم', 'خورید', 'خورند', 'نخورم', 'نخوری', 'نخورد', 'نخوریم', 'نخورید', 'نخورند', 'میخوردم', 'میخوردی', 'میخورد', 'میخوردیم', 'میخوردید', 'میخوردند', 'نمیخوردم', 'نمیخوردی', 'نمیخورد', 'نمیخوردیم', 'نمیخوردید', 'نمیخوردند', 'خوردهام', 'خوردهای', 'خورده', 'خوردهایم', 'خوردهاید', 'خوردهاند', 'نخوردهام', 'نخوردهای', 'نخورده', 'نخوردهایم', 'نخوردهاید', 'نخوردهاند', 'خورم', 'خوری', 'خورد', 'خوریم', 'خورید', 'خورند', 'نخورم', 'نخوری', 'نخورد', 'نخوریم', 'نخورید', 'نخورند', 'میخورم', 'میخوری', 'میخورد', 'میخوریم', 'میخورید', 'میخورند', 'نمیخورم', 'نمیخوری', 'نمیخورد', 'نمیخوریم', 'نمیخورید', 'نمیخورند', 'بخورم', 'بخوری', 'بخورد', 'بخوریم', 'بخورید', 'بخورند', 'نخورم', 'نخوری', 'نخورد', 'نخوریم', 'نخورید', 'نخورند', 'بخور', 'نخور']
>>> lemmatizer.conjugations('آورد#آور')
['آوردم', 'آوردی', 'آورد', 'آوردیم', 'آوردید', 'آوردند', 'نیاوردم', 'نیاوردی', 'نیاورد', 'نیاوردیم', 'نیاوردید', 'نیاوردند', 'آورم', 'آوری', 'آورد', 'آوریم', 'آورید', 'آورند', 'نیاورم', 'نیاوری', 'نیاورد', 'نیاوریم', 'نیاورید', 'نیاورند', 'میآوردم', 'میآوردی', 'میآورد', 'میآوردیم', 'میآوردید', 'میآوردند', 'نمیآوردم', 'نمیآوردی', 'نمیآورد', 'نمیآوردیم', 'نمیآوردید', 'نمیآوردند', 'آوردهام', 'آوردهای', 'آورده', 'آوردهایم', 'آوردهاید', 'آوردهاند', 'نیاوردهام', 'نیاوردهای', 'نیاورده', 'نیاوردهایم', 'نیاوردهاید', 'نیاوردهاند', 'آورم', 'آوری', 'آورد', 'آوریم', 'آورید', 'آورند', 'نیاورم', 'نیاوری', 'نیاورد', 'نیاوریم', 'نیاورید', 'نیاورند', 'میآورم', 'میآوری', 'میآورد', 'میآوریم', 'میآورید', 'میآورند', 'نمیآورم', 'نمیآوری', 'نمیآورد', 'نمیآوریم', 'نمیآورید', 'نمیآورند', 'بیاورم', 'بیاوری', 'بیاورد', 'بیاوریم', 'بیاورید', 'بیاورند', 'نیاورم', 'نیاوری', 'نیاورد', 'نیاوریم', 'نیاورید', 'نیاورند', 'بیاور', 'نیاور']
"""
past, present = verb.split('#')
ends = ['م', 'ی', '', 'یم', 'ید', 'ند']
if verb == '#هست':
return ['هست' + end for end in ends] + ['نیست' + end for end in ends]
past_simples = [past + end for end in ends]
past_imperfects = ['می' + item for item in past_simples]
ends = ['هام', 'های', 'ه', 'هایم', 'هاید', 'هاند']
past_narratives = [past + end for end in ends]
imperatives = ['ب' + present, 'ن' + present]
if present.endswith('ا') or present in ('آ', 'گو'):
present = present + 'ی'
ends = ['م', 'ی', 'د', 'یم', 'ید', 'ند']
present_simples = [present + end for end in ends]
present_imperfects = ['می' + item for item in present_simples]
present_subjunctives = [item if item.startswith('ب') else 'ب' + item for item in present_simples]
present_not_subjunctives = ['ن' + item for item in present_simples]
with_nots = lambda items: items + list(map(lambda item: 'ن' + item, items))
aa_refinement = lambda items: list(map(lambda item: item.replace('بآ', 'بیا').replace('نآ', 'نیا'), items)) if items[0].startswith('آ') else items
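        # aa_refinement rewrites forms of verbs whose stem starts with 'آ'
        # (e.g. 'بآ...' -> 'بیا...' and 'نآ...' -> 'نیا...'), as shown for
        # 'آورد#آور' in the doctest above.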
return aa_refinement(with_nots(past_simples) + with_nots(present_simples) + with_nots(past_imperfects) + with_nots(past_narratives) + with_nots(present_simples) + with_nots(present_imperfects) + present_subjunctives + present_not_subjunctives + imperatives)
|
mit
| -5,352,278,371,199,366,000
| 47.388889
| 952
| 0.628205
| false
| 1.735636
| false
| false
| false
|
speignier/suppositoire
|
consistency_check.py
|
1
|
3723
|
# -*- coding: utf-8 -*-
"""
Created on Fri Jun 19 22:31:53 2015
@author: Ilya
"""
# %% prepare
import pandas as pd
from datetime import datetime
import calendar
import re
import numpy as np
dtt = lambda datetime_str: datetime.strptime(datetime_str, '%Y-%m-%d %H:%M:%S')
df_train = pd.read_csv('../data/train.csv')
# %% consistency check
# Time:
dofw_c = map(lambda s: calendar.day_name[dtt(s).weekday()], df_train['Dates'].values)
print 'Days of week and dates are '+['INCONSISTENT','CONSISTENT'][int( (df_train['DayOfWeek']==dofw_c).all() )]
# Space:
# Split Address:
clean_street = lambda street: re.search('[0-9A-Z-]+ *[A-Z]{0,2}$',street.replace('/','').strip()).group(0)
streets_cleaned_splited = df_train['Address'].apply(lambda x: map(clean_street, x.split(' / ')))
df1=pd.concat([streets_cleaned_splited, df_train[['X','Y']]], axis=1)
# Split streets so to have pairs 1 Street - 1 XY:
df_streets_XY = pd.DataFrame.from_records([[lx]+list(l[1:]) for l in df1.values for lx in l[0]], columns=['Street', 'X', 'Y'])
# Compute quantiles to filter XY:
quantiles_to_compute = [0.05, 0.95]
quants = df_streets_XY.groupby('Street').quantile(quantiles_to_compute+[0.5]).rename(index=dict(zip(quantiles_to_compute, ['min','max'])))
#quants = pd.concat([quants, pd.concat([df_streets_XY.groupby('Street').mean()], keys=['mean']).swaplevel(0,1)], verify_integrity=True)
# widen borders
qut=quants.unstack().T
eps=0.5
ksi=2.0 # if 3 then 'bad' points disappear from 'good's' area
qut=qut.T.swaplevel(0,1, axis=1)
qut['min'] = qut['min'] - eps*(qut[0.5]-qut['min']) # encircle streets locally
qut['max'] = qut['max'] - eps*(qut[0.5]-qut['max']) # encircle streets locally
qut['min'] -= ksi*df_streets_XY[['Y','X']].std() # encircle streets with the std of all points
qut['max'] += ksi*df_streets_XY[['Y','X']].std() # encircle streets with the std of all points
#qut['min'] = qut['min'] - eps*(qut['mean']-qut['min'])
#qut['max'] = qut['max'] - eps*(qut['mean']-qut['max'])
qut = qut[qut.columns[qut.columns.get_level_values(0)!=0.5]]
#qut = qut[qut.columns[qut.columns.get_level_values(0)!='mean']]
qut=qut.swaplevel(0,1, axis=1).T
# convert to tuples:
streets_cleaned_splited=pd.Series(map(tuple,streets_cleaned_splited), index=streets_cleaned_splited.index)
# remove dupes:
list_of_unique_streets=map(list, streets_cleaned_splited.unique())
# find bounds for X and Y
bnds = map(lambda x: qut[x].mean(axis=1).swaplevel(0,1), list_of_unique_streets)
dft=pd.concat(bnds, keys=streets_cleaned_splited.unique(), axis=1)
df_tuple_streets_XY = df_train[['X','Y']].set_index(streets_cleaned_splited) # similar to df1
df_bounds_tuple_streets_XY = dft[streets_cleaned_splited].T
bool_idx=((df_bounds_tuple_streets_XY['min']<df_tuple_streets_XY) & (df_bounds_tuple_streets_XY['max']>df_tuple_streets_XY)).all(axis=1).values
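# bool_idx keeps rows whose (X, Y) falls inside the widened per-street bounds
# computed above; the remaining rows are treated as spatially inconsistent.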
df_train_good = df_train[bool_idx]
df_train_badgly = df_train[np.logical_not(bool_idx)]
# %% The spatial visualization of the result
import matplotlib.pylab as plt
print 'The good:'
print df_train_good.head()
ax_good = df_train_good.plot(kind='scatter',x='Y',y='X', title='The good', alpha=0.5)
print 'The bad and the ugly:'
print df_train_badgly.head()
ax = df_train_badgly.plot(kind='scatter',x='Y',y='X', title='The bad', alpha=0.5)
ax.set_xlim(ax_good.get_xlim())
ax.set_ylim(ax_good.get_ylim())
ax_bagly = df_train_badgly.plot(kind='scatter',x='Y',y='X', title='The ugly', alpha=0.5)
# Error ellipse
from matplotlib.patches import Ellipse
mn = tuple(df_streets_XY[['Y','X']].mean().values)
ksi_sd = tuple(ksi*df_streets_XY[['Y','X']].std().values)
elg = lambda : Ellipse(xy=mn, alpha=0.3, color='#aa7722', **dict(zip(['width','height'],ksi_sd)))
ax.add_patch(elg())
ax_bagly.add_patch(elg())
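# Hedged follow-up (illustration only; the output path below is hypothetical):
# keep the filtered rows for later modelling and display the scatter plots.
df_train_good.to_csv('../data/train_cleaned.csv', index=False)
plt.show()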
|
gpl-2.0
| 5,107,858,348,160,119,000
| 41.306818
| 143
| 0.678754
| false
| 2.52065
| false
| false
| false
|
miing/mci_migo
|
acceptance/tests/emails/email_token_link.py
|
1
|
1115
|
# 6) Ensure tokens work from the email, and also clicking the
# link in the email works.
from sst.actions import (
assert_element,
assert_title_contains,
click_button,
get_element,
go_to,
wait_for,
write_textfield,
)
from u1testutils import mail
from u1testutils.sso import mail as sso_mail
from u1testutils.sst import config
from acceptance import helpers, urls
config.set_base_url_from_env()
NAME = 'Some Name'
# Register the primary account.
primary_email = helpers.register_account(displayname=NAME)
# Register a secondary email, and grab the link from the email sent to
# the secondary address.
secondary_email = mail.make_unique_test_email_address()
go_to(urls.EMAILS)
wait_for(assert_title_contains, "'s email addresses")
write_textfield('id_newemail', secondary_email)
click_button(get_element(name='continue'))
link = sso_mail.get_verification_link_for_address(secondary_email)
# Follow the link from the email to ensure it verifies the secondary
# address.
go_to(link)
click_button(get_element(name='continue'))
wait_for(assert_element, **{'data-qa-id': 'edit_account'})
|
agpl-3.0
| 1,245,991,233,961,042,700
| 28.342105
| 70
| 0.749776
| false
| 3.318452
| false
| true
| false
|
repotvsupertuga/repo
|
plugin.video.zen/resources/lib/sources/watchfilm.py
|
1
|
4031
|
# -*- coding: utf-8 -*-
'''
Exodus Add-on
Copyright (C) 2016 Exodus
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse,base64
import requests
from resources.lib.modules import client
from resources.lib.modules import directstream
from BeautifulSoup import BeautifulSoup
from resources.lib.modules import jsunpack
from schism_net import OPEN_URL
from schism_commons import quality_tag, google_tag, parseDOM, replaceHTMLCodes ,cleantitle_get, cleantitle_query
class source:
def __init__(self):
self.base_link = 'http://watchfilm.to'
self.movie_link = '/movies/%s/'
self.ep_link = '/episode/%s/'
def movie(self, imdb, title, year):
self.zen_url = []
try:
# print("WATCHCARTOON")
title = cleantitle_query(title)
title = title.replace(' ','-')
query = self.movie_link % (title)
url = urlparse.urljoin(self.base_link, query)
return url
except:
return
# http://blackcinema.org/episodes/ash-vs-evil-dead-1x2/
def tvshow(self, imdb, tvdb, tvshowtitle, year):
try:
url = {'tvshowtitle': tvshowtitle, 'year': year}
url = urllib.urlencode(url)
return url
except:
return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
self.zen_url = []
try:
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
data['season'], data['episode'] = season, episode
episodeid = "%01dx%01d" % (int(data['season']) , int(data['episode']))
title = cleantitle_query(title)
title = title.replace(' ','-')
query = title + "-" + episodeid
query= self.ep_link % query
url = urlparse.urljoin(self.base_link, query)
print("Watchfilm TV SHOW", url)
return url
except:
return
def sources(self, url, hostDict, hostprDict):
sources = []
try:
if url == None: return
try:
link = OPEN_URL(url, timeout='10')
print("Watchfilm link", link.content)
html = link.content
r = re.compile('<a href="(.+?)" target="streamplayer">').findall(html)
for result in r:
print("Watchfilm SOURCES", result)
result = result.encode('utf-8')
if result.startswith("//"): result = "http:" + result
if "player.watchfilm.to" in result:
try:
s = OPEN_URL(result, timeout='10')
s = s.content
match = re.compile('file:\s*"(.+?)",label:"(.+?)",').findall(s)
for href, quality in match:
quality = google_tag(href)
print("WONLINE SCRIPTS", href,quality)
sources.append({'source': 'gvideo', 'quality':quality, 'provider': 'Watchfilm', 'url': href, 'direct': True, 'debridonly': False})
except:
pass
try:
s = OPEN_URL(result, timeout='10')
s = s.content
match = re.compile('var ff =\s*"(.+?)";').findall(s)
for href in match:
quality = "SD"
try:host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(href.strip().lower()).netloc)[0]
except: host = 'none'
url = replaceHTMLCodes(href)
url = url.encode('utf-8')
if host in hostDict: sources.append({'source': host, 'quality':quality, 'provider': 'Watchfilm', 'url': href, 'direct': False, 'debridonly': False})
except:
pass
except:
pass
return sources
except:
return sources
def resolve(self, url):
return url
|
gpl-2.0
| -2,366,760,702,463,126,000
| 28.423358
| 156
| 0.639295
| false
| 3.25869
| false
| false
| false
|
kamilkloch/pdr-dfki
|
python/database.py
|
1
|
3427
|
from __future__ import (absolute_import, division, print_function)
from couchdb import Server
from couchdb.design import ViewDefinition
class Database(object):
""" TODO: docstring
"""
def __init__(self, server_url=u'http://127.0.0.1:5984/', db_name='ble-new'):
# 'http://dfki-1239.dfki.uni-kl.de:5984/'
self.server_url, self.db_name = server_url, db_name
self.couch = Server(self.server_url)
self.db = self.couch[self.db_name]
def __getitem__(self, doc_id):
""" returns the database document
"""
return self.db[doc_id]
def _sync_permanent_views(self):
view = ViewDefinition('elvis',
'newest_location_documents_from_elvis', '''
function(doc) {
if (doc.source && doc.source == "elvis" && doc.location)
emit(doc.dest, doc.location.positions[0].timestamp);
}''',
'''
function(keys, values, rereduce) {
if (rereduce) {
var result = {
id: 'fffaef464c42c6ffe0285be3d7da3684',
timestamp: '2113-08-04 19:09:24:089'
};
return (result);
} else {
var result = {
id: keys[0][1],
timestamp: values[0]
};
for (var i = 1, e = keys.length; i < e; ++i) {
if (values[i] > result.timestamp) {
result.timestamp = values[i];
result.id = keys[i][1];
}
}
return (result);
}
}'''
)
view.sync(self.db)
view = ViewDefinition('elvis', 'all_location_documents_from_elvis', '''
function(doc) {
if (doc.source && doc.source == "elvis" && doc.location)
emit([doc.location.positions[doc.location.positions.length-1].timestamp, doc.dest]);
}'''
)
view.sync(self.db)
view = ViewDefinition('elvis', 'all_ble_documents', '''
function(doc) {
if (doc.ble)
emit([doc.ble[doc.ble.length-1].timestamp, doc.source]);
}'''
)
view.sync(self.db)
view = ViewDefinition('elvis', "all_location_documents_from_reckonme", '''
function(doc) {
if (doc.dest && doc.source && doc.timestamp && doc.location && doc.dest == 'elvis') {
emit([doc.timestamp, doc.source])
}
}'''
)
view.sync(self.db)
def view_result(self, view_str):
""" returns a representation of a parameterized view
(either permanent or temporary)
"""
return self.db.view("_design/elvis/_view/" + view_str)
def test():
db = Database()
db._sync_permanent_views()
print(len(list(db.view_result(u'all_location_documents_from_reckonme'))))
for row in db.view_result('all_location_documents_from_reckonme'):
print(row.id)
print(db[row.id])
break
if __name__ == '__main__':
test()
|
mit
| 6,161,475,019,724,955,000
| 33.626263
| 104
| 0.457251
| false
| 4.114046
| false
| false
| false
|
madoodia/codeLab
|
python/modules_platform.py
|
1
|
1829
|
# ===============================================
# MODULE STUDY: platform
import platform
################################ Cross Platform ################################
platform.architecture() # Returns a tuple (bits, linkage)
platform.machine() # Returns the machine type, e.g. 'i386'
platform.node() # Returns the computer’s network name
platform.platform() # Returns a single string identifying the underlying platform with as much useful information as possible.
platform.processor() # Returns the (real) processor name, e.g. 'amdk6'.
platform.python_build() # Returns a tuple (buildno, builddate) stating the Python build number and date as strings.
platform.python_compiler() # Returns a string identifying the compiler used for compiling Python.
platform.python_branch() # Returns a string identifying the Python implementation SCM branch.
platform.python_implementation() # Returns a string identifying the Python implementation
platform.python_revision() # Returns a string identifying the Python implementation SCM revision.
platform.python_version() # Returns the Python version as string 'major.minor.patchlevel'
platform.python_version_tuple() # Returns the Python version as tuple (major, minor, patchlevel) of strings.
platform.release() # Returns the system’s release, e.g. '2.2.0' or 'NT'
platform.system() # Returns the system/OS name, e.g. 'Linux', 'Windows', or 'Java'
platform.version() # Returns the system’s release version
platform.uname() # Fairly portable uname interface.
# Returns a tuple of strings (system, node, release, version, machine, processor) identifying the underlying platform.
platform.win32_ver() # Availability: windows
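# A small combined example (added for illustration; the dict keys below are
# chosen arbitrarily here, they are not part of the platform API):
summary = {
    'system': platform.system(),        # e.g. 'Linux', 'Windows', 'Darwin'
    'release': platform.release(),
    'machine': platform.machine(),
    'python': platform.python_version(),
}
print(summary)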
|
mit
| 965,381,198,625,149,200
| 39.511111
| 146
| 0.664838
| false
| 4.361244
| false
| false
| false
|
BrewCenter/BrewCenterAPI
|
brewcenter_api/brew_data/data_miner/brew_target/fermentables.py
|
1
|
4778
|
"""
Extracts Fermentables from the database, transforms them, and builds a new db.
"""
from brew_data.data_miner.brew_target.utils import clean, convert_country
class Fermentable:
def __init__(self, data):
self.name = data[0]
self.type = data[1]
self.potential = data[2]
self.lovibond = data[3]
self.origin = data[4]
self.supplier = data[5]
self.notes = clean(data[6])
self.coarse_fine_diff = data[7]
self.moisture = data[8]
self.diastatic_power = data[9]
self.protein = data[10]
self.max_in_batch = data[11]
self.is_mashed = data[12]
self.transform()
def transform(self):
"""transforms the data as neccessary to fit our specs"""
self.name = '"' + self.name + '"'
# convert boolean to integer for sqlite
self.is_mashed = (1 if self.is_mashed == 'true' else 0)
# Sugar has a PPG of 46. Multiply the potential percent yield by 46 to
# get PPG of a grain
self.ppg = 46 * (self.potential / 100)
self.country = convert_country(self.origin)
# parse type
if self.type == "Extract":
self.type = "Liquid Malt Extract"
elif self.type == "Dry Extract":
self.type = "Dry Malt Extract"
if len(self.type) == 0:
self.type = "NULL"
else:
self.type = '"' + self.type + '"'
# convert "None" notes to empty
if self.notes is None:
self.notes = '""'
else:
self.notes = '"' + self.notes + '"'
    @staticmethod
    def get_keys():
return ("name, type_id, country_id, notes, ppg, lovibond, moisture, "
"diastatic_power, protein, max_in_batch, is_mashed")
def __str__(self):
format_str = '{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10}'
return format_str.format(
self.name,
self.type_id,
self.country_id,
self.notes,
self.ppg,
self.lovibond,
self.moisture,
self.diastatic_power,
self.protein,
self.max_in_batch,
self.is_mashed,
)
def get_fermentables(s, d):
"""
Gets fermentables from the source (s) and puts them in the destination (d).
"""
d.execute('DROP TABLE IF EXISTS fermentabletype;')
d.execute('DROP TABLE IF EXISTS fermentable;')
d.execute('CREATE TABLE fermentabletype(name TEXT, abbreviation TEXT);')
d.execute('CREATE TABLE fermentable(' \
'name TEXT,' \
'type_id int,' \
'country_id int,' \
'ppg FLOAT,' \
'lovibond FLOAT,' \
'moisture FLOAT,' \
'diastatic_power FLOAT,' \
'protein FLOAT,' \
'max_in_batch FLOAT,' \
'is_mashed INT,' \
'notes TEXT' \
');'
)
s.execute('SELECT "name", "ftype", "yield", "color", "origin", "supplier", "notes", "coarse_fine_diff", "moisture", "diastatic_power", "protein", "max_in_batch", "is_mashed" FROM fermentable WHERE `deleted`=0;')
cur = s.fetchone()
n = 0
while cur:
f = Fermentable(cur)
# check if the country code exists already and add it if it does not
f.country_id = 'NULL'
        if f.country != 'NULL':
d.execute('SELECT `rowid` FROM countrycode WHERE code={0};'.format(f.country))
country_code_id = d.fetchone()
if country_code_id is None:
d.execute('INSERT INTO countrycode(code) VALUES ({0});'.format(f.country))
d.execute('SELECT `rowid` FROM countrycode WHERE code={0};'.format(f.country))
country_code_id = d.fetchone()
f.country_id = country_code_id[0] if country_code_id else 'NULL'
# check if the type already exists and add it if it does not
f.type_id = 'NULL'
        if f.type != 'NULL':
d.execute('SELECT `rowid` FROM fermentabletype WHERE name={0};'.format(f.type))
type_id = d.fetchone()
if type_id is None:
d.execute('INSERT INTO fermentabletype(name) VALUES({0});'.format(f.type))
d.execute('SELECT `rowid` FROM fermentabletype WHERE name={0};'.format(f.type))
type_id = d.fetchone()
f.type_id = type_id[0] if type_id else 'NULL'
query = 'INSERT INTO fermentable({0}) VALUES({1});'.format(Fermentable.get_keys(), str(f))
d.execute(query)
n += 1
cur = s.fetchone()
print("Found {0} fermentables.".format(n))
|
gpl-3.0
| 4,258,352,382,892,321,300
| 36.328125
| 216
| 0.525952
| false
| 3.603318
| false
| false
| false
|
Zen-CODE/kivybits
|
Examples/ColorPickerPopup/main.py
|
1
|
2985
|
'''
Demo Popup with a ColorPicker
'''
from kivy.app import App
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.colorpicker import ColorPicker
from kivy.uix.popup import Popup
from kivy.uix.label import Label
class ColorPopup(App):
'''
    This class represents your application. There should be only one per app.
'''
def build(self):
'''
This method is called automatically and should return your "root"
widget.
'''
self.label = Label(text="Colour not selected.")
layout = BoxLayout(
orientation="vertical",
padding=[50, 50, 50, 50])
layout.add_widget(self.label)
layout.add_widget(
Button(
text="Select colour",
on_release=self.select_color))
layout.add_widget(
Button(
text="OK and Cancel",
on_release=lambda inst: self.select_color(inst, False)))
return layout
def select_color(self, instance, no_buts=True):
'''
The button click has fired the event, so show the popup.
no_buts is boolean and specifies whether to include buttons
in the popup or not.
'''
popup = Popup(
title="Select your colour",
size_hint=(0.75, 0.75))
# NOTE: the below properties can also be passed in to the Popup
# constructor but we do them separately for clarity.
if no_buts:
colorPicker = ColorPicker()
popup.bind(
on_dismiss=lambda popup: \
self.popup_dismissed(popup, colorPicker.hex_color))
popup.content = colorPicker
else:
# We prevent the default dismiss behaviour and roll our own in
# the content.
popup.auto_dismiss = False
popup.content = self.get_ok_cancel_content(popup)
popup.open()
def popup_dismissed(self, popup, color):
''' The popup has been dismissed'''
self.label.text = "Colour in hex = " + color
def get_ok_cancel_content(self, popup):
'''Return content with OK and cancel buttons for validating'''
colorPicker = ColorPicker()
buttonLayout = BoxLayout(orientation="horizontal",
padding="5sp",
size_hint_y=0.2)
        # Popup.dismiss() returns None, so chaining the calls with 'and' would
        # short-circuit and never invoke popup_dismissed; evaluate both in a
        # tuple instead.
        okButton = Button(
            text="Okay",
            on_release=lambda but: (
                popup.dismiss(),
                self.popup_dismissed(popup, colorPicker.hex_color)))
cancelButton = Button(
text="Cancel",
on_release=lambda but: popup.dismiss())
buttonLayout.add_widget(okButton)
buttonLayout.add_widget(cancelButton)
mainLayout = BoxLayout(orientation="vertical")
mainLayout.add_widget(colorPicker)
mainLayout.add_widget(buttonLayout)
return mainLayout
if __name__ == '__main__':
ColorPopup().run()
|
mit
| -1,846,627,650,480,030,200
| 32.166667
| 75
| 0.586265
| false
| 4.301153
| false
| false
| false
|
pkrebs/WIDPS
|
fw_modules/module_daemon.py
|
1
|
5578
|
#!/usr/bin/python
# -*- coding: iso-8859-15 -*-
#
# module_daemon.py - WIDS/WIPS framework frame daemon base class module
# Copyright (C) 2009 Peter Krebs, Herbert Haas
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by the
# Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, see http://www.gnu.org/licenses/gpl-2.0.html
"""Daemon module template
Provides the Daemon class which turns another python class into a daemon process.
This module was thankfully obtained from Sander Marechal at:
http://www.jejik.com/articles/2007/02/a_simple_unix_linux_daemon_in_python
"""
# Imports
#
# Custom modules
# Standard modules
import atexit
import os
from signal import SIGTERM, SIGKILL
import sys
import time
class DaemonClass():
"""
A generic daemon class.
    Usage: subclass DaemonClass and override the run() method
"""
def __init__(self, pidfile, stdin='/dev/null', stdout='/dev/null', stderr='/dev/null'):
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.pidfile = pidfile
self.pid = None
def daemonize(self):
"""
do the UNIX double-fork magic, see Stevens' "Advanced
Programming in the UNIX Environment" for details (ISBN 0201563177)
http://www.erlenstar.demon.co.uk/unix/faq_2.html#SEC16
"""
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# decouple from parent environment
#os.chdir("/")
os.chdir(os.getcwd()) # set current working directory instead of root
os.setsid()
os.umask(0)
# do second fork
try:
pid = os.fork()
if pid > 0:
# exit from second parent
sys.exit(0)
except OSError, e:
sys.stderr.write("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
sys.exit(1)
# redirect standard file descriptors
sys.stdout.flush()
sys.stderr.flush()
si = file(self.stdin, 'r')
so = file(self.stdout, 'a+')
se = file(self.stderr, 'a+', 0)
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
# write pidfile
#atexit.register(self.delpid)
self.pid = str(os.getpid())
file(self.pidfile,'w+').write("%s\n" % self.pid)
def delpid(self):
"""
Removes the pidfile.
"""
try:
os.remove(self.pidfile)
except OSError:
print "No pidfile to remove"
def start(self):
"""
Start the daemon
"""
# Check for a pidfile to see if the daemon already runs
try:
pf = file(self.pidfile,'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if pid:
message = "pidfile %s already exist. Daemon already running?\n"
sys.stderr.write(message % self.pidfile)
sys.exit(1)
# Start the daemon
self.daemonize()
self.run()
def stop(self):
"""
Stop the daemon
"""
# Get the pid from the pidfile
try:
pf = file(self.pidfile,'r')
pid = int(pf.read().strip())
pf.close()
except IOError:
pid = None
if not pid:
message = "pidfile %s does not exist. Daemon not running?\n"
sys.stderr.write(message % self.pidfile)
return # not an error in a restart
# Try killing the daemon process
killcounter = 0
kill_threshold = 20
try:
while 1:
os.kill(pid, SIGTERM)
killcounter = killcounter + 1
if killcounter > kill_threshold:
message = "Process not reacting, sending SIGKILL\n"
sys.stderr.write(message)
os.kill(pid, SIGKILL)
killcounter = 0
time.sleep(1)
except OSError, err:
err = str(err)
if err.find("No such process") > 0:
if os.path.exists(self.pidfile):
os.remove(self.pidfile)
else:
print str(err)
sys.exit(1)
def restart(self):
"""
Restart the daemon
"""
self.stop()
self.start()
def run(self):
"""
You should override this method when you subclass Daemon. It will be called after the process has been
daemonized by start() or restart().
"""
pass
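# Hedged usage sketch (added for illustration; the subclass below is
# hypothetical and not part of the original framework): override run() with
# the real processing loop and drive start()/stop() from a launcher script.
class ExampleDaemon(DaemonClass):
    """Minimal daemon that only sleeps; replace run() with real work."""
    def run(self):
        while True:
            time.sleep(60)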
if __name__ == "__main__":
print "Warning: This module is not intended to be executed directly. Only do this for test purposes."
|
gpl-2.0
| 7,683,708,546,887,967,000
| 28.209424
| 110
| 0.542668
| false
| 4.165795
| false
| false
| false
|
fyabc/MiniGames
|
HearthStone2/MyHearthStone/ui/ui_pyqt/ui_dialog_create_deck.py
|
1
|
11521
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'dialog_create_deck.ui'
#
# Created by: PyQt5 UI code generator 5.9
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_DialogCreateDeck(object):
def setupUi(self, DialogCreateDeck):
DialogCreateDeck.setObjectName("DialogCreateDeck")
DialogCreateDeck.resize(351, 418)
self.verticalLayout = QtWidgets.QVBoxLayout(DialogCreateDeck)
self.verticalLayout.setSizeConstraint(QtWidgets.QLayout.SetDefaultConstraint)
self.verticalLayout.setObjectName("verticalLayout")
self.group_class = QtWidgets.QGroupBox(DialogCreateDeck)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.group_class.setFont(font)
self.group_class.setObjectName("group_class")
self.gridLayout_2 = QtWidgets.QGridLayout(self.group_class)
self.gridLayout_2.setObjectName("gridLayout_2")
self.radioButton_Druid = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Druid.setFont(font)
self.radioButton_Druid.setChecked(True)
self.radioButton_Druid.setObjectName("radioButton_Druid")
self.gridLayout_2.addWidget(self.radioButton_Druid, 0, 0, 1, 1)
self.radioButton_Hunter = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Hunter.setFont(font)
self.radioButton_Hunter.setObjectName("radioButton_Hunter")
self.gridLayout_2.addWidget(self.radioButton_Hunter, 0, 1, 1, 1)
self.radioButton_Mage = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Mage.setFont(font)
self.radioButton_Mage.setObjectName("radioButton_Mage")
self.gridLayout_2.addWidget(self.radioButton_Mage, 0, 2, 1, 1)
self.radioButton_Priest = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Priest.setFont(font)
self.radioButton_Priest.setObjectName("radioButton_Priest")
self.gridLayout_2.addWidget(self.radioButton_Priest, 1, 0, 1, 1)
self.radioButton_Shaman = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Shaman.setFont(font)
self.radioButton_Shaman.setObjectName("radioButton_Shaman")
self.gridLayout_2.addWidget(self.radioButton_Shaman, 1, 1, 1, 1)
self.radioButton_Rogue = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Rogue.setFont(font)
self.radioButton_Rogue.setObjectName("radioButton_Rogue")
self.gridLayout_2.addWidget(self.radioButton_Rogue, 1, 2, 1, 1)
self.radioButton_Paladin = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Paladin.setFont(font)
self.radioButton_Paladin.setObjectName("radioButton_Paladin")
self.gridLayout_2.addWidget(self.radioButton_Paladin, 2, 0, 1, 1)
self.radioButton_Warlock = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Warlock.setFont(font)
self.radioButton_Warlock.setObjectName("radioButton_Warlock")
self.gridLayout_2.addWidget(self.radioButton_Warlock, 2, 1, 1, 1)
self.radioButton_Warrior = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Warrior.setFont(font)
self.radioButton_Warrior.setObjectName("radioButton_Warrior")
self.gridLayout_2.addWidget(self.radioButton_Warrior, 2, 2, 1, 1)
self.radioButton_Monk = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_Monk.setFont(font)
self.radioButton_Monk.setObjectName("radioButton_Monk")
self.gridLayout_2.addWidget(self.radioButton_Monk, 3, 0, 1, 1)
self.radioButton_DeathKnight = QtWidgets.QRadioButton(self.group_class)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_DeathKnight.setFont(font)
self.radioButton_DeathKnight.setObjectName("radioButton_DeathKnight")
self.gridLayout_2.addWidget(self.radioButton_DeathKnight, 3, 1, 1, 1)
self.radioButton_Druid.raise_()
self.radioButton_Hunter.raise_()
self.radioButton_Mage.raise_()
self.radioButton_Priest.raise_()
self.radioButton_Shaman.raise_()
self.radioButton_Rogue.raise_()
self.radioButton_Paladin.raise_()
self.radioButton_Paladin.raise_()
self.radioButton_Warlock.raise_()
self.radioButton_Warrior.raise_()
self.radioButton_Monk.raise_()
self.radioButton_DeathKnight.raise_()
self.verticalLayout.addWidget(self.group_class)
self.line = QtWidgets.QFrame(DialogCreateDeck)
self.line.setFrameShape(QtWidgets.QFrame.HLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.verticalLayout.addWidget(self.line)
self.group_mode = QtWidgets.QGroupBox(DialogCreateDeck)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.group_mode.setFont(font)
self.group_mode.setObjectName("group_mode")
self.gridLayout = QtWidgets.QGridLayout(self.group_mode)
self.gridLayout.setObjectName("gridLayout")
self.radioButton_standard = QtWidgets.QRadioButton(self.group_mode)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_standard.setFont(font)
self.radioButton_standard.setChecked(True)
self.radioButton_standard.setObjectName("radioButton_standard")
self.gridLayout.addWidget(self.radioButton_standard, 0, 0, 1, 1)
self.radioButton_wild = QtWidgets.QRadioButton(self.group_mode)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(False)
font.setWeight(50)
self.radioButton_wild.setFont(font)
self.radioButton_wild.setObjectName("radioButton_wild")
self.gridLayout.addWidget(self.radioButton_wild, 0, 1, 1, 1)
self.verticalLayout.addWidget(self.group_mode)
self.line_2 = QtWidgets.QFrame(DialogCreateDeck)
self.line_2.setFrameShape(QtWidgets.QFrame.HLine)
self.line_2.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line_2.setObjectName("line_2")
self.verticalLayout.addWidget(self.line_2)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.label_deck_name = QtWidgets.QLabel(DialogCreateDeck)
font = QtGui.QFont()
font.setFamily("Microsoft YaHei UI")
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.label_deck_name.setFont(font)
self.label_deck_name.setObjectName("label_deck_name")
self.horizontalLayout.addWidget(self.label_deck_name, 0, QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
self.edit_deck_name = QtWidgets.QLineEdit(DialogCreateDeck)
self.edit_deck_name.setObjectName("edit_deck_name")
self.horizontalLayout.addWidget(self.edit_deck_name, 0, QtCore.Qt.AlignTop)
self.verticalLayout.addLayout(self.horizontalLayout)
self.buttonBox = QtWidgets.QDialogButtonBox(DialogCreateDeck)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.buttonBox.sizePolicy().hasHeightForWidth())
self.buttonBox.setSizePolicy(sizePolicy)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout.addWidget(self.buttonBox, 0, QtCore.Qt.AlignBottom)
self.retranslateUi(DialogCreateDeck)
self.buttonBox.accepted.connect(DialogCreateDeck.accept)
self.buttonBox.rejected.connect(DialogCreateDeck.reject)
QtCore.QMetaObject.connectSlotsByName(DialogCreateDeck)
def retranslateUi(self, DialogCreateDeck):
_translate = QtCore.QCoreApplication.translate
DialogCreateDeck.setWindowTitle(_translate("DialogCreateDeck", "新建套牌"))
self.group_class.setTitle(_translate("DialogCreateDeck", "选择职业"))
self.radioButton_Druid.setText(_translate("DialogCreateDeck", "德鲁伊"))
self.radioButton_Hunter.setText(_translate("DialogCreateDeck", "猎人"))
self.radioButton_Mage.setText(_translate("DialogCreateDeck", "法师"))
self.radioButton_Priest.setText(_translate("DialogCreateDeck", "牧师"))
self.radioButton_Shaman.setText(_translate("DialogCreateDeck", "萨满祭司"))
self.radioButton_Rogue.setText(_translate("DialogCreateDeck", "潜行者"))
self.radioButton_Paladin.setText(_translate("DialogCreateDeck", "圣骑士"))
self.radioButton_Warlock.setText(_translate("DialogCreateDeck", "术士"))
self.radioButton_Warrior.setText(_translate("DialogCreateDeck", "战士"))
self.radioButton_Monk.setText(_translate("DialogCreateDeck", "武僧"))
self.radioButton_DeathKnight.setText(_translate("DialogCreateDeck", "死亡骑士"))
self.group_mode.setTitle(_translate("DialogCreateDeck", "选择模式"))
self.radioButton_standard.setText(_translate("DialogCreateDeck", "标准模式"))
self.radioButton_wild.setText(_translate("DialogCreateDeck", "狂野模式"))
self.label_deck_name.setText(_translate("DialogCreateDeck", "套牌名称"))
|
mit
| 2,872,133,755,172,138,000
| 48.415584
| 106
| 0.687516
| false
| 3.511227
| false
| false
| false
|
natanocr/instadown
|
instadown.py
|
1
|
1429
|
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
from flask import Flask, request, render_template
import requests
from bs4 import BeautifulSoup
import os
app = Flask(__name__)
header = {"User-Agent":"instadown", "e-mail":"contato@contato.com"}
def get_data(url):
r = requests.get(url, headers=header)
    # initialize all fields so a missing meta tag cannot raise a NameError
    _content_title = _url_image = _url_video = ''
    if r.status_code == 200:
        sopa = BeautifulSoup(r.content, 'html.parser')
for meta in sopa.findAll("meta"):
if meta.get("property") == "og:title" and meta.get("content") != None:
_content_title = meta.get("content")
if meta.get("property") == "og:video" and meta.get("content") != None:
_url_video = meta.get("content")
elif meta.get("property") == "og:image" and meta.get("content") != None:
_url_image = meta.get("content")
if _url_video == '':
return dict(title=_content_title, image=_url_image)
else:
return dict(title=_content_title, video=_url_video)
return None
@app.route('/', methods=['GET', 'POST'])
def post():
if request.method == 'POST':
_url = request.form['url']
data = get_data(_url)
print data
return render_template('home.html', content_dow=data)
return render_template('home.html')
if __name__ == '__main__':
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
|
mit
| 5,269,061,608,380,642,000
| 29.404255
| 84
| 0.582225
| false
| 3.476886
| false
| false
| false
|
danmoser/pyhdust
|
pyhdust/rotstars.py
|
1
|
28843
|
# -*- coding:utf-8 -*-
"""PyHdust *rotstars* module: Rotating stars tools.
:co-author: Rodrigo Vieira
:license: GNU GPL v3.0 https://github.com/danmoser/pyhdust/blob/master/LICENSE
"""
from __future__ import print_function
import os as _os
import re as _re
import numpy as _np
from pyhdust import hdtpath as _hdtpath
from scipy.interpolate import griddata as _griddata
import pyhdust.phc as _phc
import tarfile as _tarfile
import warnings as _warn
# try:
# import matplotlib.pyplot as _plt
# from scipy import interpolate as _interpolate
# except:
# print('# Warning! matplotlib and/or scipy module not installed!!!')
__author__ = "Daniel Moser"
__email__ = "dmfaes@gmail.com"
def readscr(scrfile):
''' Read source generated with *ref_estrela.txt*.
OUTPUT: M, Req and TP (2*solar units and K).
'''
f0 = open(scrfile)
lines = f0.readlines()
f0.close()
n = int(_phc.fltTxtOccur('STAR =', lines, n=1))
M = _phc.fltTxtOccur('M =', lines, n=n)
Rp = _phc.fltTxtOccur('R_pole =', lines, n=n)
if n == 2:
ob = _phc.fltTxtOccur('R_eq/R_pole =', lines, n=1)
Tp = _phc.fltTxtOccur('Teff_pole =', lines, n=1)
else:
W = _phc.fltTxtOccur('W =', lines, n=1)
bet = _phc.fltTxtOccur('Beta_GD =', lines, n=1)
L = _phc.fltTxtOccur('L =', lines, n=n)
wfrac = _np.sqrt(27. / 8 * (1 + 0.5 * W**2)**-3 * W**2)
ob, Tp, A = rotStar(Tp=L, M=M, rp=Rp, beta=bet, wfrac=wfrac,
quiet=True, LnotTp=True)
# print M,Rp*ob,Tp
return M, Rp * ob, Tp
def vrot_scr(scrfile, old=True):
""" Returns the ``vrot`` value of a given source star.
OUTPUT: vrot in km/s. """
M, Req, Tp = readscr(scrfile)
# Be_M04.80_ob1.40_H0.30_Z0.014_bE_Ell
if old:
rule = '(?<=_ob)(.+)(?=_H)'
ob = float(_re.findall(rule, scrfile)[0])
else:
rule = '(?<=_W)(.+)(?=_t)'
W = float(_re.findall(rule, scrfile)[0])
ob = 1. + .5 * W**2
vrot = wrot(ob, is_ob=True) * \
_np.sqrt(_phc.G.cgs * _phc.Msun.cgs * M / Req / _phc.Rsun.cgs)
return vrot*1e-5
def wrot(par, is_ob=False):
r""" Converts :math:`w_{\rm frac} = \Omega/\Omega_c` into
:math:`W = vrot/vorb`.
If ``is_ob == True``, it considers the param as the oblateness (instead of
:math:`w_{\rm frac}`). """
if is_ob:
wfrac = (1.5 ** 1.5) * _np.sqrt(2. * (par - 1.) / par ** 3)
else:
wfrac = par
if wfrac != 0.:
gam = 2. * _np.cos((_np.pi + _np.arccos(wfrac)) / 3.)
W = _np.sqrt(gam ** 3 / wfrac)
else:
W = 0.
return W
def wfrac_rot(W):
""" Returns wfrac (Omega/Omega_crit) value from a W value.
Equation 1.23 de Faes (2015).
"""
if W < 0 or W > 1:
_warn.warn('Invalid W value')
return _np.sqrt(27/8.*W**2/(1+.5*W**2)**3)
def beta(par, is_ob=False):
r""" Calculate the :math:`\beta` value from Espinosa-Lara for a given
rotation rate :math:`w_{\rm frac} = \Omega/\Omega_c`
If ``is_ob == True``, it consider the param as ob (instead of
:math:`w_{\rm frac}`). """
wfrac = par
if is_ob: # Ekstrom et al. 2008, Eq. 9
wfrac = (1.5 ** 1.5) * _np.sqrt(2. * (par - 1.) / par ** 3)
# avoid exceptions
if wfrac == 0:
return .25
elif wfrac == 1:
return 0.13535
elif wfrac < 0 or wfrac > 1:
_warn.warn('Invalid value of wfrac.')
return 0.
# Espinosa-Lara VLTI-School 2013 lecture, slide 18...
delt = 1.
omega1 = 0.
omega = wfrac
while delt >= 1e-5:
f = (3. / (2. + omega**2))**3 * omega**2 - wfrac**2
df = -108. * omega * (omega**2 - 1.) / (omega**2 + 2.)**4
omega1 = omega - f / df
delt = _np.abs(omega1 - omega) / omega
omega = omega1
nthe = 99
theta = _np.linspace(0, _np.pi / 2, nthe + 2)[1:-1]
grav = _np.zeros(nthe)
teff = _np.zeros(nthe)
corr = _np.zeros(nthe)
beta = 0.
for ithe in range(nthe):
delt = 1.
r1 = 0.
r = 1.
while delt >= 1e-5:
f = omega**2 * r**3 * \
_np.sin(theta[ithe])**2 - (2. + omega**2) * r + 2.
df = 3. * omega**2 * r**2 * \
_np.sin(theta[ithe])**2 - (2. + omega**2)
r1 = r - f / df
delt = _np.abs(r1 - r) / r
r = r1
delt = 1.
n1 = 0.
ftheta = 1. / 3. * omega**2 * r**3 * _np.cos(theta[ithe])**3 + \
_np.cos(theta[ithe]) + _np.log(_np.tan(theta[ithe] / 2.))
n = theta[ithe]
while delt >= 1e-5:
f = _np.cos(n) + _np.log(_np.tan(n / 2.)) - ftheta
df = -_np.sin(n) + 1. / _np.sin(n)
n1 = n - f / df
delt = abs(n1 - n) / n
n = n1
grav[ithe] = _np.sqrt(1. / r**4 + omega**4 * r**2 * _np.sin(
theta[ithe])**2 - 2. * omega**2 * _np.sin(theta[ithe])**2 / r)
corr[ithe] = _np.sqrt(_np.tan(n) / _np.tan(theta[ithe]))
teff[ithe] = corr[ithe] * grav[ithe]**0.25
u = ~_np.isnan(teff)
coef = _np.polyfit(_np.log(grav[u]), _np.log(teff[u]), 1)
beta = coef[0]
return beta
def ellips_th(th, rf):
""" Ellipsoid radius
:param th: theta, in radians (0 = pole; pi/2 = equator).
:param rt: radius fraction (Req/Rp >= 1)
"""
return _np.sqrt(_np.cos(th)**2 + (rf*_np.sin(th))**2)
def rt(th, wfrac):
""" Roche Rpole normalized radius as function of wfrac.
:param th: theta, in radians (0 = pole; pi/2 = equator).
Based on Mc.Gill(?) and J. Patrick Harrington (notes) formula:
``r = 3/wfrac/np.sin(th)*np.cos(1/3.*(np.pi+np.arccos(wfrac*np.sin(th))))``
"""
if wfrac == 0:
wfrac = 1e-9
if th == 0:
r = 1.
else:
r = (-3. * _np.cos((_np.arccos(wfrac * _np.sin(th)) + 4 *
_np.pi) / 3)) / (wfrac * _np.sin(th))
return r
def rotStar(Tp=20000., M=10.3065, rp=5.38462, star='B', beta=0.25, wfrac=0.8,
th_res=5001, quiet=False, LnotTp=False):
""" Return the photospheric parameters of a rotating star.
``LnotTp``: the value of "Tp" is the Luminosity (in solar units).
Calculation of Von Zeipel's Beta parameter as function of W: see math...
INPUT: th_res (theta resolution, integer)...
OUTPUT: printed status + (ob, Tp values, Area[cm2])
"""
Rsun = _phc.Rsun.cgs
Msun = _phc.Msun.cgs
Lsun = _phc.Lsun.cgs
G = _phc.G.cgs
# AU = _phc.au.cgs
# pc = _phc.pc.cgs
sigma = _phc.sigma.cgs
M = M * Msun
rp = rp * Rsun
if wfrac == 0.:
wfrac = 1e-9
if LnotTp:
# Tp = (Tp * Lsun / 4. / _np.pi / rp**2 / sigma)**.25
Tp = (Tp*Lsun / sigma / sigma4b_cranmer(M/Msun, wfrac))**0.25 * \
(G*M / rp**2)**beta
# DEFS ###
# rh = outside
def area(wfrac):
ths = _np.linspace(_np.pi / 2, 0, th_res)
a = 0.
for i in range(len(ths)):
a = a + 2 * _np.pi * rt(ths[i], wfrac) ** 2 * _np.sin(ths[i])
return 2 * a * ths[-2]
def g(wfrac, M, rp, th):
wcrit = _np.sqrt(8 * G * M / (27 * rp ** 3))
g = (wcrit * wfrac) ** 2 * rp * rt(th, wfrac) * \
_np.sin(th) ** 2 - G * M / (rp * rt(th, wfrac)) ** 2
return g
def lum(wfrac, Tp, rp, M, C, beta):
ths = _np.linspace(_np.pi / 2, 0, th_res)
L = 0.
for i in range(len(ths)):
L = L + rt(ths[i], wfrac) ** 2 * _np.sin(ths[i]) * \
(abs(g(wfrac, M, rp, ths[i]))) ** (4 * beta)
return 2 * 2 * _np.pi * ths[-2] * sigma * rp ** 2 * C ** (4 * beta) * L
def lumf(wfrac, Tp, rp, M, beta):
ths = _np.linspace(_np.pi / 2, 0, th_res)
L = 0.
for i in range(len(ths)):
L = L + rt(ths[i], wfrac) ** 2 * _np.sin(ths[i]) * \
abs(g(wfrac, M, rp, ths[i])) ** (4 * beta)
return L * ths[-2] * rp ** 2
if star.startswith('B'):
Bstars = _np.array(bestarsHarm1988, dtype=str)
if star in Bstars:
i = _np.where(Bstars[:, 0] == star)
i = i[0][0]
print(Bstars[i][0])
Tp = float(Bstars[i][1])
M = float(Bstars[i][2]) * Msun
rp = float(Bstars[i][3]) * Rsun
# comentar linha abaixo se 1a. rodada:
# Tp = 27438.63 #K
wcrit = _np.sqrt(8 * G * M / (27 * rp ** 3))
C = Tp ** (1. / beta) / abs(G * M / rp ** 2)
vrot = wcrit * wfrac * rp * rt(_np.pi / 2, wfrac)
lum0 = 4 * _np.pi * rp ** 2 * sigma * Tp ** 4 / Lsun
# a = rp**2*Tp**4*abs(g(wfrac,M,rp,0.))**(4*beta)
# print('Teff_pol* = %.2f' % ( (a/b)**beta ) )
b = lumf(wfrac, Tp, rp, M, beta)
c = lumf(0.0001, Tp, rp, M, beta)
Cw = (c / b) ** (1. / (4. * beta)) * C
ob = rt(_np.pi / 2, wfrac) # /(rp / Rsun)
# OUTPUT ###
if not quiet:
print('# Parameters:')
print('wfrac = %.4f' % (wfrac))
print('W = %.4f' % (_np.sqrt(2 * (ob - 1))))
print('Star Mass = %.2f Msun' % (M / Msun))
print('Rpole = %.2f Rsun' % (rp / Rsun))
print('Req = %.2f Rpole' % (rt(_np.pi / 2, wfrac)))
print('Teff_pol = %.1f' % (Tp))
print('Star Area = %.2f' % (area(wfrac)))
        print('Star Lum. = %.1f' % (lum(wfrac, Tp, rp, M, C, beta) / Lsun))
print('Star Lum.*= %.1f' % (lum0))
print('vrot(km/s)= %.1f' % (vrot / 1e5))
print('vorb(km/s)= %.1f' %
(_np.sqrt(G * M / rp / rt(_np.pi / 2, wfrac)) / 1e5) )
print('vcrt(km/s)= %.1f' % (wcrit * rp * rt(_np.pi / 2, 1.) / 1e5))
print('log(g)pole= %.2f' % (_np.log10(abs(g(wfrac, M, rp, 0.))) ))
print('log(g)eq = %.2f' %
(_np.log10(abs(g(wfrac, M, rp, _np.pi / 2))) ))
print('Teff_eq = %.1f' %
( (C * abs(g(wfrac, M, rp, _np.pi / 2))) ** beta) )
print('Teff_eq* = %.1f' %
( (Cw * abs(g(wfrac, M, rp, _np.pi / 2))) ** beta) )
print('Teff_pol* = %.2f' % ( (Cw * abs(g(wfrac, M, rp, 0.))) ** beta) )
print('T_pol/eq* = %.4f' % ((Cw * abs(g(wfrac, M, rp, 0.))) ** beta /
(Cw * abs(g(wfrac, M, rp, _np.pi / 2))) ** beta) )
print('# \"*\" == case where L is constant!')
return ob, (Cw * abs(g(wfrac, M, rp, 0.))) ** beta, area(wfrac) * (rp**2)
def rochearea(wfrac, isW=False):
""" Calculate the Roche area of a rigid rotator.
Equation 4.23 from Cranmer 1996 (thesis).
Area in (squared) radial unit (it must be multiplied to Rpole**2 to a
physical size).
"""
if isW:
w = wfrac_rot(wfrac)
else:
w = wfrac
    return 4*_np.pi*(1+.19444*w**2+0.28053*w**4-1.9014*w**6+6.8298*w**8-
9.502*w**10+4.6631*w**12)
def sigma4b_cranmer(M, w):
'''Computes sigma4b defined in Cranmer 1996 (Eq. 4.22)
Usage:
s4b = sigma4b_cranmer(M, w)
where w=Omega/Omega_c, M=stellar mass in Msun (from 1.7 to 20.)
'''
dir0 = '{0}/refs/tables/'.format(_hdtpath())
tab = _np.load(dir0 + 'sigma4b_cranmer.npz')
s4b = _griddata(tab['parlist'], tab['sigma4b'], _np.array([M, w]),
method='linear')[0]
return s4b
bestarsHarm1988 = [
# The numbers below are based on Harmanec 1988
# B1.5 and B2.5 interpolated by Faes.
# Teff fixed: Rp2 from Lum1; Lum2 from Rp1.
# SpType Teff Mass Rp Lum '' Rp2 Lum2
['B0.0', 29854., 14.57, 05.80, 23948.8487173, 6.19, 27290.],
['B0.5', 28510., 13.19, 05.46, 17651.9502267, 5.80, 19953.],
['B1.0', 26182., 11.03, 04.91, 10152.9628687, 5.24, 11588.],
['B1.5', 24599., 09.72, 04.58, 6883.65832266, 4.87, 07768.],
['B2.0', 23121., 08.62, 04.28, 4691.72482578, 4.55, 05297.],
['B2.5', 20980., 07.18, 03.90, 2641.00783143, 4.11, 02931.],
['B3.0', 19055., 06.07, 03.56, 1497.45695726, 3.78, 01690.],
['B4.0', 17179., 05.12, 03.26, 829.555139678, 3.48, 00946.],
['B5.0', 15488., 04.36, 03.01, 467.232334920, 3.21, 00530.],
['B6.0', 14093., 03.80, 02.81, 279.154727515, 2.99, 00316.],
['B7.0', 12942., 03.38, 02.65, 176.569574061, 2.82, 00200.],
['B8.0', 11561., 02.91, 02.44, 95.3190701227, 2.61, 00109.],
['B9.0', 10351., 02.52, 02.25, 52.0850169839, 2.39, 0059.1]]
# ['B9.5', 09886., 02.38, 02.17, 00046., 2.32, 40.3107085348]]
bestarsSK1982 = [
# Schmidt-Kaller1982. Used (and interpolated) by Porter1996, Townsedn2004,
# SpType Teff Mass Rp Lum
['B0.0', 30105., 17.5, 7.70, 43651.],
['B0.5', 27859., 14.6, 6.90, 25703.],
['B1.0', 25985., 12.5, 6.30, 16218.],
['B1.5', 24347., 10.8, 5.70, 10232.],
['B2.0', 22813., 09.6, 5.40, 07079.],
['B2.5', 21498., 08.6, 5.00, 04786.],
['B3.0', 20222., 07.7, 4.70, 03311.],
['B4.0', 18206., 06.4, 4.20, 01737.],
['B5.0', 16673., 05.5, 3.80, 01000.],
['B6.0', 15302., 04.8, 3.50, 00602.],
['B7.0', 14103., 04.2, 3.20, 00363.],
['B8.0', 13202., 03.8, 3.00, 00245.],
['B9.0', 12246., 03.4, 2.80, 00158.]]
bestarsdJN1987 = [
# Derived by de Jager & Niewuwenhuijzen 1987 to the main sequence (b=5.)
# lum class IV (b=4.); Used by Cranmer2005
# Conclusion: 5 and 4 apper do be ZAMS and mid-MS; 3 late MS
# Conclusion: SpTypes appear to be shifhed by -1.0 here (cooler stars)
# SpType b-val Teff_V Mass_V Rp_5 Lum_V Teff_4 Mass_4 Rp_4 Lum_4
['B0.0', 1.200, 26841, 13.8, 6.58, 20134., 26911, 15.11, 7.84, 28919.],
['B0.5', 1.350, 24944, 11.4, 5.82, 11742., 24809, 12.30, 6.90, 16183.],
['B1.0', 1.500, 23213, 9.63, 5.16, 06917., 22915, 10.17, 6.11, 09222.],
['B1.5', 1.650, 21629, 8.17, 4.58, 04118., 21204, 08.54, 5.44, 05355.],
['B2.0', 1.800, 20178, 7.01, 4.08, 02478., 19655, 07.27, 4.87, 03171.],
['B2.5', 1.875, 19498, 6.51, 3.86, 01930., 18935, 06.74, 4.62, 02458.],
['B3.0', 1.950, 18846, 6.07, 3.65, 01508., 18250, 06.27, 4.39, 01915.],
['B4.0', 2.100, 17621, 5.31, 3.28, 00928., 16972, 05.48, 3.99, 01181.],
['B5.0', 2.250, 16493, 4.69, 2.95, 00578., 15810, 04.84, 3.64, 00743.],
['B6.0', 2.400, 15452, 4.18, 2.67, 00364., 14749, 04.33, 3.36, 00478.],
['B7.0', 2.550, 14491, 3.75, 2.42, 00232., 13780, 03.91, 3.12, 00314.],
['B8.0', 2.700, 13601, 3.40, 2.21, 00150., 12893, 03.57, 2.92, 00211.],
['B9.0', 2.850, 12778, 3.10, 2.03, 00098., 12080, 03.29, 2.76, 00145.]]
bestarsdJN1987_3 = [
# Derived by de Jager & Niewuwenhuijzen 1987 to the main sequence (b=5.)
# lum class IV (b=4.); Used by Cranmer2005
# Conclusions with Geneva models: class III is still in the main sequence!
# (but leaving, ~Achernar)
# Conclusion: SpTypes appear to be shifhed by -1 step here (cooler stars)
# SpType b-val Teff_3 Mass_3 Rp_3 Lum_3
['B0.0', 1.200, 25030, 14.8, 9.93, 34661.],
['B0.5', 1.350, 23009, 12.2, 8.92, 19969.],
['B1.0', 1.500, 21198, 10.2, 8.05, 11731.],
['B1.5', 1.650, 19570, 8.65, 7.31, 07032.],
['B2.0', 1.800, 18105, 7.43, 6.69, 04305.],
['B2.5', 1.875, 17427, 6.93, 6.41, 03396.],
['B3.0', 1.950, 16782, 6.48, 6.16, 02693.],
['B4.0', 2.100, 15586, 5.71, 5.71, 01723.],
['B5.0', 2.250, 14502, 5.10, 5.33, 01128.],
['B6.0', 2.400, 13519, 4.60, 5.03, 00756.],
['B7.0', 2.550, 12624, 4.20, 4.78, 00520.],
['B8.0', 2.700, 11809, 3.86, 4.58, 00366.],
['B9.0', 2.850, 11065, 3.59, 4.43, 00264.]]
bestarsBeAtlas = [
# H = 0.3 core
# For ob=1.10, i.e., one *CAN'T* apply 4*pi*R^2...
# SpType Tpole Teff Mass Rp Lum
['B0.0', _np.NaN, _np.NaN, _np.NaN, _np.NaN, _np.NaN],
['B0.5', 28905.8, 26765.7, 14.6, 7.50, 31183.26],
['B1.0', 26945.8, 24950.9, 12.5, 6.82, 19471.38],
['B1.5', 25085.2, 23228.2, 10.8, 6.23, 12204.70],
['B2.0', 23629.3, 21879.9, 09.6, 5.80, 08327.67],
['B2.5', 22296.1, 20645.4, 08.6, 5.43, 05785.96],
['B3.0', 20919.7, 19370.9, 07.7, 5.11, 03971.25],
['B4.0', 18739.3, 17351.9, 06.4, 4.62, 02090.08],
['B5.0', 17063.8, 15800.5, 05.5, 4.26, 01221.76],
['B6.0', 15587.7, 14433.6, 04.8, 4.02, 00757.60],
['B7.0', 14300.3, 13241.6, 04.2, 3.72, 00459.55],
['B8.0', 13329.9, 12343.0, 03.8, 3.55, 00315.96],
['B9.0', 12307.1, 11395.9, 03.4, 3.37, 00206.89]]
bestarsBeAtlas_N = [
# For ob=1.10
# SpType Tpole Teff Mass Rp Lum
['B0.0', 28905.8, 26765.7, 14.6, 7.50, 31183.26],
['B0.5', 26945.8, 24950.9, 12.5, 6.82, 19471.38],
['B1.0', 25085.2, 23228.2, 10.8, 6.23, 12204.70],
['B1.5', 23629.3, 21879.9, 09.6, 5.80, 08327.67],
['B2.0', 22296.1, 20645.4, 08.6, 5.43, 05785.96],
['B2.5', 20919.7, 19370.9, 07.7, 5.11, 03971.25],
['B3.0', 18739.3, 17351.9, 06.4, 4.62, 02090.08],
['B4.0', 17063.8, 15800.5, 05.5, 4.26, 01221.76],
['B5.0', 15587.7, 14433.6, 04.8, 4.02, 00757.60],
['B6.0', 14300.3, 13241.6, 04.2, 3.72, 00459.55],
['B7.0', 13329.9, 12343.0, 03.8, 3.55, 00315.96],
['B8.0', 12307.1, 11395.9, 03.4, 3.37, 00206.89],
['B9.0', _np.NaN, _np.NaN, _np.NaN, _np.NaN, _np.NaN]]
def oblat2w(oblat):
'''
Converts oblateness into wc=Omega/Omega_crit
Ekstrom et al. 2008, Eq. 9
Usage:
w = oblat2w(oblat)
'''
w = (1.5**1.5) * _np.sqrt(2.*(oblat - 1.) / oblat**3.)
return w
def geneva_closest(Mstar, oblat, t, Zstr='014', tar=None, silent=True):
'''
Interpolate models between rotation rates, at closest Mstar.
Usage:
Rpole, logL = geneva_closest(Mstar, oblat, t, Zstr='014', tar=None,
silent=True)
where t is given in tMS, and tar is the open tar file. The chosen
metallicity is according to the input tar file. If tar=None, the
code will take Zstr='014' by default.
'''
# oblat to Omega/Omega_c
w = oblat2w(oblat)
# grid
if Mstar <= 20.:
Mlist = _np.array([1.7, 2., 2.5, 3., 4., 5., 7., 9., 12., 15., 20.])
Mstr = _np.array(['1p700', '2p000', '2p500', '3p000', '4p000', '5p000',
'7p000', '9p000', '12p00', '15p00', '20p00'])
Vlist = _np.array([0., 0.1, 0.3, 0.5, 0.6, 0.7, 0.8, 0.9, 0.95])
Vstr = _np.array(['00000', '10000', '30000', '50000', '60000', '70000',
'80000', '90000', '95000'])
else:
Mlist = _np.array([20., 25., 32., 40., 60., 85., 120.])
Mstr = _np.array(['20p00', '25p00', '32p00', '40p00', '60p00', '85p00',
'120p0'])
Vlist = _np.array([0., 0.568])
Vstr = _np.array(['00000', '56800'])
# read tar file
if tar is None:
dir0 = '{0}/refs/geneva_models/'.format(_hdtpath())
fmod = 'Z{:}.tar.gz'.format(Zstr)
tar = _tarfile.open(dir0 + fmod, 'r:gz')
else:
Zstr = tar.getnames()[0][7:10]
# find closest Mstar
iM = _np.where(_np.abs(Mstar-Mlist) == _np.min(_np.abs(Mstar-Mlist)))[0][0]
# find values at selected age
nw = len(Vlist)
wlist = _np.zeros(nw)
Rplist = _np.zeros(nw)
logLlist = _np.zeros(nw)
agelist = _np.zeros(nw)
for iw, vs in enumerate(Vstr):
fname = 'M{:}Z{:}00V{:}.dat'.format(Mstr[iM], Zstr, vs)
age1, _, logL1, _, Hfrac1, _, _, w1, Rpole1 = geneva_read(fname,
tar=tar)
t1 = age1 / age1[_np.where(Hfrac1 == 0.)[0][0]-1]
if t > t1.max() and not silent:
print('[geneva_closest] Warning: requested age not available, '
'taking t/tMS={:.2f} instead of t/tMS={:.2f}.'.format(
t1.max(), t))
it = _np.where(_np.abs(t-t1) == _np.min(_np.abs(t-t1)))[0][0]
wlist[iw] = w1[it]
Rplist[iw] = Rpole1[it]
logLlist[iw] = logL1[it]
agelist[iw] = age1[it] / 1e6
# interpolate between rotation rates
if w <= wlist.max():
Rpole = _griddata(wlist, Rplist, [w], method='linear')[0]
logL = _griddata(wlist, logLlist, [w], method='linear')[0]
age = _griddata(wlist, agelist, [w], method='linear')[0]
else:
if not silent:
print('[geneva_closest] Warning: no model rotating this fast at '
'this age, taking closest model instead. (omega={:.2f} '
'instead of omega={:.2f})'.format(wlist.max(), w))
iwmax = _np.where(wlist == wlist.max())[0][0]
Rpole = Rplist[iwmax]
logL = logLlist[iwmax]
age = agelist[iwmax]
return Rpole, logL, age
def geneva_interp(Mstar, oblat, t, Zstr='014', tar=None, silent=True):
'''
Interpolates Geneva stellar models.
Usage:
Rpole, logL, age = geneva_interp(Mstar, oblat, t, tar=None, silent=True)
where t is given in tMS, and tar is the open tar file. The chosen
metallicity is according to the input tar file. If tar=None, the
code will take Zstr='014' by default.
'''
# oblat to Omega/Omega_c
# w = oblat2w(oblat)
# grid
if Mstar <= 20.:
Mlist = _np.array([1.7, 2., 2.5, 3., 4., 5., 7., 9., 12., 15., 20.])
else:
Mlist = _np.array([20., 25., 32., 40., 60., 85., 120.])
# read tar file
if tar is None:
dir0 = '{0}/refs/geneva_models/'.format(_hdtpath())
fmod = 'Z{:}.tar.gz'.format(Zstr)
tar = _tarfile.open(dir0 + fmod, 'r:gz')
else:
Zstr = tar.getnames()[0][7:10]
# interpolation
if (Mstar >= Mlist.min()) * (Mstar <= Mlist.max()):
if (Mstar == Mlist).any():
Rpole, logL, age = geneva_closest(Mstar, oblat, t, tar=tar,
Zstr=Zstr, silent=silent)
else:
# nearest value at left
Mleft = Mlist[Mlist < Mstar]
Mleft = Mleft[_np.abs(Mleft - Mstar).argmin()]
iMleft = _np.where(Mlist == Mleft)[0][0]
Rpolel, logLl, agel = geneva_closest(Mlist[iMleft], oblat, t,
tar=tar, Zstr=Zstr, silent=silent)
# nearest value at right
Mright = Mlist[Mlist > Mstar]
Mright = Mright[_np.abs(Mright - Mstar).argmin()]
iMright = _np.where(Mlist == Mright)[0][0]
Rpoler, logLr, ager = geneva_closest(Mlist[iMright], oblat, t,
tar=tar, Zstr=Zstr, silent=silent)
# interpolate between masses
weight = _np.array([Mright-Mstar, Mstar-Mleft]) / (Mright-Mleft)
Rpole = weight.dot(_np.array([Rpolel, Rpoler]))
logL = weight.dot(_np.array([logLl, logLr]))
age = weight.dot(_np.array([agel, ager]))
else:
if not silent:
print('[geneva_interp] Warning: Mstar out of available range, '
'taking the closest value.')
Rpole, logL, age = geneva_closest(Mstar, oblat, t, tar=tar, Zstr=Zstr,
silent=silent)
return Rpole, logL, age
def geneva_interp_fast(Mstar, oblat, t, Zstr='014', silent=True):
'''
Interpolates Geneva stellar models, from grid of
pre-computed interpolations.
Usage:
Rpole, logL, age = geneva_interp_fast(Mstar, oblat, t, Zstr='014')
where t is given in tMS, and tar is the open tar file. For now, only
Zstr='014' is available.
'''
# read grid
dir0 = '{0}/refs/geneva_models/'.format(_hdtpath())
if Mstar <= 20.:
fname = 'geneva_interp_Z{:}.npz'.format(Zstr)
else:
fname = 'geneva_interp_Z{:}_highM.npz'.format(Zstr)
data = _np.load(dir0 + fname)
Mstar_arr = data['Mstar_arr']
oblat_arr = data['oblat_arr']
t_arr = data['t_arr']
Rpole_grid = data['Rpole_grid']
logL_grid = data['logL_grid']
age_grid = data['age_grid']
# build grid of parameters
par_grid = []
for M in Mstar_arr:
for ob in oblat_arr:
for tt in t_arr:
par_grid.append([M, ob, tt])
par_grid = _np.array(par_grid)
# set input/output parameters
par = _np.array([Mstar, oblat, t])
# set ranges
ranges = _np.array([[par_grid[:, i].min(), par_grid[:, i].max()] for i in
range(len(par))])
# find neighbours
keep, out, inside_ranges, par, par_grid = _phc.find_neighbours(par,
par_grid, ranges)
# interpolation method
if inside_ranges:
interp_method = 'linear'
else:
if not silent:
print('[geneva_interp_fast] Warning: parameters out of available '
'range, taking closest model')
interp_method = 'nearest'
if len(keep[keep]) == 1:
# coincidence
Rpole = Rpole_grid.flatten()[keep][0]
logL = logL_grid.flatten()[keep][0]
age = age_grid.flatten()[keep][0]
else:
# interpolation
Rpole = _griddata(par_grid[keep], Rpole_grid.flatten()[keep], par,
method=interp_method, rescale=True)[0]
logL = _griddata(par_grid[keep], logL_grid.flatten()[keep], par,
method=interp_method, rescale=True)[0]
age = _griddata(par_grid[keep], age_grid.flatten()[keep], par,
method=interp_method, rescale=True)[0]
return Rpole, logL, age
def geneva_pre_computed(Zstr='014', silent=False):
'''
Create geneva pre-computed grid
'''
dir0 = '{0}/refs/geneva_models/'.format(_hdtpath())
if _os.path.exists(dir0 + 'geneva_interp_Z{:}.npz'.format(Zstr)):
data = _np.load(dir0 + 'geneva_interp_Z{:}.npz'.format(Zstr))
else:
# par grid
Mstar_arr = _np.array(
[1.7, 2., 2.5, 3., 4., 5., 7., 9., 12., 15., 20.])
oblat_arr = _np.linspace(1., 1.5, 6)
t_arr = _np.hstack([_np.linspace(0., .9, 10),
_np.linspace(1., 1.1, 21)])
Rpole_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
logL_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
age_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
# read tar file
tar = _tarfile.open(dir0 + 'Z{:}.tar.gz'.format(Zstr), 'r:gz')
for iM, Mstar in enumerate(Mstar_arr):
for iob, oblat in enumerate(oblat_arr):
for it, t in enumerate(t_arr):
if not silent:
print(Mstar, oblat, t)
Rp, lL, age = geneva_interp(Mstar, oblat, t, tar=tar,
Zstr=Zstr, silent=silent)
Rpole_grid[iM, iob, it] = Rp
logL_grid[iM, iob, it] = lL
age_grid[iM, iob, it] = age
_np.savez(dir0 + 'geneva_interp_Z{:}'.format(Zstr),
Mstar_arr=Mstar_arr, oblat_arr=oblat_arr, t_arr=t_arr,
Rpole_grid=Rpole_grid, logL_grid=logL_grid, age_grid=age_grid)
# high M
if _os.path.exists(dir0 + 'geneva_interp_Z{:}_highM.npz'.format(Zstr)):
data = _np.load(dir0 + 'geneva_interp_Z{:}_highM.npz'.format(Zstr))
return
# par grid
Mstar_arr = _np.array([20., 25., 32., 40., 60., 85., 120.])
oblat_arr = _np.linspace(1., 1.05633802817, 2)
t_arr = _np.hstack([_np.linspace(0., .9, 10),
_np.linspace(1., 1.1, 21)])
Rpole_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
logL_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
age_grid = _np.zeros([len(Mstar_arr), len(oblat_arr), len(t_arr)])
# read tar file
tar = _tarfile.open(dir0 + 'Z{:}.tar.gz'.format(Zstr), 'r:gz')
for iM, Mstar in enumerate(Mstar_arr):
for iob, oblat in enumerate(oblat_arr):
for it, t in enumerate(t_arr):
if not silent:
print(Mstar, oblat, t)
Rp, lL, age = geneva_interp(Mstar, oblat, t, tar=tar,
Zstr=Zstr, silent=silent)
Rpole_grid[iM, iob, it] = Rp
logL_grid[iM, iob, it] = lL
age_grid[iM, iob, it] = age
_np.savez(dir0 + 'geneva_interp_Z{:}_highM'.format(Zstr),
Mstar_arr=Mstar_arr, oblat_arr=oblat_arr, t_arr=t_arr,
Rpole_grid=Rpole_grid, logL_grid=logL_grid, age_grid=age_grid)
return
def geneva_read(fname, Zstr='014', tar=None):
'''
Reads Geneva model file
Usage:
age, Mstar, logL, logTeff, Hfrac, Hefrac, oblat, w, Rpole =
geneva_read(fname, tar=None)
where tar is the read tar(.gz) opened file.
'''
# read tar file
if tar is None:
dir0 = '{0}/refs/geneva_models/'.format(_hdtpath())
fmod = 'Z{:}.tar.gz'.format(Zstr)
tar = _tarfile.open(dir0 + fmod, 'r:gz')
else:
Zstr = tar.getnames()[0][7:10]
m = tar.getmember(fname)
fname = tar.extractfile(m)
# (age, M, logL, logTeff, Hfrac, Hefrac, oblat, w, Rpole)
cols = (1, 2, 3, 4, 21, 22, 34, 39, 44)
t = _np.loadtxt(fname, usecols=cols, skiprows=2)
age = t[:, 0]
Mstar = t[:, 1]
logL = t[:, 2]
logTeff = t[:, 3]
Hfrac = t[:, 4]
Hefrac = t[:, 5]
oblat = 1./t[:, 6]
w = t[:, 7]
Rpole = t[:, 8]
return age, Mstar, logL, logTeff, Hfrac, Hefrac, oblat, w, Rpole
# MAIN ###
if __name__ == "__main__":
pass
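    # Hedged example (added for illustration): exercise the pure-math helpers
    # with a made-up oblateness of 1.2; no model files are needed for these.
    _ob = 1.2
    print('w = Omega/Omega_c = {0:.4f}'.format(oblat2w(_ob)))
    print('W = vrot/vorb     = {0:.4f}'.format(wrot(_ob, is_ob=True)))
    print('beta (ELR)        = {0:.4f}'.format(beta(_ob, is_ob=True)))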
|
gpl-3.0
| 1,490,024,845,966,270,700
| 34.874378
| 80
| 0.519745
| false
| 2.523447
| false
| false
| false
|
axbaretto/beam
|
sdks/python/.tox/docs/lib/python2.7/site-packages/sphinx/errors.py
|
1
|
1963
|
# -*- coding: utf-8 -*-
"""
sphinx.errors
~~~~~~~~~~~~~
Contains SphinxError and a few subclasses (in an extra module to avoid
circular import problems).
:copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
class SphinxError(Exception):
"""
Base class for Sphinx errors that are shown to the user in a nicer
way than normal exceptions.
"""
category = 'Sphinx error'
class SphinxWarning(SphinxError):
"""Raised for warnings if warnings are treated as errors."""
category = 'Warning, treated as error'
class ExtensionError(SphinxError):
"""Raised if something's wrong with the configuration."""
category = 'Extension error'
def __init__(self, message, orig_exc=None):
SphinxError.__init__(self, message)
self.orig_exc = orig_exc
def __repr__(self):
if self.orig_exc:
return '%s(%r, %r)' % (self.__class__.__name__,
self.message, self.orig_exc)
return '%s(%r)' % (self.__class__.__name__, self.message)
def __str__(self):
parent_str = SphinxError.__str__(self)
if self.orig_exc:
return '%s (exception: %s)' % (parent_str, self.orig_exc)
return parent_str
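# Illustrative behaviour sketch, not part of the original module: wrapping the
# original exception makes it visible in both repr() and str(), e.g.
#   str(ExtensionError('setup failed', ValueError('bad value')))
#   -> 'setup failed (exception: bad value)'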
class ConfigError(SphinxError):
category = 'Configuration error'
class ThemeError(SphinxError):
category = 'Theme error'
class VersionRequirementError(SphinxError):
category = 'Sphinx version error'
class PycodeError(Exception):
def __str__(self):
res = self.args[0]
if len(self.args) > 1:
res += ' (exception was: %r)' % self.args[1]
return res
class SphinxParallelError(SphinxError):
category = 'Sphinx parallel build error'
def __init__(self, message, traceback):
self.message = message
self.traceback = traceback
def __str__(self):
return self.message
|
apache-2.0
| 1,207,145,405,419,724,500
| 24.493506
| 74
| 0.608253
| false
| 4.030801
| false
| false
| false
|
okolisny/integration_tests
|
cfme/tests/test_rest.py
|
1
|
32165
|
# -*- coding: utf-8 -*-
"""This module contains REST API specific tests."""
import random
import pytest
import fauxfactory
from cfme import test_requirements
from cfme.infrastructure.provider.rhevm import RHEVMProvider
from cfme.infrastructure.provider.virtualcenter import VMwareProvider
from cfme.rest.gen_data import arbitration_rules as _arbitration_rules
from cfme.rest.gen_data import arbitration_settings as _arbitration_settings
from cfme.rest.gen_data import automation_requests_data
from cfme.rest.gen_data import vm as _vm
from fixtures.provider import setup_one_or_skip
from cfme.utils import error
from cfme.utils.blockers import BZ
from cfme.utils.providers import ProviderFilter
from cfme.utils.rest import assert_response
from cfme.utils.version import current_version
from cfme.utils.wait import wait_for, wait_for_decorator
pytestmark = [test_requirements.rest]
@pytest.fixture(scope="module")
def a_provider(request):
pf = ProviderFilter(classes=[VMwareProvider, RHEVMProvider])
return setup_one_or_skip(request, filters=[pf])
@pytest.fixture(scope='module')
def api_version(appliance):
entry_point = appliance.rest_api._versions.values()[0]
return appliance.new_rest_api_instance(entry_point=entry_point)
@pytest.fixture(scope="function")
def vm(request, a_provider, appliance):
return _vm(request, a_provider, appliance.rest_api)
def wait_for_requests(requests):
def _finished():
for request in requests:
request.reload()
if request.request_state != 'finished':
return False
return True
wait_for(_finished, num_sec=45, delay=5, message="requests finished")
@pytest.mark.tier(2)
@pytest.mark.parametrize(
"from_detail", [True, False],
ids=["from_detail", "from_collection"])
def test_vm_scan(appliance, vm, from_detail):
rest_vm = appliance.rest_api.collections.vms.get(name=vm)
if from_detail:
response = rest_vm.action.scan()
else:
response, = appliance.rest_api.collections.vms.action.scan(rest_vm)
assert_response(appliance)
@wait_for_decorator(timeout="5m", delay=5, message="REST running scanning vm finishes")
def _finished():
response.task.reload()
if response.task.status.lower() in {"error"}:
pytest.fail("Error when running scan vm method: `{}`".format(response.task.message))
return response.task.state.lower() == 'finished'
COLLECTIONS_ADDED_IN_58 = {
"actions", "alert_definitions", "alerts", "authentications", "configuration_script_payloads",
"configuration_script_sources", "load_balancers",
}
COLLECTIONS_REMOVED_IN_59 = {
"arbitration_settings", "arbitration_profiles", "virtual_templates", "arbitration_rules",
}
COLLECTIONS_ALL = {
"actions", "alert_definitions", "alerts", "arbitration_profiles",
"arbitration_rules", "arbitration_settings", "authentications", "automate",
"automate_domains", "automation_requests", "availability_zones",
"blueprints", "categories", "chargebacks", "cloud_networks", "clusters",
"conditions", "configuration_script_payloads",
"configuration_script_sources", "container_deployments", "currencies",
"data_stores", "events", "features", "flavors", "groups", "hosts",
"instances", "load_balancers", "measures", "notifications",
"orchestration_templates", "pictures", "policies", "policy_actions",
"policy_profiles", "providers", "provision_dialogs", "provision_requests",
"rates", "reports", "request_tasks", "requests", "resource_pools",
"results", "roles", "security_groups", "servers", "service_catalogs",
"service_dialogs", "service_orders", "service_requests",
"service_templates", "services", "settings", "tags", "tasks", "templates",
"tenants", "users", "virtual_templates", "vms", "zones"
}
# non-typical collections without "id" and "resources"
COLLECTIONS_OMMITED = {"settings"}
@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
lambda collection_name:
(collection_name in COLLECTIONS_OMMITED) or
(collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or
(collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9")
)
def test_query_simple_collections(appliance, collection_name):
"""This test tries to load each of the listed collections. 'Simple' collection means that they
have no usable actions that we could try to run
Steps:
* GET /api/<collection_name>
Metadata:
test_flag: rest
"""
collection = getattr(appliance.rest_api.collections, collection_name)
assert_response(appliance)
collection.reload()
list(collection)
@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
lambda collection_name:
(collection_name in COLLECTIONS_OMMITED) or
(collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or
(collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9")
)
def test_query_with_api_version(api_version, collection_name):
"""Loads each of the listed collections using /api/<version>/<collection>.
Steps:
* GET /api/<version>/<collection_name>
Metadata:
test_flag: rest
"""
collection = getattr(api_version.collections, collection_name)
assert_response(api_version)
collection.reload()
list(collection)
# collections affected by BZ 1437201 in versions < 5.9
COLLECTIONS_BUGGY_ATTRS = {"results", "service_catalogs", "automate", "categories", "roles"}
@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
lambda collection_name:
(collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < "5.8") or
(collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= "5.9")
)
@pytest.mark.meta(blockers=['GH#ManageIQ/manageiq:15754'])
def test_select_attributes(appliance, collection_name):
"""Tests that it's possible to limit returned attributes.
Metadata:
test_flag: rest
"""
if collection_name in COLLECTIONS_BUGGY_ATTRS and current_version() < '5.9':
pytest.skip("Affected by BZ 1437201, cannot test.")
collection = getattr(appliance.rest_api.collections, collection_name)
response = appliance.rest_api.get(
'{}{}'.format(collection._href, '?expand=resources&attributes=id'))
assert_response(appliance)
for resource in response.get('resources', []):
assert 'id' in resource
expected_len = 2 if 'href' in resource else 1
assert len(resource) == expected_len
def test_add_picture(appliance):
"""Tests adding picture.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.pictures
count = collection.count
collection.action.create({
"extension": "png",
"content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS"
"JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="})
assert_response(appliance)
collection.reload()
assert collection.count == count + 1
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_add_picture_invalid_extension(appliance):
"""Tests adding picture with invalid extension.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.pictures
count = collection.count
with error.expected('Extension must be'):
collection.action.create({
"extension": "xcf",
"content": "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcS"
"JAAAADUlEQVR42mNk+M9QDwADhgGAWjR9awAAAABJRU5ErkJggg=="})
assert_response(appliance, http_status=400)
collection.reload()
assert collection.count == count
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_add_picture_invalid_data(appliance):
"""Tests adding picture with invalid content.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.pictures
count = collection.count
with error.expected('invalid base64'):
collection.action.create({
"extension": "png",
"content": "invalid"})
assert_response(appliance, http_status=400)
collection.reload()
assert collection.count == count
def test_http_options(appliance):
"""Tests OPTIONS http method.
Metadata:
test_flag: rest
"""
assert 'boot_time' in appliance.rest_api.collections.vms.options()['attributes']
assert_response(appliance)
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
@pytest.mark.parametrize("collection_name", ["hosts", "clusters"])
def test_http_options_node_types(appliance, collection_name):
"""Tests that OPTIONS http method on Hosts and Clusters collection returns node_types.
Metadata:
test_flag: rest
"""
collection = getattr(appliance.rest_api.collections, collection_name)
assert 'node_types' in collection.options()['data']
assert_response(appliance)
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_http_options_subcollections(appliance):
"""Tests that OPTIONS returns supported subcollections.
Metadata:
test_flag: rest
"""
assert 'tags' in appliance.rest_api.collections.vms.options()['subcollections']
assert_response(appliance)
def test_server_info(appliance):
"""Check that server info is present.
Metadata:
test_flag: rest
"""
assert all(item in appliance.rest_api.server_info for item in ('appliance', 'build', 'version'))
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_server_info_href(appliance):
"""Check that appliance's server, zone and region is present.
Metadata:
test_flag: rest
"""
items = ('server_href', 'zone_href', 'region_href')
for item in items:
assert item in appliance.rest_api.server_info
assert 'id' in appliance.rest_api.get(appliance.rest_api.server_info[item])
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_default_region(appliance):
"""Check that the default region is present.
Metadata:
test_flag: rest
"""
reg = appliance.rest_api.collections.regions[0]
assert hasattr(reg, 'guid')
assert hasattr(reg, 'region')
def test_product_info(appliance):
"""Check that product info is present.
Metadata:
test_flag: rest
"""
assert all(item in appliance.rest_api.product_info for item in
('copyright', 'name', 'name_full', 'support_website', 'support_website_text'))
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_settings_collection(appliance):
"""Checks that all expected info is present in /api/settings.
Metadata:
test_flag: rest
"""
# the "settings" collection is untypical as it doesn't have "resources" and
# for this reason can't be reloaded (bug in api client)
body = appliance.rest_api.get(appliance.rest_api.collections.settings._href)
assert all(item in body.keys() for item in ('product', 'prototype'))
def test_identity(appliance):
"""Check that user's identity is present.
Metadata:
test_flag: rest
"""
assert all(item in appliance.rest_api.identity for item in
('userid', 'name', 'group', 'role', 'tenant', 'groups'))
def test_user_settings(appliance):
"""Check that user's settings are returned.
Metadata:
test_flag: rest
"""
assert isinstance(appliance.rest_api.settings, dict)
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_datetime_filtering(appliance, a_provider):
"""Tests support for DateTime filtering with timestamps in YYYY-MM-DDTHH:MM:SSZ format.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.vms
url_string = '{}{}'.format(
collection._href,
'?expand=resources&attributes=created_on&sort_by=created_on&sort_order=asc'
'&filter[]=created_on{}{}')
vms_num = len(collection)
assert vms_num > 3
baseline_vm = collection[vms_num / 2]
baseline_datetime = baseline_vm._data['created_on'] # YYYY-MM-DDTHH:MM:SSZ
def _get_filtered_resources(operator):
return appliance.rest_api.get(url_string.format(operator, baseline_datetime))['resources']
older_resources = _get_filtered_resources('<')
newer_resources = _get_filtered_resources('>')
matching_resources = _get_filtered_resources('=')
# this will fail once BZ1437529 is fixed
# should be: ``assert matching_resources``
assert not matching_resources
if older_resources:
last_older = collection.get(id=older_resources[-1]['id'])
assert last_older.created_on < baseline_vm.created_on
if newer_resources:
first_newer = collection.get(id=newer_resources[0]['id'])
# this will fail once BZ1437529 is fixed
# should be: ``assert first_newer.created_on > baseline_vm.created_on``
assert first_newer.created_on == baseline_vm.created_on
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_date_filtering(appliance, a_provider):
"""Tests support for DateTime filtering with timestamps in YYYY-MM-DD format.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.vms
url_string = '{}{}'.format(
collection._href,
'?expand=resources&attributes=created_on&sort_by=created_on&sort_order=desc'
'&filter[]=created_on{}{}')
vms_num = len(collection)
assert vms_num > 3
baseline_vm = collection[vms_num / 2]
baseline_date, _ = baseline_vm._data['created_on'].split('T') # YYYY-MM-DD
def _get_filtered_resources(operator):
return appliance.rest_api.get(url_string.format(operator, baseline_date))['resources']
older_resources = _get_filtered_resources('<')
newer_resources = _get_filtered_resources('>')
matching_resources = _get_filtered_resources('=')
assert matching_resources
if newer_resources:
last_newer = collection.get(id=newer_resources[-1]['id'])
assert last_newer.created_on > baseline_vm.created_on
if older_resources:
first_older = collection.get(id=older_resources[0]['id'])
assert first_older.created_on < baseline_vm.created_on
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_resources_hiding(appliance):
"""Test that it's possible to hide resources in response.
Metadata:
test_flag: rest
"""
roles = appliance.rest_api.collections.roles
resources_visible = appliance.rest_api.get(roles._href + '?filter[]=read_only=true')
assert_response(appliance)
assert 'resources' in resources_visible
resources_hidden = appliance.rest_api.get(
roles._href + '?filter[]=read_only=true&hide=resources')
assert_response(appliance)
assert 'resources' not in resources_hidden
assert resources_hidden['subcount'] == resources_visible['subcount']
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_sorting_by_attributes(appliance):
"""Test that it's possible to sort resources by attributes.
Metadata:
test_flag: rest
"""
url_string = '{}{}'.format(
appliance.rest_api.collections.groups._href,
'?expand=resources&attributes=id&sort_by=id&sort_order={}')
response_asc = appliance.rest_api.get(url_string.format('asc'))
assert_response(appliance)
assert 'resources' in response_asc
response_desc = appliance.rest_api.get(url_string.format('desc'))
assert_response(appliance)
assert 'resources' in response_desc
assert response_asc['subcount'] == response_desc['subcount']
id_last = 0
for resource in response_asc['resources']:
assert resource['id'] > id_last
id_last = resource['id']
id_last += 1
for resource in response_desc['resources']:
assert resource['id'] < id_last
id_last = resource['id']
PAGING_DATA = [
(0, 0),
(1, 0),
(11, 13),
(1, 10000),
]
@pytest.mark.uncollectif(lambda: current_version() < '5.9')
@pytest.mark.parametrize(
'paging', PAGING_DATA, ids=['{},{}'.format(d[0], d[1]) for d in PAGING_DATA])
@pytest.mark.meta(blockers=[
BZ(1489885, forced_streams=['5.9', 'upstream'], unblock=lambda paging: paging[0] != 0),
])
def test_rest_paging(appliance, paging):
"""Tests paging when offset and limit are specified.
Metadata:
test_flag: rest
"""
limit, offset = paging
url_string = '{}{}'.format(
appliance.rest_api.collections.features._href,
'?limit={}&offset={}'.format(limit, offset))
response = appliance.rest_api.get(url_string)
if response['count'] <= offset:
expected_subcount = 0
elif response['count'] - offset >= limit:
expected_subcount = limit
else:
expected_subcount = response['count'] - offset
assert response['subcount'] == expected_subcount
assert len(response['resources']) == expected_subcount
expected_pages_num = (response['count'] / limit) + (1 if response['count'] % limit else 0)
assert response['pages'] == expected_pages_num
links = response['links']
assert 'limit={}&offset={}'.format(limit, offset) in links['self']
if (offset + limit) < response['count']:
assert 'limit={}&offset={}'.format(limit, offset + limit) in links['next']
if offset > 0:
expected_previous_offset = offset - limit if offset > limit else 0
assert 'limit={}&offset={}'.format(limit, expected_previous_offset) in links['previous']
assert 'limit={}&offset={}'.format(limit, 0) in links['first']
expected_last_offset = (response['pages'] - (1 if limit > 1 else 0)) * limit
assert 'limit={}&offset={}'.format(limit, expected_last_offset) in links['last']
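# Worked example (illustrative) of the paging arithmetic checked above: with
# count=13, limit=11 and offset=0 the expected subcount is 11, the expected
# page count is (13 / 11) + 1 == 2 under Python 2 integer division, and the
# 'last' link carries offset (2 - 1) * 11 == 11.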
COLLECTIONS_BUGGY_HREF_SLUG = {'policy_actions', 'automate_domains'}
@pytest.mark.tier(3)
@pytest.mark.parametrize("collection_name", COLLECTIONS_ALL)
@pytest.mark.uncollectif(
lambda collection_name:
collection_name == 'automate' or # doesn't have 'href'
(collection_name in COLLECTIONS_ADDED_IN_58 and current_version() < '5.8') or
(collection_name in COLLECTIONS_REMOVED_IN_59 and current_version() >= '5.9')
)
@pytest.mark.meta(blockers=[BZ(
1485310,
forced_streams=['5.8', 'upstream'],
unblock=lambda collection_name: collection_name not in COLLECTIONS_BUGGY_HREF_SLUG)])
def test_attributes_present(appliance, collection_name):
"""Tests that the expected attributes are present in all collections.
Metadata:
test_flag: rest
"""
attrs = 'href,id,href_slug'
collection = getattr(appliance.rest_api.collections, collection_name)
response = appliance.rest_api.get(
'{0}{1}{2}'.format(collection._href, '?expand=resources&attributes=', attrs))
assert_response(appliance)
for resource in response.get('resources', []):
assert 'id' in resource
assert 'href' in resource
assert resource['href'] == '{}/{}'.format(collection._href, resource['id'])
if current_version() >= '5.8':
assert 'href_slug' in resource
assert resource['href_slug'] == '{}/{}'.format(collection.name, resource['id'])
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
@pytest.mark.parametrize('vendor', ['Microsoft', 'Redhat', 'Vmware'])
def test_collection_class_valid(appliance, a_provider, vendor):
"""Tests that it's possible to query using collection_class.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.vms
resource_type = collection[0].type
tested_type = 'ManageIQ::Providers::{}::InfraManager::Vm'.format(vendor)
response = collection.query_string(collection_class=tested_type)
if resource_type == tested_type:
assert response.count > 0
# all returned entities must have the same type
if response.count:
rand_num = 5 if response.count >= 5 else response.count
rand_entities = random.sample(response, rand_num)
for entity in rand_entities:
assert entity.type == tested_type
@pytest.mark.uncollectif(lambda: current_version() < '5.8')
def test_collection_class_invalid(appliance):
"""Tests that it's not possible to query using invalid collection_class.
Metadata:
test_flag: rest
"""
with error.expected('Invalid collection_class'):
appliance.rest_api.collections.vms.query_string(
collection_class='ManageIQ::Providers::Nonexistent::Vm')
class TestBulkQueryRESTAPI(object):
def test_bulk_query(self, appliance):
"""Tests bulk query referencing resources by attributes id, href and guid
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.events
data0, data1, data2 = collection[0]._data, collection[1]._data, collection[2]._data
response = appliance.rest_api.collections.events.action.query(
{'id': data0['id']}, {'href': data1['href']}, {'guid': data2['guid']})
assert_response(appliance)
assert len(response) == 3
assert (data0 == response[0]._data and
data1 == response[1]._data and
data2 == response[2]._data)
def test_bulk_query_users(self, appliance):
"""Tests bulk query on 'users' collection
Metadata:
test_flag: rest
"""
data = appliance.rest_api.collections.users[0]._data
response = appliance.rest_api.collections.users.action.query(
{'name': data['name']}, {'userid': data['userid']})
assert_response(appliance)
assert len(response) == 2
assert data['id'] == response[0]._data['id'] == response[1]._data['id']
def test_bulk_query_roles(self, appliance):
"""Tests bulk query on 'roles' collection
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.roles
data0, data1 = collection[0]._data, collection[1]._data
response = appliance.rest_api.collections.roles.action.query(
{'name': data0['name']}, {'name': data1['name']})
assert_response(appliance)
assert len(response) == 2
assert data0 == response[0]._data and data1 == response[1]._data
def test_bulk_query_groups(self, appliance):
"""Tests bulk query on 'groups' collection
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.groups
data0, data1 = collection[0]._data, collection[1]._data
response = appliance.rest_api.collections.groups.action.query(
{'description': data0['description']}, {'description': data1['description']})
assert_response(appliance)
assert len(response) == 2
assert data0 == response[0]._data and data1 == response[1]._data
class TestArbitrationSettingsRESTAPI(object):
@pytest.fixture(scope='function')
def arbitration_settings(self, request, appliance):
num_settings = 2
response = _arbitration_settings(request, appliance.rest_api, num=num_settings)
assert_response(appliance)
assert len(response) == num_settings
return response
def test_create_arbitration_settings(self, appliance, arbitration_settings):
"""Tests create arbitration settings.
Metadata:
test_flag: rest
"""
for setting in arbitration_settings:
record = appliance.rest_api.collections.arbitration_settings.get(id=setting.id)
assert record._data == setting._data
@pytest.mark.parametrize('method', ['post', 'delete'])
def test_delete_arbitration_settings_from_detail(self, appliance, arbitration_settings, method):
"""Tests delete arbitration settings from detail.
Metadata:
test_flag: rest
"""
for setting in arbitration_settings:
setting.action.delete(force_method=method)
assert_response(appliance)
with error.expected('ActiveRecord::RecordNotFound'):
setting.action.delete(force_method=method)
assert_response(appliance, http_status=404)
def test_delete_arbitration_settings_from_collection(self, appliance, arbitration_settings):
"""Tests delete arbitration settings from collection.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.arbitration_settings
collection.action.delete(*arbitration_settings)
assert_response(appliance)
with error.expected('ActiveRecord::RecordNotFound'):
collection.action.delete(*arbitration_settings)
assert_response(appliance, http_status=404)
@pytest.mark.parametrize(
"from_detail", [True, False],
ids=["from_detail", "from_collection"])
def test_edit_arbitration_settings(self, appliance, arbitration_settings, from_detail):
"""Tests edit arbitration settings.
Metadata:
test_flag: rest
"""
num_settings = len(arbitration_settings)
uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_settings)]
new = [{'name': 'test_edit{}'.format(u), 'display_name': 'Test Edit{}'.format(u)}
for u in uniq]
if from_detail:
edited = []
for i in range(num_settings):
edited.append(arbitration_settings[i].action.edit(**new[i]))
assert_response(appliance)
else:
for i in range(num_settings):
new[i].update(arbitration_settings[i]._ref_repr())
edited = appliance.rest_api.collections.arbitration_settings.action.edit(*new)
assert_response(appliance)
assert len(edited) == num_settings
for i in range(num_settings):
assert (edited[i].name == new[i]['name'] and
edited[i].display_name == new[i]['display_name'])
class TestArbitrationRulesRESTAPI(object):
@pytest.fixture(scope='function')
def arbitration_rules(self, request, appliance):
num_rules = 2
response = _arbitration_rules(request, appliance.rest_api, num=num_rules)
assert_response(appliance)
assert len(response) == num_rules
return response
@pytest.mark.uncollectif(lambda: current_version() >= '5.9')
def test_create_arbitration_rules(self, arbitration_rules, appliance):
"""Tests create arbitration rules.
Metadata:
test_flag: rest
"""
for rule in arbitration_rules:
record = appliance.rest_api.collections.arbitration_rules.get(id=rule.id)
assert record.description == rule.description
# there's no test for the DELETE method as it is not working and won't be fixed, see BZ 1410504
@pytest.mark.uncollectif(lambda: current_version() >= '5.9')
def test_delete_arbitration_rules_from_detail_post(self, arbitration_rules, appliance):
"""Tests delete arbitration rules from detail.
Metadata:
test_flag: rest
"""
for entity in arbitration_rules:
entity.action.delete.POST()
assert_response(appliance)
with error.expected('ActiveRecord::RecordNotFound'):
entity.action.delete.POST()
assert_response(appliance, http_status=404)
@pytest.mark.uncollectif(lambda: current_version() >= '5.9')
def test_delete_arbitration_rules_from_collection(self, arbitration_rules, appliance):
"""Tests delete arbitration rules from collection.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.arbitration_rules
collection.action.delete(*arbitration_rules)
assert_response(appliance)
with error.expected('ActiveRecord::RecordNotFound'):
collection.action.delete(*arbitration_rules)
assert_response(appliance, http_status=404)
@pytest.mark.uncollectif(lambda: current_version() >= '5.9')
@pytest.mark.parametrize(
'from_detail', [True, False],
ids=['from_detail', 'from_collection'])
def test_edit_arbitration_rules(self, arbitration_rules, appliance, from_detail):
"""Tests edit arbitration rules.
Metadata:
test_flag: rest
"""
num_rules = len(arbitration_rules)
uniq = [fauxfactory.gen_alphanumeric(5) for _ in range(num_rules)]
new = [{'description': 'new test admin rule {}'.format(u)} for u in uniq]
if from_detail:
edited = []
for i in range(num_rules):
edited.append(arbitration_rules[i].action.edit(**new[i]))
assert_response(appliance)
else:
for i in range(num_rules):
new[i].update(arbitration_rules[i]._ref_repr())
edited = appliance.rest_api.collections.arbitration_rules.action.edit(*new)
assert_response(appliance)
assert len(edited) == num_rules
for i in range(num_rules):
assert edited[i].description == new[i]['description']
class TestNotificationsRESTAPI(object):
@pytest.fixture(scope='function')
def generate_notifications(self, appliance):
requests_data = automation_requests_data('nonexistent_vm')
requests = appliance.rest_api.collections.automation_requests.action.create(
*requests_data[:2])
assert len(requests) == 2
wait_for_requests(requests)
@pytest.mark.parametrize(
'from_detail', [True, False],
ids=['from_detail', 'from_collection'])
def test_mark_notifications(self, appliance, generate_notifications, from_detail):
"""Tests marking notifications as seen.
Metadata:
test_flag: rest
"""
unseen = appliance.rest_api.collections.notifications.find_by(seen=False)
notifications = [unseen[-i] for i in range(1, 3)]
if from_detail:
for ent in notifications:
ent.action.mark_as_seen()
assert_response(appliance)
else:
appliance.rest_api.collections.notifications.action.mark_as_seen(*notifications)
assert_response(appliance)
for ent in notifications:
ent.reload()
assert ent.seen
@pytest.mark.parametrize('method', ['post', 'delete'])
def test_delete_notifications_from_detail(self, appliance, generate_notifications, method):
"""Tests delete notifications from detail.
Metadata:
test_flag: rest
"""
if method == 'delete' and BZ('1420872', forced_streams=['5.7', '5.8', 'upstream']).blocks:
pytest.skip("Affected by BZ1420872, cannot test.")
collection = appliance.rest_api.collections.notifications
collection.reload()
notifications = [collection[-i] for i in range(1, 3)]
for entity in notifications:
entity.action.delete(force_method=method)
assert_response(appliance)
with error.expected('ActiveRecord::RecordNotFound'):
entity.action.delete(force_method=method)
assert_response(appliance, http_status=404)
def test_delete_notifications_from_collection(self, appliance, generate_notifications):
"""Tests delete notifications from collection.
Metadata:
test_flag: rest
"""
collection = appliance.rest_api.collections.notifications
collection.reload()
notifications = [collection[-i] for i in range(1, 3)]
collection.action.delete(*notifications)
assert_response(appliance)
with error.expected("ActiveRecord::RecordNotFound"):
collection.action.delete(*notifications)
assert_response(appliance, http_status=404)
|
gpl-2.0
| 4,843,326,511,060,586,000
| 36.357724
| 100
| 0.656801
| false
| 3.828255
| true
| false
| false
|
bloem-project/bloem-server
|
files/models.py
|
1
|
3838
|
# -*- coding: utf-8 -*-
"""Model definitions for Bloem's files application.
This module defines the various models used as part of Bloem's files
application.
"""
import os
from django.db import models
class Directory(models.Model):
"""Defines the Directory model used in Bloem's files application.
Fields:
path (CharField): Path of the directory.
"""
path = models.CharField(max_length=4096)
class Meta:
verbose_name = "directory"
verbose_name_plural = "directories"
class Namespace(models.Model):
"""Defines the Namespace model used in Bloem's files application.
Fields:
name (CharField): Name of the namespace.
"""
name = models.CharField(unique=True, max_length=64)
class Meta:
verbose_name = "namespace"
verbose_name_plural = "namespaces"
class Tag(models.Model):
"""Defines the Tag model used in Bloem's files application.
Fields:
name (CharField): Name of the tag.
namespace (ForeignKey): Points to the namespace.
"""
name = models.CharField(unique=True, max_length=64)
namespace = models.ForeignKey(Namespace, on_delete=models.CASCADE)
class Meta:
verbose_name = "tag"
verbose_name_plural = "tags"
class File(models.Model):
"""Defines the File model used in Bloem's files application.
Fields:
hash (CharField): SHA256 hash of the file.
file_name (CharField): Name of the file.
path (CharField): Absolute path of the file, excluding the actual
filename.
date_added (DateTimeField): Date and time when the file was added to
the database.
date_modified (DateTimeField): Date and time when the file was modified
in the database.
"""
hash = models.CharField(max_length=64, unique=True)
file_name = models.CharField(max_length=256)
directory = models.ForeignKey(Directory, on_delete=models.CASCADE)
path = models.CharField(max_length=4096)
date_added = models.DateTimeField(auto_now_add=True)
date_modified = models.DateTimeField(auto_now=True)
tags = models.ManyToManyField(Tag, blank=True)
def _get_full_path(self):
return os.path.join(self.path, self.file_name)
full_path = property(_get_full_path)
def __str__(self):
"""Output the file's name."""
return self.file_name
class Meta:
ordering = ["file_name"]
get_latest_by = "date_added"
verbose_name = "file"
verbose_name_plural = "files"
class InboxItem(models.Model):
"""Defines the InboxItem model used in Bloem's files application.
Fields:
file (OneToOneField): Points to the File object.
"""
file = models.OneToOneField(File)
def __str__(self):
"""Output the file's name."""
return self.file.file_name
class Meta:
verbose_name = "inbox item"
verbose_name_plural = "inbox items"
class Person(models.Model):
"""Defines the Person model used in Bloem's files application.
This model is deliberately meant to be as wide as
possible, with all fields being optional to allow
users to choose which field they wish to fill for
each person at their own discretion.
    Fields:
        first_name (CharField): Optional first name.
        last_name (CharField): Optional last name.
        gender (CharField): Optional gender code, one of GENDER_CHOICES.
        date_of_birth (DateField): Optional date of birth.
    """
MALE = 'ML'
FEMALE = 'FM'
GENDER_CHOICES = (
(MALE, 'Male'),
(FEMALE, 'Female'),
)
first_name = models.CharField(blank=True, null=True, max_length=64)
last_name = models.CharField(blank=True, null=True, max_length=64)
    gender = models.CharField(max_length=2, choices=GENDER_CHOICES, blank=True, null=True)
date_of_birth = models.DateField(blank=True, null=True)
class Meta:
verbose_name = "person"
verbose_name_plural = "persons"
|
gpl-3.0
| 7,179,614,170,286,121,000
| 27.857143
| 79
| 0.633924
| false
| 4.006263
| false
| false
| false
|
mvpossum/machine-learning
|
tp4/plot_table.py
|
1
|
1620
|
#! /usr/bin/env python
import sys
import os
from sys import argv
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
FILE = argv[1]
PLOT_FILE = os.path.splitext(FILE)[0]+'.png'
ERROR = 'er' in FILE.lower()
legend = argv[2:]
cols = len(legend)
if cols>=4:
linestyles = ['--', '-', '--', '-', '--', '-', '--', '-', '--', '-', '--', '-']
colors = ['r', 'r', 'b', 'b', 'g', 'g', 'orange', 'orange', 'purple', 'purple', 'y', 'y', 'gray', 'gray']
elif cols==3:
linestyles = ['-', '-', '-']
colors = ['b', 'g', 'r']
else:
linestyles = ['-','-']
colors = ['r', 'b']
x = []
y = [[] for _ in range(cols)]
for line in open(FILE):
if line.strip():
line = [float(s) for s in line.split(' ') if s.strip()]
x.append(line[0])
for j in range(cols):
y[j].append(line[j+1])
fig, ax = plt.subplots()
ax = plt.subplot(111)
FONT_SIZE = 16
for label in (ax.get_xticklabels() + ax.get_yticklabels()):
label.set_fontsize(FONT_SIZE)
for yv in range(cols):
ax.plot(x, y[yv], label=legend[yv], linestyle=linestyles[yv], color=colors[yv])
#~ if ERROR:
#ax.set_ylim(9,60)
#~ else:
#~ ax.set_ylim(0,30)
#ax.set_xlim(0,128)
box = ax.get_position()
ax.set_position([box.x0, box.y0, box.width * 0.62, box.height])
ax.legend(prop={'size':FONT_SIZE}, bbox_to_anchor=(1, 1.0))
plt.xlabel('Dimensions', size=FONT_SIZE)
#~ plt.xscale('log')
ylabel = 'Error (%)' if ERROR else 'Number of tree nodes'
plt.ylabel(ylabel, size=FONT_SIZE)
plt.savefig(PLOT_FILE)
plt.show()
|
mit
| -6,190,329,077,399,839,000
| 24.296875
| 109
| 0.57937
| false
| 2.684909
| false
| false
| false
|
zanardob/django-pizza
|
pizzeria/pizzeria/settings.py
|
1
|
2661
|
"""
Django settings for pizzeria project.
Generated by 'django-admin startproject' using Django 1.8.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
import dj_database_url
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'i!%!frm&u7pf5bqmev#n*dp%vovkwbb33s1n@gycfr1su_c9bl'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pizza'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'pizzeria.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'pizzeria.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': dj_database_url.config()
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
|
cc0-1.0
| -1,323,239,971,014,439,400
| 24.834951
| 71
| 0.699737
| false
| 3.40717
| false
| false
| false
|
cinemapub/bright-response
|
scripts/lib/foursquare/foursquare/tests/test_events.py
|
1
|
1068
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# (c) 2013 Mike Lewis
import logging; log = logging.getLogger(__name__)
from . import BaseAuthenticatedEndpointTestCase, BaseUserlessEndpointTestCase
class EventsEndpointTestCase(BaseAuthenticatedEndpointTestCase):
"""
General
"""
def test_event(self):
response = self.api.events(self.default_eventid)
assert 'event' in response
def test_categories(self):
response = self.api.events.categories()
assert 'categories' in response
def test_search(self):
response = self.api.events.search({'domain': u'songkick.com', 'eventId': u'8183976'})
assert 'events' in response
class EventsUserlessEndpointTestCase(BaseUserlessEndpointTestCase):
"""
General
"""
def test_categories(self):
response = self.api.events.categories()
assert 'categories' in response
def test_search(self):
response = self.api.events.search({'domain': u'songkick.com', 'eventId': u'8183976'})
assert 'events' in response
|
mit
| 7,469,222,720,811,461,000
| 25.04878
| 93
| 0.667603
| false
| 4
| true
| false
| false
|
msakai/pyubcsat
|
ubcsat.py
|
1
|
2406
|
import re
import subprocess
import sys
class Solver():
def __init__(self, ubcsat = "ubcsat"):
self._ubcsat = ubcsat
self._nvar = 0
self._clauses = []
self._soft_clauses = []
def newvar(self):
self._nvar += 1
return self._nvar
def add_clause(self, clause):
self._clauses.append(clause)
def add_soft_clause(self, clause, weight = 1):
self._soft_clauses.append((weight, clause))
def _write_wcnf(self, file):
top = sum(w for w, _ in self._soft_clauses) + 1
file.write("p wcnf %d %d %d\n" % (self._nvar, len(self._clauses) + len(self._soft_clauses), top))
for clause in self._clauses:
file.write(str(top))
for lit in clause:
file.write(" ")
file.write(str(lit))
file.write(" 0\n")
for w, clause in self._soft_clauses:
file.write(str(w))
for lit in clause:
file.write(" ")
file.write(str(lit))
file.write(" 0\n")
file.flush()
return top
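    # Worked example (illustrative) for the WCNF header written above: the
    # clauses built in the __main__ block below give 4 variables, 2 hard plus
    # 3 soft clauses, and top = 8 + 4 + 3 + 1 = 16, so the header line is
    # "p wcnf 4 5 16".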
def run(self):
cmd = [self._ubcsat, "-w", "-alg", "irots", "-seed", "0", "-runs", "10", "-solve", "-r", "bestsol"]
popen = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
top = self._write_wcnf(popen.stdin)
try:
for line in popen.stdout:
sys.stdout.write(line)
sys.stdout.flush()
m = re.match(r"^\d+ [01] (\d+) ([01]+)$", line)
if m:
obj, model = m.groups()
obj = int(obj)
if obj < top:
model = [None] + [c=='1' for c in model]
yield (obj, model)
finally:
popen.terminate()
def optimize(self):
bestobj = None
bestmodel = None
for (obj, model) in self.run():
if bestobj is None or obj < bestobj:
bestobj, bestmodel = obj, model
return bestobj, bestmodel
if __name__ == '__main__':
solver = Solver()
for i in xrange(4):
solver.newvar()
solver.add_clause([1, -2, 4])
solver.add_clause([-1, -2, 3])
solver.add_soft_clause([-2, -4], 8)
solver.add_soft_clause([-3, 2], 4)
solver.add_soft_clause([3, 1], 3)
print(solver.optimize())
|
bsd-3-clause
| -7,839,600,136,086,166,000
| 29.075
| 107
| 0.489194
| false
| 3.601796
| false
| false
| false
|
ajiwo/xiboside
|
xlf.py
|
1
|
3207
|
from xml.etree import ElementTree
import logging
log = logging.getLogger('xiboside.xlf')
def parse_file(path):
layout = None
try:
_xlf = Xlf(path)
except ElementTree.ParseError, err:
log.error(err.message)
return None
except IOError, err:
log.error("%s: %s" % (err.strerror, err.filename))
return None
if _xlf.layout:
layout = dict(_xlf.layout)
_xlf = None
del _xlf
return layout
class Xlf:
def __init__(self, path=None):
self.layout = None
self.region = None
self.media = None
if path:
self.parse(path)
def parse(self, path):
layout = {
'width': '',
'height': '',
'bgcolor': '',
'background': '',
'regions': [],
'tags': []
}
tree = ElementTree.parse(path)
root = tree.getroot()
if 'layout' != root.tag:
self.layout = None
return None
for k, v in root.attrib.iteritems():
if k in layout:
layout[k] = v
for child in root:
if 'region' == child.tag:
region = self.__parse_region(child)
if region:
layout['regions'].append(region)
elif 'tags' == child.tag:
for tag in child:
layout['tags'].append(tag.text)
self.layout = layout
return layout
def __parse_region(self, node):
if node is None:
self.region = None
return None
region = {
'id': '',
'width': '',
'height': '',
'left': '',
'top': '',
'userId': '',
'zindex': '0',
'media': [],
'options': {}
}
for k, v in node.attrib.iteritems():
if k in region:
region[k] = v
for child in node:
if 'media' == child.tag:
media = self.__parse_media(child)
if media:
region['media'].append(media)
elif 'options' == child.tag:
for option in child:
if option.text:
region['options'][option.tag] = option.text
self.region = region
return region
def __parse_media(self, node):
if node is None:
self.media = None
return None
media = {
'id': '',
'type': '',
'duration': '',
'render': '',
'options': {},
'raws': {}
}
for k, v in node.attrib.iteritems():
if k in media:
media[k] = v
for child in node:
if 'options' == child.tag:
for option in child:
if option.text:
media['options'][option.tag] = option.text
elif 'raw' == child.tag:
for raw in child:
if raw.text:
media['raws'][raw.tag] = raw.text
self.media = media
return media
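# Illustrative usage sketch, not part of the original module; the layout path
# below is hypothetical:
#   layout = parse_file('/path/to/layout.xlf')
#   if layout:
#       for region in layout['regions']:
#           print(region['id'], len(region['media']))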
|
agpl-3.0
| -4,592,609,816,481,283,000
| 24.862903
| 67
| 0.428438
| false
| 4.447989
| false
| false
| false
|
gltn/stdm
|
stdm/third_party/sqlalchemy/inspection.py
|
1
|
3030
|
# sqlalchemy/inspect.py
# Copyright (C) 2005-2020 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""The inspection module provides the :func:`_sa.inspect` function,
which delivers runtime information about a wide variety
of SQLAlchemy objects, both within the Core as well as the
ORM.
The :func:`_sa.inspect` function is the entry point to SQLAlchemy's
public API for viewing the configuration and construction
of in-memory objects. Depending on the type of object
passed to :func:`_sa.inspect`, the return value will either be
a related object which provides a known interface, or in many
cases it will return the object itself.
The rationale for :func:`_sa.inspect` is twofold. One is that
it replaces the need to be aware of a large variety of "information
getting" functions in SQLAlchemy, such as
:meth:`_reflection.Inspector.from_engine`,
:func:`.orm.attributes.instance_state`, :func:`_orm.class_mapper`,
and others. The other is that the return value of :func:`_sa.inspect`
is guaranteed to obey a documented API, thus allowing third party
tools which build on top of SQLAlchemy configurations to be constructed
in a forwards-compatible way.
"""
from . import exc
from . import util
_registrars = util.defaultdict(list)
def inspect(subject, raiseerr=True):
"""Produce an inspection object for the given target.
The returned value in some cases may be the
same object as the one given, such as if a
:class:`_orm.Mapper` object is passed. In other
cases, it will be an instance of the registered
inspection type for the given object, such as
if an :class:`_engine.Engine` is passed, an
:class:`_reflection.Inspector` object is returned.
:param subject: the subject to be inspected.
:param raiseerr: When ``True``, if the given subject
does not
correspond to a known SQLAlchemy inspected type,
:class:`sqlalchemy.exc.NoInspectionAvailable`
is raised. If ``False``, ``None`` is returned.
"""
type_ = type(subject)
for cls in type_.__mro__:
if cls in _registrars:
reg = _registrars[cls]
if reg is True:
return subject
ret = reg(subject)
if ret is not None:
break
else:
reg = ret = None
if raiseerr and (reg is None or ret is None):
raise exc.NoInspectionAvailable(
"No inspection system is "
"available for object of type %s" % type_
)
return ret
def _inspects(*types):
def decorate(fn_or_cls):
for type_ in types:
if type_ in _registrars:
raise AssertionError(
"Type %s is already " "registered" % type_
)
_registrars[type_] = fn_or_cls
return fn_or_cls
return decorate
def _self_inspects(cls):
_inspects(cls)(True)
return cls
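# Illustrative usage sketch, not part of the original module, assuming a
# configured Engine is available; inspect() then returns an Inspector bound
# to that engine:
#   from sqlalchemy import create_engine, inspect
#   engine = create_engine('sqlite://')
#   insp = inspect(engine)
#   print(insp.get_table_names())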
|
gpl-2.0
| -4,353,928,173,663,887,400
| 31.580645
| 72
| 0.671617
| false
| 4.029255
| false
| false
| false
|
eJon/enjoy
|
main.py
|
1
|
1373
|
# -*- coding:utf-8 -*-
#!/usr/bin/env python
__author__ = 'Leo'
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
from tornado.options import define, options
from game.game_server import GameApplication
define("port", default=8000, type=int, metavar="SERVER PORT", help="Run on the given port")
define("config", default="conf/server.conf", type=str, metavar="CONFIG FILE", help="Server configuration file")
define("data", default="./data/default", type=str, metavar="DATA FILE", help="Server data file")
define("user_group", type=str, metavar="USER GROUP", help="User Group")
# database config
define("database", default="", type=str, metavar="DATABASE", help="Server database")
define("db_host", default="127.0.0.1", type=str, metavar="HOST", help="Server database host")
define("db_user", default="root", type=str, metavar="USER", help="Server database user")
define("db_password", default="123456", type=str, metavar="PASSWORD", help="Server database password")
define("db_connect_num", default=5, type=int, metavar="NUM", help="Connect DB Number")
def main():
app = GameApplication()
app.prepare_application()
print "Running ..."
http_server = tornado.httpserver.HTTPServer(app)
http_server.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
if __name__ == "__main__":
main()
|
gpl-3.0
| -1,949,856,449,309,619,000
| 31.690476
| 111
| 0.706482
| false
| 3.632275
| false
| false
| false
|
metinkilicse/pyTurEng
|
pyTurEng/pyTurEng.py
|
1
|
1308
|
#!/usr/bin/env python
#-*- coding:utf-8 -*-
from __future__ import absolute_import
from TurEng import TurEng
import sys
import os
args = sys.argv
if len(args)==4:
dic = args[1]
lang = args[2]
query = args[3]
dic_obj = TurEng()
dic_obj.change_url(dic)
if lang == "en":
result = dic_obj.get_meaning(query,"tr ts")
else:
result = dic_obj.get_meaning(query,"en tm")
types, meaning = result[0],result[1]
if len(meaning)==5:
for i in range(5):
print("{} : {}".format(types[i].text,meaning[i].text))
else:
if len(meaning)==0:
print("No Result")
else:
print("{} : {}".format(types[1].text,meaning[0].text))
elif len(args)==5:
dic = args[1]
lang = args[2]
input_file = args[3]
output_file = args[4]
try:
dic_obj = TurEng()
dic_obj.change_url(dic)
if os.path.exists(input_file):
if lang == "en":
dic_obj.search_from_file(input_file,output_file,"tr ts")
else:
dic_obj.search_from_file(input_file,output_file,"en tm")
else:
print("File Does Not Exist")
except Exception as e:
print("Error : {} : line : {}".format(e,sys.exc_info()[2].tb_lineno))
exit()
else:
print("Use as :\n'python pyTurEng.py tren tr merhaba\nor\n"
"python pyTurEng.py tren en \"go away\"\nor\n"
"python pyTurEng.py tren en wordlist.txt outlist.txt")
|
gpl-3.0
| 5,442,897,099,161,561,000
| 23.679245
| 71
| 0.626147
| false
| 2.544747
| false
| false
| false
|
scylladb/seastar
|
scripts/perftune.py
|
1
|
61933
|
#!/usr/bin/env python3
import abc
import argparse
import distutils.util
import enum
import functools
import glob
import itertools
import logging
import multiprocessing
import os
import pathlib
import pyudev
import re
import shutil
import subprocess
import sys
import urllib.request
import yaml
import platform
import shlex
dry_run_mode = False
def perftune_print(log_msg, *args, **kwargs):
if dry_run_mode:
log_msg = "# " + log_msg
print(log_msg, *args, **kwargs)
def __run_one_command(prog_args, stderr=None, check=True):
proc = subprocess.Popen(prog_args, stdout = subprocess.PIPE, stderr = stderr)
outs, errs = proc.communicate()
outs = str(outs, 'utf-8')
if check and proc.returncode != 0:
raise subprocess.CalledProcessError(returncode=proc.returncode, cmd=" ".join(prog_args), output=outs, stderr=errs)
return outs
def run_one_command(prog_args, stderr=None, check=True):
if dry_run_mode:
print(" ".join([shlex.quote(x) for x in prog_args]))
else:
__run_one_command(prog_args, stderr=stderr, check=check)
def run_read_only_command(prog_args, stderr=None, check=True):
return __run_one_command(prog_args, stderr=stderr, check=check)
def run_hwloc_distrib(prog_args):
"""
Returns a list of strings - each representing a single line of hwloc-distrib output.
"""
return run_read_only_command(['hwloc-distrib'] + prog_args).splitlines()
def run_hwloc_calc(prog_args):
"""
Returns a single string with the result of the execution.
"""
return run_read_only_command(['hwloc-calc'] + prog_args).rstrip()
def run_ethtool(prog_args):
"""
Returns a list of strings - each representing a single line of ethtool output.
"""
return run_read_only_command(['ethtool'] + prog_args).splitlines()
def fwriteln(fname, line, log_message, log_errors=True):
try:
if dry_run_mode:
print("echo {} > {}".format(line, fname))
return
else:
with open(fname, 'w') as f:
f.write(line)
print(log_message)
except:
if log_errors:
print("{}: failed to write into {}: {}".format(log_message, fname, sys.exc_info()))
def readlines(fname):
try:
with open(fname, 'r') as f:
return f.readlines()
except:
print("Failed to read {}: {}".format(fname, sys.exc_info()))
return []
def fwriteln_and_log(fname, line, log_errors=True):
msg = "Writing '{}' to {}".format(line, fname)
fwriteln(fname, line, log_message=msg, log_errors=log_errors)
double_commas_pattern = re.compile(',,')
def set_one_mask(conf_file, mask, log_errors=True):
if not os.path.exists(conf_file):
raise Exception("Configure file to set mask doesn't exist: {}".format(conf_file))
mask = re.sub('0x', '', mask)
while double_commas_pattern.search(mask):
mask = double_commas_pattern.sub(',0,', mask)
msg = "Setting mask {} in {}".format(mask, conf_file)
fwriteln(conf_file, mask, log_message=msg, log_errors=log_errors)
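# Worked example (illustrative) of the mask normalization above: an input of
# "0xff,,0xff" becomes "ff,,ff" once the "0x" prefix is stripped, and the
# empty group is then filled in to give "ff,0,ff".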
def distribute_irqs(irqs, cpu_mask, log_errors=True):
# If IRQs' list is empty - do nothing
if not irqs:
return
for i, mask in enumerate(run_hwloc_distrib(["{}".format(len(irqs)), '--single', '--restrict', cpu_mask])):
set_one_mask("/proc/irq/{}/smp_affinity".format(irqs[i]), mask, log_errors=log_errors)
def is_process_running(name):
return len(list(filter(lambda ps_line : not re.search('<defunct>', ps_line), run_read_only_command(['ps', '--no-headers', '-C', name], check=False).splitlines()))) > 0
def restart_irqbalance(banned_irqs):
"""
Restart irqbalance if it's running and ban it from moving the IRQs from the
given list.
"""
config_file = '/etc/default/irqbalance'
options_key = 'OPTIONS'
systemd = False
banned_irqs_list = list(banned_irqs)
# If there is nothing to ban - quit
if not banned_irqs_list:
return
# return early if irqbalance is not running
if not is_process_running('irqbalance'):
perftune_print("irqbalance is not running")
return
    # If this file exists - this is a "new (systemd) style" irqbalance packaging.
    # This type of packaging uses IRQBALANCE_ARGS as an option key name; "old (init.d) style"
    # packaging uses an OPTIONS key.
if os.path.exists('/lib/systemd/system/irqbalance.service'):
options_key = 'IRQBALANCE_ARGS'
systemd = True
if not os.path.exists(config_file):
if os.path.exists('/etc/sysconfig/irqbalance'):
config_file = '/etc/sysconfig/irqbalance'
elif os.path.exists('/etc/conf.d/irqbalance'):
config_file = '/etc/conf.d/irqbalance'
options_key = 'IRQBALANCE_OPTS'
with open('/proc/1/comm', 'r') as comm:
systemd = 'systemd' in comm.read()
else:
perftune_print("Unknown system configuration - not restarting irqbalance!")
perftune_print("You have to prevent it from moving IRQs {} manually!".format(banned_irqs_list))
return
orig_file = "{}.scylla.orig".format(config_file)
# Save the original file
if not dry_run_mode:
if not os.path.exists(orig_file):
print("Saving the original irqbalance configuration is in {}".format(orig_file))
shutil.copyfile(config_file, orig_file)
else:
print("File {} already exists - not overwriting.".format(orig_file))
# Read the config file lines
cfile_lines = open(config_file, 'r').readlines()
# Build the new config_file contents with the new options configuration
perftune_print("Restarting irqbalance: going to ban the following IRQ numbers: {} ...".format(", ".join(banned_irqs_list)))
# Search for the original options line
opt_lines = list(filter(lambda line : re.search("^\s*{}".format(options_key), line), cfile_lines))
if not opt_lines:
new_options = "{}=\"".format(options_key)
elif len(opt_lines) == 1:
# cut the last "
new_options = re.sub("\"\s*$", "", opt_lines[0].rstrip())
opt_lines = opt_lines[0].strip()
else:
raise Exception("Invalid format in {}: more than one lines with {} key".format(config_file, options_key))
for irq in banned_irqs_list:
# prevent duplicate "ban" entries for the same IRQ
patt_str = "\-\-banirq\={}\Z|\-\-banirq\={}\s".format(irq, irq)
if not re.search(patt_str, new_options):
new_options += " --banirq={}".format(irq)
new_options += "\""
if dry_run_mode:
if opt_lines:
print("sed -i 's/^{}/#{}/g' {}".format(options_key, options_key, config_file))
print("echo {} | tee -a {}".format(new_options, config_file))
else:
with open(config_file, 'w') as cfile:
for line in cfile_lines:
if not re.search("^\s*{}".format(options_key), line):
cfile.write(line)
cfile.write(new_options + "\n")
if systemd:
perftune_print("Restarting irqbalance via systemctl...")
run_one_command(['systemctl', 'try-restart', 'irqbalance'])
else:
perftune_print("Restarting irqbalance directly (init.d)...")
run_one_command(['/etc/init.d/irqbalance', 'restart'])
def learn_irqs_from_proc_interrupts(pattern, irq2procline):
return [ irq for irq, proc_line in filter(lambda irq_proc_line_pair : re.search(pattern, irq_proc_line_pair[1]), irq2procline.items()) ]
def learn_all_irqs_one(irq_conf_dir, irq2procline, xen_dev_name):
"""
Returns a list of IRQs of a single device.
irq_conf_dir: a /sys/... directory with the IRQ information for the given device
irq2procline: a map of IRQs to the corresponding lines in the /proc/interrupts
xen_dev_name: a device name pattern as it appears in the /proc/interrupts on Xen systems
"""
msi_irqs_dir_name = os.path.join(irq_conf_dir, 'msi_irqs')
# Device uses MSI IRQs
if os.path.exists(msi_irqs_dir_name):
return os.listdir(msi_irqs_dir_name)
irq_file_name = os.path.join(irq_conf_dir, 'irq')
# Device uses INT#x
if os.path.exists(irq_file_name):
return [ line.lstrip().rstrip() for line in open(irq_file_name, 'r').readlines() ]
# No irq file detected
modalias = open(os.path.join(irq_conf_dir, 'modalias'), 'r').readline()
# virtio case
if re.search("^virtio", modalias):
return list(itertools.chain.from_iterable(
map(lambda dirname : learn_irqs_from_proc_interrupts(dirname, irq2procline),
filter(lambda dirname : re.search('virtio', dirname),
itertools.chain.from_iterable([ dirnames for dirpath, dirnames, filenames in os.walk(os.path.join(irq_conf_dir, 'driver')) ])))))
# xen case
if re.search("^xen:", modalias):
return learn_irqs_from_proc_interrupts(xen_dev_name, irq2procline)
return []
def get_irqs2procline_map():
return { line.split(':')[0].lstrip().rstrip() : line for line in open('/proc/interrupts', 'r').readlines() }
################################################################################
class PerfTunerBase(metaclass=abc.ABCMeta):
def __init__(self, args):
self.__args = args
self.__args.cpu_mask = run_hwloc_calc(['--restrict', self.__args.cpu_mask, 'all'])
self.__mode = None
self.__irq_cpu_mask = args.irq_cpu_mask
if self.__irq_cpu_mask:
self.__compute_cpu_mask = run_hwloc_calc([self.__args.cpu_mask, "~{}".format(self.__irq_cpu_mask)])
else:
self.__compute_cpu_mask = None
self.__is_aws_i3_nonmetal_instance = None
#### Public methods ##########################
class CPUMaskIsZeroException(Exception):
"""Thrown if CPU mask turns out to be zero"""
pass
class SupportedModes(enum.IntEnum):
"""
Modes are ordered from the one that cuts the biggest number of CPUs
from the compute CPUs' set to the one that takes the smallest ('mq' doesn't
cut any CPU from the compute set).
This fact is used when we calculate the 'common quotient' mode out of a
given set of modes (e.g. default modes of different Tuners) - this would
be the smallest among the given modes.
"""
sq_split = 0
sq = 1
mq = 2
# Note: no_irq_restrictions should always have the greatest value in the enum since it's the least restricting mode.
no_irq_restrictions = 9999
@staticmethod
def names():
return PerfTunerBase.SupportedModes.__members__.keys()
@staticmethod
def combine(modes):
"""
:param modes: a set of modes of the PerfTunerBase.SupportedModes type
:return: the mode that is the "common ground" for a given set of modes.
"""
# Perform an explicit cast in order to verify that the values in the 'modes' are compatible with the
# expected PerfTunerBase.SupportedModes type.
return min([PerfTunerBase.SupportedModes(m) for m in modes])
@staticmethod
def cpu_mask_is_zero(cpu_mask):
"""
        The cpu_mask is a comma-separated list of 32-bit hex values, e.g. 0xffff,0x0,0xffff
        We want to check whether the whole mask is all-zeros.
:param cpu_mask: hwloc-calc generated CPU mask
:return: True if mask is zero, False otherwise
"""
for cur_irqs_cpu_mask in cpu_mask.split(','):
if int(cur_irqs_cpu_mask, 16) != 0:
return False
return True
@staticmethod
def compute_cpu_mask_for_mode(mq_mode, cpu_mask):
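        # Rough illustration on a hypothetical 8-PU/4-core machine with cpu_mask=0xff:
        #   sq       -> compute mask ~0xfe (all but CPU0), IRQs end up on 0x01
        #   sq_split -> compute mask excludes core0 and its HT sibling, IRQs get core0's PUs
        #   mq / no_irq_restrictions -> compute mask stays 0xff
        # (actual values come from hwloc-calc and depend on the real topology)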
mq_mode = PerfTunerBase.SupportedModes(mq_mode)
irqs_cpu_mask = 0
if mq_mode == PerfTunerBase.SupportedModes.sq:
# all but CPU0
irqs_cpu_mask = run_hwloc_calc([cpu_mask, '~PU:0'])
elif mq_mode == PerfTunerBase.SupportedModes.sq_split:
# all but CPU0 and its HT siblings
irqs_cpu_mask = run_hwloc_calc([cpu_mask, '~core:0'])
elif mq_mode == PerfTunerBase.SupportedModes.mq:
# all available cores
irqs_cpu_mask = cpu_mask
elif mq_mode == PerfTunerBase.SupportedModes.no_irq_restrictions:
# all available cores
irqs_cpu_mask = cpu_mask
else:
raise Exception("Unsupported mode: {}".format(mq_mode))
if PerfTunerBase.cpu_mask_is_zero(irqs_cpu_mask):
raise PerfTunerBase.CPUMaskIsZeroException("Bad configuration mode ({}) and cpu-mask value ({}): this results in a zero-mask for compute".format(mq_mode.name, cpu_mask))
return irqs_cpu_mask
@staticmethod
def irqs_cpu_mask_for_mode(mq_mode, cpu_mask):
mq_mode = PerfTunerBase.SupportedModes(mq_mode)
irqs_cpu_mask = 0
if mq_mode != PerfTunerBase.SupportedModes.mq and mq_mode != PerfTunerBase.SupportedModes.no_irq_restrictions:
irqs_cpu_mask = run_hwloc_calc([cpu_mask, "~{}".format(PerfTunerBase.compute_cpu_mask_for_mode(mq_mode, cpu_mask))])
else: # mq_mode == PerfTunerBase.SupportedModes.mq or mq_mode == PerfTunerBase.SupportedModes.no_irq_restrictions
# distribute equally between all available cores
irqs_cpu_mask = cpu_mask
if PerfTunerBase.cpu_mask_is_zero(irqs_cpu_mask):
raise PerfTunerBase.CPUMaskIsZeroException("Bad configuration mode ({}) and cpu-mask value ({}): this results in a zero-mask for IRQs".format(mq_mode.name, cpu_mask))
return irqs_cpu_mask
@property
def mode(self):
"""
Return the configuration mode
"""
# Make sure the configuration mode is set (see the __set_mode_and_masks() description).
if self.__mode is None:
self.__set_mode_and_masks()
return self.__mode
@mode.setter
def mode(self, new_mode):
"""
Set the new configuration mode and recalculate the corresponding masks.
"""
# Make sure the new_mode is of PerfTunerBase.AllowedModes type
self.__mode = PerfTunerBase.SupportedModes(new_mode)
self.__compute_cpu_mask = PerfTunerBase.compute_cpu_mask_for_mode(self.__mode, self.__args.cpu_mask)
self.__irq_cpu_mask = PerfTunerBase.irqs_cpu_mask_for_mode(self.__mode, self.__args.cpu_mask)
@property
def compute_cpu_mask(self):
"""
Return the CPU mask to use for seastar application binding.
"""
# see the __set_mode_and_masks() description
if self.__compute_cpu_mask is None:
self.__set_mode_and_masks()
return self.__compute_cpu_mask
@property
def irqs_cpu_mask(self):
"""
Return the mask of CPUs used for IRQs distribution.
"""
# see the __set_mode_and_masks() description
if self.__irq_cpu_mask is None:
self.__set_mode_and_masks()
return self.__irq_cpu_mask
@property
def is_aws_i3_non_metal_instance(self):
"""
:return: True if we are running on the AWS i3.nonmetal instance, e.g. i3.4xlarge
"""
if self.__is_aws_i3_nonmetal_instance is None:
self.__check_host_type()
return self.__is_aws_i3_nonmetal_instance
@property
def args(self):
return self.__args
@property
def irqs(self):
return self._get_irqs()
#### "Protected"/Public (pure virtual) methods ###########
@abc.abstractmethod
def tune(self):
pass
@abc.abstractmethod
def _get_def_mode(self):
"""
Return a default configuration mode.
"""
pass
@abc.abstractmethod
def _get_irqs(self):
"""
        Return an iterable with all IRQs to be configured.
"""
pass
#### Private methods ############################
def __set_mode_and_masks(self):
"""
Sets the configuration mode and the corresponding CPU masks. We can't
initialize them in the constructor because the default mode may depend
on the child-specific values that are set in its constructor.
That's why we postpone the mode's and the corresponding masks'
initialization till after the child instance creation.
"""
if self.__args.mode:
self.mode = PerfTunerBase.SupportedModes[self.__args.mode]
else:
self.mode = self._get_def_mode()
def __check_host_type(self):
"""
Check if we are running on the AWS i3 nonmetal instance.
If yes, set self.__is_aws_i3_nonmetal_instance to True, and to False otherwise.
"""
try:
aws_instance_type = urllib.request.urlopen("http://169.254.169.254/latest/meta-data/instance-type", timeout=0.1).read().decode()
if re.match(r'^i3\.((?!metal)\w)+$', aws_instance_type):
self.__is_aws_i3_nonmetal_instance = True
else:
self.__is_aws_i3_nonmetal_instance = False
return
except (urllib.error.URLError, ConnectionError, TimeoutError):
# Non-AWS case
pass
except:
logging.warning("Unexpected exception while attempting to access AWS meta server: {}".format(sys.exc_info()[0]))
self.__is_aws_i3_nonmetal_instance = False
#################################################
class NetPerfTuner(PerfTunerBase):
def __init__(self, args):
super().__init__(args)
self.nics=args.nics
self.__nic_is_bond_iface = self.__check_dev_is_bond_iface()
self.__slaves = self.__learn_slaves()
# check that self.nics contain a HW device or a bonding interface
self.__check_nics()
self.__irqs2procline = get_irqs2procline_map()
self.__nic2irqs = self.__learn_irqs()
#### Public methods ############################
def tune(self):
"""
Tune the networking server configuration.
"""
for nic in self.nics:
if self.nic_is_hw_iface(nic):
perftune_print("Setting a physical interface {}...".format(nic))
self.__setup_one_hw_iface(nic)
else:
perftune_print("Setting {} bonding interface...".format(nic))
self.__setup_bonding_iface(nic)
# Increase the socket listen() backlog
fwriteln_and_log('/proc/sys/net/core/somaxconn', '4096')
        # Increase the maximum number of remembered connection requests that have not yet
        # received an acknowledgment from the connecting client.
fwriteln_and_log('/proc/sys/net/ipv4/tcp_max_syn_backlog', '4096')
def nic_is_bond_iface(self, nic):
return self.__nic_is_bond_iface[nic]
def nic_exists(self, nic):
return self.__iface_exists(nic)
def nic_is_hw_iface(self, nic):
return self.__dev_is_hw_iface(nic)
def slaves(self, nic):
"""
Returns an iterator for all slaves of the nic.
        If args.nic is not a bonding interface, an attempt to use the returned iterator
will immediately raise a StopIteration exception - use __dev_is_bond_iface() check to avoid this.
"""
return iter(self.__slaves[nic])
#### Protected methods ##########################
def _get_def_mode(self):
mode=PerfTunerBase.SupportedModes.no_irq_restrictions
for nic in self.nics:
if self.nic_is_bond_iface(nic):
mode = min(mode, min(map(self.__get_hw_iface_def_mode, filter(self.__dev_is_hw_iface, self.slaves(nic)))))
else:
mode = min(mode, self.__get_hw_iface_def_mode(nic))
return mode
def _get_irqs(self):
"""
Returns the iterator for all IRQs that are going to be configured (according to args.nics parameter).
For instance, for a bonding interface that's going to include IRQs of all its slaves.
"""
return itertools.chain.from_iterable(self.__nic2irqs.values())
#### Private methods ############################
@property
def __rfs_table_size(self):
return 32768
def __check_nics(self):
"""
Checks that self.nics are supported interfaces
"""
for nic in self.nics:
if not self.nic_exists(nic):
raise Exception("Device {} does not exist".format(nic))
if not self.nic_is_hw_iface(nic) and not self.nic_is_bond_iface(nic):
raise Exception("Not supported virtual device {}".format(nic))
def __get_irqs_one(self, iface):
"""
Returns the list of IRQ numbers for the given interface.
"""
return self.__nic2irqs[iface]
def __setup_rfs(self, iface):
rps_limits = glob.glob("/sys/class/net/{}/queues/*/rps_flow_cnt".format(iface))
one_q_limit = int(self.__rfs_table_size / len(rps_limits))
# If RFS feature is not present - get out
try:
run_one_command(['sysctl', 'net.core.rps_sock_flow_entries'])
except:
return
# Enable RFS
perftune_print("Setting net.core.rps_sock_flow_entries to {}".format(self.__rfs_table_size))
run_one_command(['sysctl', '-w', 'net.core.rps_sock_flow_entries={}'.format(self.__rfs_table_size)])
# Set each RPS queue limit
for rfs_limit_cnt in rps_limits:
msg = "Setting limit {} in {}".format(one_q_limit, rfs_limit_cnt)
fwriteln(rfs_limit_cnt, "{}".format(one_q_limit), log_message=msg)
# Enable ntuple filtering HW offload on the NIC
ethtool_msg = "Enable ntuple filtering HW offload for {}...".format(iface)
if dry_run_mode:
perftune_print(ethtool_msg)
run_one_command(['ethtool','-K', iface, 'ntuple', 'on'], stderr=subprocess.DEVNULL)
else:
try:
print("Trying to enable ntuple filtering HW offload for {}...".format(iface), end='')
run_one_command(['ethtool','-K', iface, 'ntuple', 'on'], stderr=subprocess.DEVNULL)
print("ok")
except:
print("not supported")
def __setup_rps(self, iface, mask):
for one_rps_cpus in self.__get_rps_cpus(iface):
set_one_mask(one_rps_cpus, mask)
self.__setup_rfs(iface)
def __setup_xps(self, iface):
xps_cpus_list = glob.glob("/sys/class/net/{}/queues/*/xps_cpus".format(iface))
masks = run_hwloc_distrib(["{}".format(len(xps_cpus_list))])
for i, mask in enumerate(masks):
set_one_mask(xps_cpus_list[i], mask)
def __iface_exists(self, iface):
if len(iface) == 0:
return False
return os.path.exists("/sys/class/net/{}".format(iface))
def __dev_is_hw_iface(self, iface):
return os.path.exists("/sys/class/net/{}/device".format(iface))
def __check_dev_is_bond_iface(self):
bond_dict = {}
if not os.path.exists('/sys/class/net/bonding_masters'):
for nic in self.nics:
bond_dict[nic] = False
#return False for every nic
return bond_dict
for nic in self.nics:
bond_dict[nic] = any([re.search(nic, line) for line in open('/sys/class/net/bonding_masters', 'r').readlines()])
return bond_dict
def __learn_slaves(self):
slaves_list_per_nic = {}
for nic in self.nics:
if self.nic_is_bond_iface(nic):
slaves_list_per_nic[nic] = list(itertools.chain.from_iterable([line.split() for line in open("/sys/class/net/{}/bonding/slaves".format(nic), 'r').readlines()]))
return slaves_list_per_nic
def __intel_irq_to_queue_idx(self, irq):
"""
Return the HW queue index for a given IRQ for Intel NICs in order to sort the IRQs' list by this index.
Intel's fast path IRQs have the following name convention:
<bla-bla>-TxRx-<queue index>
        Intel NICs also have an IRQ for the Flow Director (which is not a regular fast path IRQ) whose name looks like
        this:
<bla-bla>:fdir-TxRx-<index>
We want to put the Flow Director's IRQ at the end of the sorted list of IRQs.
:param irq: IRQ number
:return: HW queue index for Intel NICs and 0 for all other NICs
"""
intel_fp_irq_re = re.compile("\-TxRx\-(\d+)")
fdir_re = re.compile("fdir\-TxRx\-\d+")
m = intel_fp_irq_re.search(self.__irqs2procline[irq])
m1 = fdir_re.search(self.__irqs2procline[irq])
if m and not m1:
return int(m.group(1))
else:
return sys.maxsize
def __mlx_irq_to_queue_idx(self, irq):
"""
Return the HW queue index for a given IRQ for Mellanox NICs in order to sort the IRQs' list by this index.
        Mellanox NICs have IRQs whose names look like
        this:
mlx5_comp23
mlx5_comp<index>
or this:
mlx4-6
mlx4-<index>
:param irq: IRQ number
:return: HW queue index for Mellanox NICs and 0 for all other NICs
"""
mlx5_fp_irq_re = re.compile("mlx5_comp(\d+)")
mlx4_fp_irq_re = re.compile("mlx4\-(\d+)")
m5 = mlx5_fp_irq_re.search(self.__irqs2procline[irq])
if m5:
return int(m5.group(1))
else:
m4 = mlx4_fp_irq_re.search(self.__irqs2procline[irq])
if m4:
return int(m4.group(1))
return sys.maxsize
def __get_driver_name(self, iface):
"""
:param iface: Interface to check
:return: driver name from ethtool
"""
driver_name = ''
ethtool_i_lines = run_ethtool(['-i', iface])
driver_re = re.compile("driver:")
driver_lines = list(filter(lambda one_line: driver_re.search(one_line), ethtool_i_lines))
if driver_lines:
if len(driver_lines) > 1:
raise Exception("More than one 'driver:' entries in the 'ethtool -i {}' output. Unable to continue.".format(iface))
driver_name = driver_lines[0].split()[1].strip()
return driver_name
def __learn_irqs_one(self, iface):
"""
This is a slow method that is going to read from the system files. Never
use it outside the initialization code. Use __get_irqs_one() instead.
Filter the fast path queues IRQs from the __get_all_irqs_one() result according to the known
patterns.
Right now we know about the following naming convention of the fast path queues vectors:
- Intel: <bla-bla>-TxRx-<bla-bla>
- Broadcom: <bla-bla>-fp-<bla-bla>
- ena: <bla-bla>-Tx-Rx-<bla-bla>
- Mellanox: for mlx4
mlx4-<queue idx>@<bla-bla>
or for mlx5
mlx5_comp<queue idx>@<bla-bla>
        So, we will try to filter the entries in /proc/interrupts for IRQs we've got from get_all_irqs_one()
        according to the patterns above.
        If as a result all IRQs are filtered out (if there are no IRQs with the names from the patterns above) then
        this means that the given NIC uses a different IRQ naming pattern. In this case we won't filter any IRQ.
        Otherwise, we will use only IRQs whose names fit one of the patterns above.
For NICs with a limited number of Rx queues the IRQs that handle Rx are going to be at the beginning of the
list.
"""
# filter 'all_irqs' to only reference valid keys from 'irqs2procline' and avoid an IndexError on the 'irqs' search below
all_irqs = set(learn_all_irqs_one("/sys/class/net/{}/device".format(iface), self.__irqs2procline, iface)).intersection(self.__irqs2procline.keys())
fp_irqs_re = re.compile("\-TxRx\-|\-fp\-|\-Tx\-Rx\-|mlx4-\d+@|mlx5_comp\d+@")
irqs = list(filter(lambda irq : fp_irqs_re.search(self.__irqs2procline[irq]), all_irqs))
if irqs:
driver_name = self.__get_driver_name(iface)
if (driver_name.startswith("mlx")):
irqs.sort(key=self.__mlx_irq_to_queue_idx)
else:
irqs.sort(key=self.__intel_irq_to_queue_idx)
return irqs
else:
return list(all_irqs)
def __learn_irqs(self):
"""
This is a slow method that is going to read from the system files. Never
use it outside the initialization code.
"""
nic_irq_dict={}
for nic in self.nics:
if self.nic_is_bond_iface(nic):
for slave in filter(self.__dev_is_hw_iface, self.slaves(nic)):
nic_irq_dict[slave] = self.__learn_irqs_one(slave)
else:
nic_irq_dict[nic] = self.__learn_irqs_one(nic)
return nic_irq_dict
def __get_rps_cpus(self, iface):
"""
        Returns all rps_cpus file names for the given HW interface.
There is a single rps_cpus file for each RPS queue and there is a single RPS
queue for each HW Rx queue. Each HW Rx queue should have an IRQ.
Therefore the number of these files is equal to the number of fast path Rx IRQs for this interface.
"""
return glob.glob("/sys/class/net/{}/queues/*/rps_cpus".format(iface))
def __setup_one_hw_iface(self, iface):
max_num_rx_queues = self.__max_rx_queue_count(iface)
all_irqs = self.__get_irqs_one(iface)
# Bind the NIC's IRQs according to the configuration mode
#
# If this NIC has a limited number of Rx queues then we want to distribute their IRQs separately.
# For such NICs we've sorted IRQs list so that IRQs that handle Rx are all at the head of the list.
if max_num_rx_queues < len(all_irqs):
num_rx_queues = self.__get_rx_queue_count(iface)
perftune_print("Distributing IRQs handling Rx:")
distribute_irqs(all_irqs[0:num_rx_queues], self.irqs_cpu_mask)
perftune_print("Distributing the rest of IRQs")
distribute_irqs(all_irqs[num_rx_queues:], self.irqs_cpu_mask)
else:
perftune_print("Distributing all IRQs")
distribute_irqs(all_irqs, self.irqs_cpu_mask)
self.__setup_rps(iface, self.compute_cpu_mask)
self.__setup_xps(iface)
def __setup_bonding_iface(self, nic):
for slave in self.slaves(nic):
if self.__dev_is_hw_iface(slave):
perftune_print("Setting up {}...".format(slave))
self.__setup_one_hw_iface(slave)
else:
perftune_print("Skipping {} (not a physical slave device?)".format(slave))
def __max_rx_queue_count(self, iface):
"""
:param iface: Interface to check
        :return: The maximum number of RSS queues for the given interface if there is a known limitation, and sys.maxsize
        otherwise.
Networking drivers serving HW with the known maximum RSS queue limitation (due to lack of RSS bits):
ixgbe: PF NICs support up to 16 RSS queues.
ixgbevf: VF NICs support up to 4 RSS queues.
i40e: PF NICs support up to 64 RSS queues.
i40evf: VF NICs support up to 16 RSS queues.
"""
driver_to_max_rss = {'ixgbe': 16, 'ixgbevf': 4, 'i40e': 64, 'i40evf': 16}
driver_name = self.__get_driver_name(iface)
return driver_to_max_rss.get(driver_name, sys.maxsize)
def __get_rx_queue_count(self, iface):
"""
:return: the RSS Rx queues count for the given interface.
"""
num_irqs = len(self.__get_irqs_one(iface))
rx_queues_count = len(self.__get_rps_cpus(iface))
if rx_queues_count == 0:
rx_queues_count = num_irqs
return min(self.__max_rx_queue_count(iface), rx_queues_count)
def __get_hw_iface_def_mode(self, iface):
"""
Returns the default configuration mode for the given interface.
"""
rx_queues_count = self.__get_rx_queue_count(iface)
num_cores = int(run_hwloc_calc(['--number-of', 'core', 'machine:0', '--restrict', self.args.cpu_mask]))
num_PUs = int(run_hwloc_calc(['--number-of', 'PU', 'machine:0', '--restrict', self.args.cpu_mask]))
if num_PUs <= 4 or rx_queues_count == num_PUs:
return PerfTunerBase.SupportedModes.mq
elif num_cores <= 4:
return PerfTunerBase.SupportedModes.sq
else:
return PerfTunerBase.SupportedModes.sq_split
class ClocksourceManager:
class PreferredClockSourceNotAvailableException(Exception):
pass
def __init__(self, args):
self.__args = args
self._preferred = {"x86_64": "tsc", "kvm": "kvm-clock"}
self._arch = self._get_arch()
self._available_clocksources_file = "/sys/devices/system/clocksource/clocksource0/available_clocksource"
self._current_clocksource_file = "/sys/devices/system/clocksource/clocksource0/current_clocksource"
        self._recommendation_if_unavailable = { "x86_64": "The tsc clocksource is not available. Consider using a hardware platform where the tsc clocksource is available, or try forcing it with the tsc=reliable boot option", "kvm": "kvm-clock is not available" }
def _available_clocksources(self):
return open(self._available_clocksources_file).readline().split()
def _current_clocksource(self):
return open(self._current_clocksource_file).readline().strip()
def _get_arch(self):
try:
virt = run_read_only_command(['systemd-detect-virt']).strip()
if virt == "kvm":
return virt
except:
pass
return platform.machine()
def enforce_preferred_clocksource(self):
fwriteln(self._current_clocksource_file, self._preferred[self._arch], "Setting clocksource to {}".format(self._preferred[self._arch]))
def preferred(self):
return self._preferred[self._arch]
def setting_available(self):
return self._arch in self._preferred
def preferred_clocksource_available(self):
return self._preferred[self._arch] in self._available_clocksources()
def recommendation_if_unavailable(self):
return self._recommendation_if_unavailable[self._arch]
class SystemPerfTuner(PerfTunerBase):
def __init__(self, args):
super().__init__(args)
self._clocksource_manager = ClocksourceManager(args)
def tune(self):
if self.args.tune_clock:
if not self._clocksource_manager.setting_available():
perftune_print("Clocksource setting not available or not needed for this architecture. Not tuning");
elif not self._clocksource_manager.preferred_clocksource_available():
perftune_print(self._clocksource_manager.recommendation_if_unavailable())
else:
self._clocksource_manager.enforce_preferred_clocksource()
#### Protected methods ##########################
def _get_def_mode(self):
"""
This tuner doesn't apply any restriction to the final tune mode for now.
"""
return PerfTunerBase.SupportedModes.no_irq_restrictions
def _get_irqs(self):
return []
#################################################
class DiskPerfTuner(PerfTunerBase):
class SupportedDiskTypes(enum.IntEnum):
nvme = 0
non_nvme = 1
def __init__(self, args):
super().__init__(args)
if not (self.args.dirs or self.args.devs):
raise Exception("'disks' tuning was requested but neither directories nor storage devices were given")
self.__pyudev_ctx = pyudev.Context()
self.__dir2disks = self.__learn_directories()
self.__irqs2procline = get_irqs2procline_map()
self.__disk2irqs = self.__learn_irqs()
self.__type2diskinfo = self.__group_disks_info_by_type()
# sets of devices that have already been tuned
self.__io_scheduler_tuned_devs = set()
self.__nomerges_tuned_devs = set()
self.__write_back_cache_tuned_devs = set()
#### Public methods #############################
def tune(self):
"""
Distribute IRQs according to the requested mode (args.mode):
- Distribute NVMe disks' IRQs equally among all available CPUs.
- Distribute non-NVMe disks' IRQs equally among designated CPUs or among
all available CPUs in the 'mq' mode.
"""
mode_cpu_mask = PerfTunerBase.irqs_cpu_mask_for_mode(self.mode, self.args.cpu_mask)
non_nvme_disks, non_nvme_irqs = self.__disks_info_by_type(DiskPerfTuner.SupportedDiskTypes.non_nvme)
if non_nvme_disks:
perftune_print("Setting non-NVMe disks: {}...".format(", ".join(non_nvme_disks)))
distribute_irqs(non_nvme_irqs, mode_cpu_mask)
self.__tune_disks(non_nvme_disks)
else:
perftune_print("No non-NVMe disks to tune")
nvme_disks, nvme_irqs = self.__disks_info_by_type(DiskPerfTuner.SupportedDiskTypes.nvme)
if nvme_disks:
# Linux kernel is going to use IRQD_AFFINITY_MANAGED mode for NVMe IRQs
# on most systems (currently only AWS i3 non-metal are known to have a
# different configuration). SMP affinity of an IRQ in this mode may not be
# changed and an attempt to modify it is going to fail. However right now
# the only way to determine that IRQD_AFFINITY_MANAGED mode has been used
# is to attempt to modify IRQ SMP affinity (and fail) therefore we prefer
# to always do it.
#
# What we don't want however is to see annoying errors every time we
# detect that IRQD_AFFINITY_MANAGED was actually used. Therefore we will only log
# them in the "verbose" mode or when we run on an i3.nonmetal AWS instance.
perftune_print("Setting NVMe disks: {}...".format(", ".join(nvme_disks)))
distribute_irqs(nvme_irqs, self.args.cpu_mask,
log_errors=(self.is_aws_i3_non_metal_instance or self.args.verbose))
self.__tune_disks(nvme_disks)
else:
perftune_print("No NVMe disks to tune")
#### Protected methods ##########################
def _get_def_mode(self):
"""
Return a default configuration mode.
"""
# if the only disks we are tuning are NVMe disks - return the MQ mode
non_nvme_disks, non_nvme_irqs = self.__disks_info_by_type(DiskPerfTuner.SupportedDiskTypes.non_nvme)
if not non_nvme_disks:
return PerfTunerBase.SupportedModes.mq
num_cores = int(run_hwloc_calc(['--number-of', 'core', 'machine:0', '--restrict', self.args.cpu_mask]))
num_PUs = int(run_hwloc_calc(['--number-of', 'PU', 'machine:0', '--restrict', self.args.cpu_mask]))
if num_PUs <= 4:
return PerfTunerBase.SupportedModes.mq
elif num_cores <= 4:
return PerfTunerBase.SupportedModes.sq
else:
return PerfTunerBase.SupportedModes.sq_split
def _get_irqs(self):
return itertools.chain.from_iterable(irqs for disks, irqs in self.__type2diskinfo.values())
#### Private methods ############################
@property
def __io_schedulers(self):
"""
:return: An ordered list of IO schedulers that we want to configure. Schedulers are ordered by their priority
from the highest (left most) to the lowest.
"""
return ["none", "noop"]
@property
def __nomerges(self):
return '2'
@property
def __write_cache_config(self):
"""
        :return: None if write cache mode configuration is not requested, otherwise the corresponding write cache
        configuration value string
"""
if self.args.set_write_back is None:
return None
return "write back" if self.args.set_write_back else "write through"
def __disks_info_by_type(self, disks_type):
"""
Returns a tuple ( [<disks>], [<irqs>] ) for the given disks type.
        IRQ numbers in the second list are guaranteed to be unique.
"""
return self.__type2diskinfo[DiskPerfTuner.SupportedDiskTypes(disks_type)]
def __nvme_fast_path_irq_filter(self, irq):
"""
Return True for fast path NVMe IRQs.
For NVMe device only queues 1-<number of CPUs> are going to do fast path work.
NVMe IRQs have the following name convention:
nvme<device index>q<queue index>, e.g. nvme0q7
:param irq: IRQ number
:return: True if this IRQ is an IRQ of a FP NVMe queue.
"""
nvme_irq_re = re.compile(r'(\s|^)nvme\d+q(\d+)(\s|$)')
        # There may be more than a single HW queue bound to the same IRQ. In this case queue names are going to be
        # comma separated
split_line = self.__irqs2procline[irq].split(",")
for line in split_line:
m = nvme_irq_re.search(line)
if m and 0 < int(m.group(2)) <= multiprocessing.cpu_count():
return True
return False
def __group_disks_info_by_type(self):
"""
Return a map of tuples ( [<disks>], [<irqs>] ), where "disks" are all disks of the specific type
and "irqs" are the corresponding IRQs.
        It's guaranteed that every element in "disks" and "irqs" is unique.
The disk types are 'nvme' and 'non-nvme'
"""
disks_info_by_type = {}
nvme_disks = set()
nvme_irqs = set()
non_nvme_disks = set()
non_nvme_irqs = set()
nvme_disk_name_pattern = re.compile('^nvme')
for disk, irqs in self.__disk2irqs.items():
if nvme_disk_name_pattern.search(disk):
nvme_disks.add(disk)
for irq in irqs:
nvme_irqs.add(irq)
else:
non_nvme_disks.add(disk)
for irq in irqs:
non_nvme_irqs.add(irq)
if not (nvme_disks or non_nvme_disks):
raise Exception("'disks' tuning was requested but no disks were found")
nvme_irqs = list(nvme_irqs)
# There is a known issue with Xen hypervisor that exposes itself on AWS i3 instances where nvme module
# over-allocates HW queues and uses only queues 1,2,3,..., <up to number of CPUs> for data transfer.
# On these instances we will distribute only these queues.
if self.is_aws_i3_non_metal_instance:
nvme_irqs = list(filter(self.__nvme_fast_path_irq_filter, nvme_irqs))
# Sort IRQs for easier verification
nvme_irqs.sort(key=lambda irq_num_str: int(irq_num_str))
disks_info_by_type[DiskPerfTuner.SupportedDiskTypes.nvme] = (list(nvme_disks), nvme_irqs)
disks_info_by_type[DiskPerfTuner.SupportedDiskTypes.non_nvme] = ( list(non_nvme_disks), list(non_nvme_irqs) )
return disks_info_by_type
def __learn_directories(self):
return { directory : self.__learn_directory(directory) for directory in self.args.dirs }
def __learn_directory(self, directory, recur=False):
"""
        Returns a list of disks the given directory is mounted on (there will be more than one if
        the mount point is on a RAID volume)
"""
if not os.path.exists(directory):
if not recur:
perftune_print("{} doesn't exist - skipping".format(directory))
return []
try:
udev_obj = pyudev.Devices.from_device_number(self.__pyudev_ctx, 'block', os.stat(directory).st_dev)
return self.__get_phys_devices(udev_obj)
except:
# handle cases like ecryptfs where the directory is mounted to another directory and not to some block device
filesystem = run_read_only_command(['df', '-P', directory]).splitlines()[-1].split()[0].strip()
if not re.search(r'^/dev/', filesystem):
devs = self.__learn_directory(filesystem, True)
else:
raise Exception("Logic error: failed to create a udev device while 'df -P' {} returns a {}".format(directory, filesystem))
# log error only for the original directory
if not recur and not devs:
perftune_print("Can't get a block device for {} - skipping".format(directory))
return devs
def __get_phys_devices(self, udev_obj):
# if device is a virtual device - the underlying physical devices are going to be its slaves
if re.search(r'virtual', udev_obj.sys_path):
slaves = os.listdir(os.path.join(udev_obj.sys_path, 'slaves'))
            # If the device is virtual but doesn't have slaves (e.g. nvme-subsystem virtual devices) handle it
# as a regular device.
if slaves:
return list(itertools.chain.from_iterable([ self.__get_phys_devices(pyudev.Devices.from_device_file(self.__pyudev_ctx, "/dev/{}".format(slave))) for slave in slaves ]))
# device node is something like /dev/sda1 - we need only the part without /dev/
return [ re.match(r'/dev/(\S+\d*)', udev_obj.device_node).group(1) ]
def __learn_irqs(self):
disk2irqs = {}
for devices in list(self.__dir2disks.values()) + [ self.args.devs ]:
for device in devices:
                # It could be that some of the given directories are on the same disk.
                # There is no need to rediscover IRQs of a disk we've already handled.
if device in disk2irqs.keys():
continue
udev_obj = pyudev.Devices.from_device_file(self.__pyudev_ctx, "/dev/{}".format(device))
dev_sys_path = udev_obj.sys_path
                # If the device is a virtual NVMe device its sys file name goes as follows:
# /sys/devices/virtual/nvme-subsystem/nvme-subsys0/nvme0n1
#
# and then there is this symlink:
# /sys/devices/virtual/nvme-subsystem/nvme-subsys0/nvme0n1/device/nvme0 -> ../../../pci0000:85/0000:85:01.0/0000:87:00.0/nvme/nvme0
#
# So, the "main device" is a "nvme\d+" prefix of the actual device name.
if re.search(r'virtual', udev_obj.sys_path):
m = re.match(r'(nvme\d+)\S*', device)
if m:
dev_sys_path = "{}/device/{}".format(udev_obj.sys_path, m.group(1))
split_sys_path = list(pathlib.PurePath(pathlib.Path(dev_sys_path).resolve()).parts)
# first part is always /sys/devices/pciXXX ...
controller_path_parts = split_sys_path[0:4]
# ...then there is a chain of one or more "domain:bus:device.function" followed by the storage device enumeration crap
# e.g. /sys/devices/pci0000:00/0000:00:1f.2/ata2/host1/target1:0:0/1:0:0:0/block/sda/sda3 or
# /sys/devices/pci0000:00/0000:00:02.0/0000:02:00.0/host6/target6:2:0/6:2:0:0/block/sda/sda1
# We want only the path till the last BDF including - it contains the IRQs information.
patt = re.compile("^[0-9ABCDEFabcdef]{4}\:[0-9ABCDEFabcdef]{2}\:[0-9ABCDEFabcdef]{2}\.[0-9ABCDEFabcdef]$")
for split_sys_path_branch in split_sys_path[4:]:
if patt.search(split_sys_path_branch):
controller_path_parts.append(split_sys_path_branch)
else:
break
controler_path_str = functools.reduce(lambda x, y : os.path.join(x, y), controller_path_parts)
disk2irqs[device] = learn_all_irqs_one(controler_path_str, self.__irqs2procline, 'blkif')
return disk2irqs
def __get_feature_file(self, dev_node, path_creator):
"""
Find the closest ancestor with the given feature and return its ('feature file', 'device node') tuple.
If there isn't such an ancestor - return (None, None) tuple.
:param dev_node Device node file name, e.g. /dev/sda1
:param path_creator A functor that creates a feature file name given a device system file name
"""
# Sanity check
if dev_node is None or path_creator is None:
return None, None
udev = pyudev.Devices.from_device_file(pyudev.Context(), dev_node)
feature_file = path_creator(udev.sys_path)
if os.path.exists(feature_file):
return feature_file, dev_node
elif udev.parent is not None:
return self.__get_feature_file(udev.parent.device_node, path_creator)
else:
return None, None
def __tune_one_feature(self, dev_node, path_creator, value, tuned_devs_set):
"""
Find the closest ancestor that has the given feature, configure it and
return True.
If there isn't such ancestor - return False.
:param dev_node Device node file name, e.g. /dev/sda1
:param path_creator A functor that creates a feature file name given a device system file name
"""
feature_file, feature_node = self.__get_feature_file(dev_node, path_creator)
if feature_file is None:
return False
if feature_node not in tuned_devs_set:
fwriteln_and_log(feature_file, value)
tuned_devs_set.add(feature_node)
return True
def __tune_io_scheduler(self, dev_node, io_scheduler):
return self.__tune_one_feature(dev_node, lambda p : os.path.join(p, 'queue', 'scheduler'), io_scheduler, self.__io_scheduler_tuned_devs)
def __tune_nomerges(self, dev_node):
return self.__tune_one_feature(dev_node, lambda p : os.path.join(p, 'queue', 'nomerges'), self.__nomerges, self.__nomerges_tuned_devs)
# If write cache configuration is not requested - return True immediately
def __tune_write_back_cache(self, dev_node):
if self.__write_cache_config is None:
return True
return self.__tune_one_feature(dev_node, lambda p : os.path.join(p, 'queue', 'write_cache'), self.__write_cache_config, self.__write_back_cache_tuned_devs)
def __get_io_scheduler(self, dev_node):
"""
Return a supported scheduler that is also present in the required schedulers list (__io_schedulers).
If there isn't such a supported scheduler - return None.
"""
feature_file, feature_node = self.__get_feature_file(dev_node, lambda p : os.path.join(p, 'queue', 'scheduler'))
lines = readlines(feature_file)
if not lines:
return None
# Supported schedulers appear in the config file as a single line as follows:
#
# sched1 [sched2] sched3
#
        # ...with one or more schedulers where the currently selected scheduler is the one in brackets.
#
# Return the scheduler with the highest priority among those that are supported for the current device.
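        # e.g. a (hypothetical) line "[none] mq-deadline kyber" yields {"none", "mq-deadline", "kyber"}
        # and "none" is returned, since it comes first in __io_schedulers.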
supported_schedulers = frozenset([scheduler.lstrip("[").rstrip("]").rstrip("\n") for scheduler in lines[0].split(" ")])
return next((scheduler for scheduler in self.__io_schedulers if scheduler in supported_schedulers), None)
def __tune_disk(self, device):
dev_node = "/dev/{}".format(device)
io_scheduler = self.__get_io_scheduler(dev_node)
if not io_scheduler:
perftune_print("Not setting I/O Scheduler for {} - required schedulers ({}) are not supported".format(device, list(self.__io_schedulers)))
elif not self.__tune_io_scheduler(dev_node, io_scheduler):
perftune_print("Not setting I/O Scheduler for {} - feature not present".format(device))
if not self.__tune_nomerges(dev_node):
perftune_print("Not setting 'nomerges' for {} - feature not present".format(device))
if not self.__tune_write_back_cache(dev_node):
perftune_print("Not setting 'write_cache' for {} - feature not present".format(device))
def __tune_disks(self, disks):
for disk in disks:
self.__tune_disk(disk)
################################################################################
class TuneModes(enum.Enum):
disks = 0
net = 1
system = 2
@staticmethod
def names():
return list(TuneModes.__members__.keys())
argp = argparse.ArgumentParser(description = 'Configure various system parameters in order to improve the seastar application performance.', formatter_class=argparse.RawDescriptionHelpFormatter,
epilog=
'''
This script will:
- Ban relevant IRQs from being moved by irqbalance.
- Configure various system parameters in /proc/sys.
- Distribute the IRQs (using SMP affinity configuration) among CPUs according to the configuration mode (see below).
As a result some of the CPUs may be destined to only handle the IRQs and taken out of the CPU set
that should be used to run the seastar application ("compute CPU set").
Modes description:
 sq - set all IRQs of a given NIC to CPU0 and configure RPS
      to spread NAPIs' handling between other CPUs.
 sq_split - divide all IRQs of a given NIC between CPU0 and its HT siblings and configure RPS
      to spread NAPIs' handling between other CPUs.
 mq - distribute NIC's IRQs among all CPUs instead of binding
      them all to CPU0. In this mode RPS is always enabled to
      spread NAPIs' handling between all CPUs.
If no mode is given, the script will use a default mode:
    - If the number of physical CPU cores per Rx HW queue is greater than 4 - use the 'sq_split' mode.
    - Otherwise, if the number of hyperthreads per Rx HW queue is greater than 4 - use the 'sq' mode.
    - Otherwise use the 'mq' mode.
Default values:
--nic NIC - default: eth0
--cpu-mask MASK - default: all available cores mask
--tune-clock - default: false
''')
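# A hypothetical invocation (for illustration only): perftune.py --tune net --nic eth0 --mode sq_split --dry-run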
argp.add_argument('--mode', choices=PerfTunerBase.SupportedModes.names(), help='configuration mode')
argp.add_argument('--nic', action='append', help='network interface name(s), by default uses \'eth0\' (may appear more than once)', dest='nics', default=[])
argp.add_argument('--tune-clock', action='store_true', help='Force tuning of the system clocksource')
argp.add_argument('--get-cpu-mask', action='store_true', help="print the CPU mask to be used for compute")
argp.add_argument('--get-cpu-mask-quiet', action='store_true', help="print the CPU mask to be used for compute, print the zero CPU set if that's what it turns out to be")
argp.add_argument('--verbose', action='store_true', help="be more verbose about operations and their result")
argp.add_argument('--tune', choices=TuneModes.names(), help="components to configure (may be given more than once)", action='append', default=[])
argp.add_argument('--cpu-mask', help="mask of cores to use, by default use all available cores", metavar='MASK')
argp.add_argument('--irq-cpu-mask', help="mask of cores to use for IRQs binding", metavar='MASK')
argp.add_argument('--dir', help="directory to optimize (may appear more than once)", action='append', dest='dirs', default=[])
argp.add_argument('--dev', help="device to optimize (may appear more than once), e.g. sda1", action='append', dest='devs', default=[])
argp.add_argument('--options-file', help="configuration YAML file")
argp.add_argument('--dump-options-file', action='store_true', help="Print the configuration YAML file containing the current configuration")
argp.add_argument('--dry-run', action='store_true', help="Don't take any action, just recommend what to do.")
argp.add_argument('--write-back-cache', help="Enable/Disable \'write back\' write cache mode.", dest="set_write_back")
def parse_cpu_mask_from_yaml(y, field_name, fname):
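    # Accepts masks such as "0xff" or "0xffffffff,0x0,0x1" (hypothetical examples);
    # each comma-separated word is at most a 32-bit hex value.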
hex_32bit_pattern='0x[0-9a-fA-F]{1,8}'
mask_pattern = re.compile('^{}((,({})?)*,{})*$'.format(hex_32bit_pattern, hex_32bit_pattern, hex_32bit_pattern))
if mask_pattern.match(str(y[field_name])):
return y[field_name]
else:
raise Exception("Bad '{}' value in {}: {}".format(field_name, fname, str(y[field_name])))
def extend_and_unique(orig_list, iterable):
"""
    Extend a list with items from another list and make the resulting items unique
"""
assert(isinstance(orig_list, list))
assert(isinstance(iterable, list))
orig_list.extend(iterable)
return list(set(orig_list))
def parse_options_file(prog_args):
if not prog_args.options_file:
return
y = yaml.safe_load(open(prog_args.options_file))
if y is None:
return
if 'mode' in y and not prog_args.mode:
if not y['mode'] in PerfTunerBase.SupportedModes.names():
raise Exception("Bad 'mode' value in {}: {}".format(prog_args.options_file, y['mode']))
prog_args.mode = y['mode']
if 'nic' in y:
        # Support for multiple NICs was added by commit a2fc9d72c31b97840bc75ae49dbd6f4b6d394e25.
        # The 'nic' option dumped to a config file is a list after this change, but the 'nic'
        # option in old config files, generated before this change, is still a string.
        # So convert the string option to a list here.
if not isinstance(y['nic'], list):
y['nic'] = [y['nic']]
prog_args.nics = extend_and_unique(prog_args.nics, y['nic'])
if 'tune_clock' in y and not prog_args.tune_clock:
prog_args.tune_clock= y['tune_clock']
if 'tune' in y:
if set(y['tune']) <= set(TuneModes.names()):
prog_args.tune = extend_and_unique(prog_args.tune, y['tune'])
else:
raise Exception("Bad 'tune' value in {}: {}".format(prog_args.options_file, y['tune']))
if 'cpu_mask' in y and not prog_args.cpu_mask:
prog_args.cpu_mask = parse_cpu_mask_from_yaml(y, 'cpu_mask', prog_args.options_file)
if 'irq_cpu_mask' in y and not prog_args.irq_cpu_mask:
prog_args.irq_cpu_mask = parse_cpu_mask_from_yaml(y, 'irq_cpu_mask', prog_args.options_file)
if 'dir' in y:
prog_args.dirs = extend_and_unique(prog_args.dirs, y['dir'])
if 'dev' in y:
prog_args.devs = extend_and_unique(prog_args.devs, y['dev'])
if 'write_back_cache' in y:
prog_args.set_write_back = distutils.util.strtobool("{}".format(y['write_back_cache']))
def dump_config(prog_args):
prog_options = {}
if prog_args.mode:
prog_options['mode'] = prog_args.mode
if prog_args.nics:
prog_options['nic'] = prog_args.nics
if prog_args.tune_clock:
prog_options['tune_clock'] = prog_args.tune_clock
if prog_args.tune:
prog_options['tune'] = prog_args.tune
if prog_args.cpu_mask:
prog_options['cpu_mask'] = prog_args.cpu_mask
if prog_args.irq_cpu_mask:
prog_options['irq_cpu_mask'] = prog_args.irq_cpu_mask
if prog_args.dirs:
prog_options['dir'] = prog_args.dirs
if prog_args.devs:
prog_options['dev'] = prog_args.devs
if prog_args.set_write_back is not None:
prog_options['write_back_cache'] = prog_args.set_write_back
perftune_print(yaml.dump(prog_options, default_flow_style=False))
################################################################################
args = argp.parse_args()
# Sanity check
try:
if args.set_write_back:
args.set_write_back = distutils.util.strtobool(args.set_write_back)
except:
sys.exit("Invalid --write-back-cache value: should be boolean but given: {}".format(args.set_write_back))
dry_run_mode = args.dry_run
parse_options_file(args)
# if nothing needs to be configured - quit
if not args.tune:
sys.exit("ERROR: At least one tune mode MUST be given.")
# There must be either 'mode' or an explicit 'irq_cpu_mask' given - not both
if args.mode and args.irq_cpu_mask:
sys.exit("ERROR: Provide either tune mode or IRQs CPU mask - not both.")
# set default values #####################
if not args.nics:
args.nics = ['eth0']
if not args.cpu_mask:
args.cpu_mask = run_hwloc_calc(['all'])
##########################################
# Sanity: irq_cpu_mask should be a subset of cpu_mask
if args.irq_cpu_mask and run_hwloc_calc([args.cpu_mask]) != run_hwloc_calc([args.cpu_mask, args.irq_cpu_mask]):
sys.exit("ERROR: IRQ CPU mask({}) must be a subset of CPU mask({})".format(args.irq_cpu_mask, args.cpu_mask))
if args.dump_options_file:
dump_config(args)
sys.exit(0)
try:
tuners = []
if TuneModes.disks.name in args.tune:
tuners.append(DiskPerfTuner(args))
if TuneModes.net.name in args.tune:
tuners.append(NetPerfTuner(args))
if TuneModes.system.name in args.tune:
tuners.append(SystemPerfTuner(args))
# Set the minimum mode among all tuners
if not args.irq_cpu_mask:
mode = PerfTunerBase.SupportedModes.combine([tuner.mode for tuner in tuners])
for tuner in tuners:
tuner.mode = mode
if args.get_cpu_mask or args.get_cpu_mask_quiet:
# Print the compute mask from the first tuner - it's going to be the same in all of them
perftune_print(tuners[0].compute_cpu_mask)
else:
# Tune the system
restart_irqbalance(itertools.chain.from_iterable([ tuner.irqs for tuner in tuners ]))
for tuner in tuners:
tuner.tune()
except PerfTunerBase.CPUMaskIsZeroException as e:
# Print a zero CPU set if --get-cpu-mask-quiet was requested.
if args.get_cpu_mask_quiet:
perftune_print("0x0")
else:
sys.exit("ERROR: {}. Your system can't be tuned until the issue is fixed.".format(e))
except Exception as e:
sys.exit("ERROR: {}. Your system can't be tuned until the issue is fixed.".format(e))
|
apache-2.0
| -1,003,424,227,751,248,800
| 39.852902
| 264
| 0.611516
| false
| 3.612939
| true
| false
| false
|
flant/dapp
|
pkg/build/builder/ansible/callback/werf.py
|
1
|
2679
|
# -*- coding: utf-8 -*-
# (c) 2018, Ivan Mikheykin <ivan.mikheykin@flant.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
callback: werf
type: stdout
short_description: Print related werf config section in case of task failure
version_added: "2.4"
description:
- Solo mode with live stdout for raw and script tasks
- Werf specific error messages
requirements:
- set as stdout callback in configuration
'''
from callback.live import CallbackModule as CallbackModule_live
from callback.live import vt100, lColor
from ansible import constants as C
from ansible.utils.color import stringc
import os
import json
class CallbackModule(CallbackModule_live):
CALLBACK_VERSION = 2.0
CALLBACK_TYPE = 'stdout'
CALLBACK_NAME = 'werf'
def __init__(self):
self.super_ref = super(CallbackModule, self)
self.super_ref.__init__()
def v2_runner_on_failed(self, result, ignore_errors=False):
self.super_ref.v2_runner_on_failed(result, ignore_errors)
        # get config sections from werf:
        # the task config text is referenced by the last tag,
        # the doc text is in the file pointed to by WERF_DUMP_CONFIG_DOC_PATH
self._display_werf_config(result._task)
def _read_dump_config_doc(self):
# read content from file in WERF_DUMP_CONFIG_DOC_PATH env
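        # The dump file is assumed to be JSON shaped roughly like
        #   {"dump_config_doc": "<doc text>", "dump_config_sections": {"<tag>": "<config text>"}}
        # (inferred from how _display_werf_config consumes it).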
if 'WERF_DUMP_CONFIG_DOC_PATH' not in os.environ:
return {}
dump_path = os.environ['WERF_DUMP_CONFIG_DOC_PATH']
res = {}
try:
fh = open(dump_path, 'r')
res = json.load(fh)
fh.close()
except:
pass
return res
# werf_stage_name commented for consistency with werffile-yml behaviour
def _display_werf_config(self, task):
tags = task.tags
if not tags or len(tags) == 0:
return
# last tag is a key to a section dump in dump_config
dump_config_section_key = tags[-1]
dump_config = self._read_dump_config_doc()
dump_config_doc = dump_config.get('dump_config_doc', '')
dump_config_sections = dump_config.get('dump_config_sections', {})
dump_config_section = dump_config_sections.get(dump_config_section_key, '')
self.LogArgs(
u"\n",
lColor.COLOR_DEBUG, u"Failed task configuration:\n\n", vt100.reset,
stringc(dump_config_section, C.COLOR_DEBUG),
u"\n",
stringc(dump_config_doc, C.COLOR_DEBUG),
u"\n")
|
apache-2.0
| -6,994,132,084,483,275,000
| 31.670732
| 92
| 0.628593
| false
| 3.605653
| true
| false
| false
|
gatsinski/kindergarten-management-system
|
kindergarten_management_system/kms/contrib/cms_carousel/migrations/0001_initial.py
|
1
|
1898
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import filer.fields.image
class Migration(migrations.Migration):
dependencies = [
('cms', '0016_auto_20160608_1535'),
('filer', '0007_auto_20161016_1055'),
]
operations = [
migrations.CreateModel(
name='CarouselContainerPluginModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(related_name='cms_carousel_carouselcontainerpluginmodel', parent_link=True, auto_created=True, serialize=False, primary_key=True, to='cms.CMSPlugin')),
('title', models.CharField(max_length=254, verbose_name='Title')),
('slug', models.SlugField(max_length=254, verbose_name='Slug')),
],
options={
'verbose_name_plural': 'Carousels',
'verbose_name': 'Carousel',
},
bases=('cms.cmsplugin',),
),
migrations.CreateModel(
name='CarouselImagePluginModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(related_name='cms_carousel_carouselimagepluginmodel', parent_link=True, auto_created=True, serialize=False, primary_key=True, to='cms.CMSPlugin')),
('title', models.CharField(max_length=254, verbose_name='Text', blank=True)),
('text', models.TextField(max_length=1000, verbose_name='Text', blank=True)),
('image', filer.fields.image.FilerImageField(related_name='carousel_images', on_delete=django.db.models.deletion.PROTECT, verbose_name='Image', to='filer.Image')),
],
options={
'verbose_name_plural': 'Carousel images',
'verbose_name': 'Carousel image',
},
bases=('cms.cmsplugin',),
),
]
|
gpl-3.0
| -9,153,950,609,728,425,000
| 42.136364
| 206
| 0.591149
| false
| 4.029724
| false
| false
| false
|
yousrabk/mne-python
|
mne/viz/ica.py
|
1
|
30591
|
"""Functions to plot ICA specific data (besides topographies)
"""
from __future__ import print_function
# Authors: Denis Engemann <denis.engemann@gmail.com>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# Teon Brooks <teon.brooks@gmail.com>
#
# License: Simplified BSD
from functools import partial
import numpy as np
from .utils import (tight_layout, _prepare_trellis, _select_bads,
_layout_figure, _plot_raw_onscroll, _mouse_click,
_helper_raw_resize, _plot_raw_onkey, plt_show)
from .raw import _prepare_mne_browse_raw, _plot_raw_traces
from .epochs import _prepare_mne_browse_epochs
from .evoked import _butterfly_on_button_press, _butterfly_onpick
from .topomap import _prepare_topo_plot, plot_topomap
from ..utils import logger
from ..defaults import _handle_default
from ..io.meas_info import create_info
from ..io.pick import pick_types
from ..externals.six import string_types
def _ica_plot_sources_onpick_(event, sources=None, ylims=None):
"""Onpick callback for plot_ica_panel"""
# make sure that the swipe gesture in OS-X doesn't open many figures
if event.mouseevent.inaxes is None or event.mouseevent.button != 1:
return
artist = event.artist
try:
import matplotlib.pyplot as plt
plt.figure()
src_idx = artist._mne_src_idx
component = artist._mne_component
plt.plot(sources[src_idx], 'r' if artist._mne_is_bad else 'k')
plt.ylim(ylims)
plt.grid(linestyle='-', color='gray', linewidth=.25)
plt.title('ICA #%i' % component)
except Exception as err:
# matplotlib silently ignores exceptions in event handlers, so we print
# it here to know what went wrong
print(err)
raise err
def plot_ica_sources(ica, inst, picks=None, exclude=None, start=None,
stop=None, show=True, title=None, block=False):
"""Plot estimated latent sources given the unmixing matrix.
    Typical use cases:
    1. plot evolution of latent sources over time (Raw input)
    2. plot latent source around event related time windows (Epochs input)
    3. plot time-locking in ICA space (Evoked input)
Parameters
----------
ica : instance of mne.preprocessing.ICA
The ICA solution.
inst : instance of mne.io.Raw, mne.Epochs, mne.Evoked
The object to plot the sources from.
picks : int | array_like of int | None.
The components to be displayed. If None, plot will show the
sources in the order as fitted.
exclude : array_like of int
The components marked for exclusion. If None (default), ICA.exclude
will be used.
start : int
X-axis start index. If None, from the beginning.
stop : int
        X-axis stop index. If None, the next 20 are shown; in case of evoked,
        data are shown to the end.
show : bool
Show figure if True.
title : str | None
The figure title. If None a default is provided.
block : bool
Whether to halt program execution until the figure is closed.
Useful for interactive selection of components in raw and epoch
plotter. For evoked, this parameter has no effect. Defaults to False.
Returns
-------
fig : instance of pyplot.Figure
The figure.
Notes
-----
For raw and epoch instances, it is possible to select components for
exclusion by clicking on the line. The selected components are added to
``ica.exclude`` on close.
.. versionadded:: 0.10.0
"""
from ..io.base import _BaseRaw
from ..evoked import Evoked
from ..epochs import _BaseEpochs
if exclude is None:
exclude = ica.exclude
elif len(ica.exclude) > 0:
exclude = np.union1d(ica.exclude, exclude)
if isinstance(inst, _BaseRaw):
fig = _plot_sources_raw(ica, inst, picks, exclude, start=start,
stop=stop, show=show, title=title,
block=block)
elif isinstance(inst, _BaseEpochs):
fig = _plot_sources_epochs(ica, inst, picks, exclude, start=start,
stop=stop, show=show, title=title,
block=block)
elif isinstance(inst, Evoked):
sources = ica.get_sources(inst)
if start is not None or stop is not None:
inst = inst.crop(start, stop, copy=True)
fig = _plot_ica_sources_evoked(
evoked=sources, picks=picks, exclude=exclude, title=title,
labels=getattr(ica, 'labels_', None), show=show)
else:
        raise ValueError('Data input must be of Raw, Epochs or Evoked type')
return fig
def _plot_ica_grid(sources, start, stop,
source_idx, ncol, exclude,
title, show):
"""Create panel plots of ICA sources
Clicking on the plot of an individual source opens a new figure showing
the source.
Parameters
----------
sources : ndarray
Sources as drawn from ica.get_sources.
start : int
x-axis start index. If None from the beginning.
stop : int
x-axis stop index. If None to the end.
n_components : int
Number of components fitted.
source_idx : array-like
Indices for subsetting the sources.
ncol : int
Number of panel-columns.
title : str
The figure title. If None a default is provided.
show : bool
If True, all open plots will be shown.
"""
import matplotlib.pyplot as plt
if source_idx is None:
source_idx = np.arange(len(sources))
elif isinstance(source_idx, list):
source_idx = np.array(source_idx)
if exclude is None:
exclude = []
n_components = len(sources)
ylims = sources.min(), sources.max()
xlims = np.arange(sources.shape[-1])[[0, -1]]
fig, axes = _prepare_trellis(n_components, ncol)
if title is None:
fig.suptitle('Reconstructed latent sources', size=16)
elif title:
fig.suptitle(title, size=16)
plt.subplots_adjust(wspace=0.05, hspace=0.05)
my_iter = enumerate(zip(source_idx, axes, sources))
for i_source, (i_selection, ax, source) in my_iter:
component = '[%i]' % i_selection
        # plot and embed idx and comp. name to use in the callback
color = 'r' if i_selection in exclude else 'k'
line = ax.plot(source, linewidth=0.5, color=color, picker=1e9)[0]
vars(line)['_mne_src_idx'] = i_source
vars(line)['_mne_component'] = i_selection
vars(line)['_mne_is_bad'] = i_selection in exclude
ax.set_xlim(xlims)
ax.set_ylim(ylims)
ax.text(0.05, .95, component, transform=ax.transAxes,
verticalalignment='top')
plt.setp(ax.get_xticklabels(), visible=False)
plt.setp(ax.get_yticklabels(), visible=False)
# register callback
callback = partial(_ica_plot_sources_onpick_, sources=sources, ylims=ylims)
fig.canvas.mpl_connect('pick_event', callback)
plt_show(show)
return fig
def _plot_ica_sources_evoked(evoked, picks, exclude, title, show, labels=None):
"""Plot average over epochs in ICA space
Parameters
----------
evoked : instance of mne.Evoked
The Evoked to be used.
picks : int | array_like of int | None.
The components to be displayed. If None, plot will show the
sources in the order as fitted.
exclude : array_like of int
The components marked for exclusion. If None (default), ICA.exclude
will be used.
title : str
The figure title.
show : bool
Show figure if True.
labels : None | dict
The ICA labels attribute.
"""
import matplotlib.pyplot as plt
if title is None:
title = 'Reconstructed latent sources, time-locked'
fig, axes = plt.subplots(1)
ax = axes
axes = [axes]
idxs = [0]
times = evoked.times * 1e3
# plot unclassified sources and label excluded ones
lines = list()
texts = list()
if picks is None:
picks = np.arange(evoked.data.shape[0])
picks = np.sort(picks)
idxs = [picks]
color = 'r'
if labels is not None:
labels_used = [k for k in labels if '/' not in k]
exclude_labels = list()
for ii in picks:
if ii in exclude:
line_label = 'ICA %03d' % (ii + 1)
if labels is not None:
annot = list()
for this_label in labels_used:
indices = labels[this_label]
if ii in indices:
annot.append(this_label)
line_label += (' - ' + ', '.join(annot))
exclude_labels.append(line_label)
else:
exclude_labels.append(None)
if labels is not None:
unique_labels = set([k.split(' - ')[1] for k in exclude_labels if k])
label_colors = plt.cm.rainbow(np.linspace(0, 1, len(unique_labels)))
label_colors = dict(zip(unique_labels, label_colors))
else:
label_colors = dict((k, 'red') for k in exclude_labels)
for exc_label, ii in zip(exclude_labels, picks):
if exc_label is not None:
if labels is not None:
exc_label = exc_label.split(' - ')[1]
color = label_colors[exc_label]
lines.extend(ax.plot(times, evoked.data[ii].T, picker=3.,
zorder=1, color=color, label=exc_label))
else:
lines.extend(ax.plot(times, evoked.data[ii].T, picker=3.,
color='k', zorder=0))
ax.set_title(title)
ax.set_xlim(times[[0, -1]])
ax.set_xlabel('Time (ms)')
ax.set_ylabel('(NA)')
if len(exclude) > 0:
plt.legend(loc='best')
tight_layout(fig=fig)
# for old matplotlib, we actually need this to have a bounding
# box (!), so we have to put some valid text here, change
# alpha and path effects later
texts.append(ax.text(0, 0, 'blank', zorder=2,
verticalalignment='baseline',
horizontalalignment='left',
fontweight='bold', alpha=0))
# this is done to give the structure of a list of lists of a group of lines
# in each subplot
lines = [lines]
ch_names = evoked.ch_names
from matplotlib import patheffects
path_effects = [patheffects.withStroke(linewidth=2, foreground="w",
alpha=0.75)]
params = dict(axes=axes, texts=texts, lines=lines, idxs=idxs,
ch_names=ch_names, need_draw=False,
path_effects=path_effects)
fig.canvas.mpl_connect('pick_event',
partial(_butterfly_onpick, params=params))
fig.canvas.mpl_connect('button_press_event',
partial(_butterfly_on_button_press,
params=params))
plt_show(show)
return fig
def plot_ica_scores(ica, scores,
exclude=None, labels=None,
axhline=None,
title='ICA component scores',
figsize=(12, 6), show=True):
"""Plot scores related to detected components.
    Use this function to assess how well your score describes outlier
    sources and how well you were detecting them.
Parameters
----------
ica : instance of mne.preprocessing.ICA
The ICA object.
scores : array_like of float, shape (n ica components) | list of arrays
Scores based on arbitrary metric to characterize ICA components.
exclude : array_like of int
The components marked for exclusion. If None (default), ICA.exclude
will be used.
labels : str | list | 'ecg' | 'eog' | None
The labels to consider for the axes tests. Defaults to None.
If list, should match the outer shape of `scores`.
If 'ecg' or 'eog', the labels_ attributes will be looked up.
Note that '/' is used internally for sublabels specifying ECG and
EOG channels.
axhline : float
Draw horizontal line to e.g. visualize rejection threshold.
title : str
The figure title.
figsize : tuple of int
The figure size. Defaults to (12, 6).
show : bool
Show figure if True.
Returns
-------
fig : instance of matplotlib.pyplot.Figure
The figure object
"""
import matplotlib.pyplot as plt
my_range = np.arange(ica.n_components_)
if exclude is None:
exclude = ica.exclude
exclude = np.unique(exclude)
if not isinstance(scores[0], (list, np.ndarray)):
scores = [scores]
n_rows = len(scores)
figsize = (12, 6) if figsize is None else figsize
fig, axes = plt.subplots(n_rows, figsize=figsize, sharex=True, sharey=True)
if isinstance(axes, np.ndarray):
axes = axes.flatten()
else:
axes = [axes]
plt.suptitle(title)
if labels == 'ecg':
labels = [l for l in ica.labels_ if l.startswith('ecg/')]
elif labels == 'eog':
labels = [l for l in ica.labels_ if l.startswith('eog/')]
labels.sort(key=lambda l: l.split('/')[1]) # sort by index
elif isinstance(labels, string_types):
if len(axes) > 1:
raise ValueError('Need as many labels as axes (%i)' % len(axes))
labels = [labels]
elif isinstance(labels, (tuple, list)):
if len(labels) != len(axes):
raise ValueError('Need as many labels as axes (%i)' % len(axes))
elif labels is None:
labels = (None, None)
for label, this_scores, ax in zip(labels, scores, axes):
if len(my_range) != len(this_scores):
raise ValueError('The length of `scores` must equal the '
'number of ICA components.')
ax.bar(my_range, this_scores, color='w')
for excl in exclude:
ax.bar(my_range[excl], this_scores[excl], color='r')
if axhline is not None:
if np.isscalar(axhline):
axhline = [axhline]
for axl in axhline:
ax.axhline(axl, color='r', linestyle='--')
ax.set_ylabel('score')
if label is not None:
if 'eog/' in label:
split = label.split('/')
label = ', '.join([split[0], split[2]])
elif '/' in label:
label = ', '.join(label.split('/'))
ax.set_title('(%s)' % label)
ax.set_xlabel('ICA components')
ax.set_xlim(0, len(this_scores))
tight_layout(fig=fig)
if len(axes) > 1:
plt.subplots_adjust(top=0.9)
plt_show(show)
return fig
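# The helper below is an illustrative usage sketch added for documentation
# purposes only; it is not part of the original module. It assumes a fitted
# ICA instance ``ica`` and the Raw object ``raw`` it was fitted on, with at
# least one EOG channel present (``find_bads_eog`` is just one way to obtain
# per-component scores).
def _example_plot_ica_scores(ica, raw):
    # score components by correlation with the EOG channel(s) and flag the
    # suspicious ones
    eog_inds, scores = ica.find_bads_eog(raw)
    return plot_ica_scores(ica, scores, exclude=eog_inds, labels='eog',
                           show=False)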
def plot_ica_overlay(ica, inst, exclude=None, picks=None, start=None,
stop=None, title=None, show=True):
"""Overlay of raw and cleaned signals given the unmixing matrix.
This method helps visualizing signal quality and artifact rejection.
Parameters
----------
ica : instance of mne.preprocessing.ICA
The ICA object.
inst : instance of mne.io.Raw or mne.Evoked
The signals to be compared given the ICA solution. If Raw input,
The raw data are displayed before and after cleaning. In a second
panel the cross channel average will be displayed. Since dipolar
sources will be canceled out this display is sensitive to
artifacts. If evoked input, butterfly plots for clean and raw
signals will be superimposed.
exclude : array_like of int
The components marked for exclusion. If None (default), ICA.exclude
will be used.
picks : array-like of int | None (default)
Indices of channels to include (if None, all channels
are used that were included on fitting).
start : int
X-axis start index. If None from the beginning.
stop : int
X-axis stop index. If None to the end.
title : str
The figure title.
show : bool
Show figure if True.
Returns
-------
fig : instance of pyplot.Figure
The figure.
"""
# avoid circular imports
from ..io.base import _BaseRaw
from ..evoked import Evoked
from ..preprocessing.ica import _check_start_stop
if not isinstance(inst, (_BaseRaw, Evoked)):
raise ValueError('Data input must be of Raw or Evoked type')
if title is None:
title = 'Signals before (red) and after (black) cleaning'
if picks is None:
picks = [inst.ch_names.index(k) for k in ica.ch_names]
if exclude is None:
exclude = ica.exclude
if isinstance(inst, _BaseRaw):
if start is None:
start = 0.0
if stop is None:
stop = 3.0
ch_types_used = [k for k in ['mag', 'grad', 'eeg'] if k in ica]
start_compare, stop_compare = _check_start_stop(inst, start, stop)
data, times = inst[picks, start_compare:stop_compare]
raw_cln = ica.apply(inst, exclude=exclude, start=start, stop=stop,
copy=True)
data_cln, _ = raw_cln[picks, start_compare:stop_compare]
fig = _plot_ica_overlay_raw(data=data, data_cln=data_cln,
times=times * 1e3, title=title,
ch_types_used=ch_types_used, show=show)
elif isinstance(inst, Evoked):
if start is not None and stop is not None:
inst = inst.crop(start, stop, copy=True)
if picks is not None:
inst.pick_channels([inst.ch_names[p] for p in picks])
evoked_cln = ica.apply(inst, exclude=exclude, copy=True)
fig = _plot_ica_overlay_evoked(evoked=inst, evoked_cln=evoked_cln,
title=title, show=show)
return fig
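# Illustrative usage sketch (not part of the original module): assumes a
# fitted ICA instance ``ica`` whose ``ica.exclude`` list is already populated
# and the Raw object ``raw`` it was fitted on; the first ten seconds are
# compared before and after removing the excluded components.
def _example_plot_ica_overlay(ica, raw):
    return plot_ica_overlay(ica, raw, start=0., stop=10., show=False)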
def _plot_ica_overlay_raw(data, data_cln, times, title, ch_types_used, show):
"""Plot evoked after and before ICA cleaning
Parameters
----------
ica : instance of mne.preprocessing.ICA
The ICA object.
epochs : instance of mne.Epochs
The Epochs to be regarded.
show : bool
Show figure if True.
Returns
-------
fig : instance of pyplot.Figure
"""
import matplotlib.pyplot as plt
# Restore sensor space data and keep all PCA components
    # let's now compare the data before and after cleaning.
# first the raw data
assert data.shape == data_cln.shape
fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True)
plt.suptitle(title)
ax1.plot(times, data.T, color='r')
ax1.plot(times, data_cln.T, color='k')
    ax1.set_xlabel('time (ms)')
    ax1.set_xlim(times[0], times[-1])
ax1.set_title('Raw data')
_ch_types = {'mag': 'Magnetometers',
'grad': 'Gradiometers',
'eeg': 'EEG'}
ch_types = ', '.join([_ch_types[k] for k in ch_types_used])
ax2.set_title('Average across channels ({0})'.format(ch_types))
    ax2.plot(times, data.mean(0), color='r')
    ax2.plot(times, data_cln.mean(0), color='k')
    ax2.set_xlabel('time (ms)')
    ax2.set_xlim(times[0], times[-1])
tight_layout(fig=fig)
fig.subplots_adjust(top=0.90)
fig.canvas.draw()
plt_show(show)
return fig
def _plot_ica_overlay_evoked(evoked, evoked_cln, title, show):
"""Plot evoked after and before ICA cleaning
Parameters
----------
ica : instance of mne.preprocessing.ICA
The ICA object.
epochs : instance of mne.Epochs
The Epochs to be regarded.
show : bool
If True, all open plots will be shown.
Returns
-------
fig : instance of pyplot.Figure
"""
import matplotlib.pyplot as plt
ch_types_used = [c for c in ['mag', 'grad', 'eeg'] if c in evoked]
n_rows = len(ch_types_used)
ch_types_used_cln = [c for c in ['mag', 'grad', 'eeg'] if
c in evoked_cln]
if len(ch_types_used) != len(ch_types_used_cln):
raise ValueError('Raw and clean evokeds must match. '
'Found different channels.')
fig, axes = plt.subplots(n_rows, 1)
fig.suptitle('Average signal before (red) and after (black) ICA')
axes = axes.flatten() if isinstance(axes, np.ndarray) else axes
evoked.plot(axes=axes, show=show)
for ax in fig.axes:
for l in ax.get_lines():
l.set_color('r')
fig.canvas.draw()
evoked_cln.plot(axes=axes, show=show)
tight_layout(fig=fig)
fig.subplots_adjust(top=0.90)
fig.canvas.draw()
plt_show(show)
return fig
def _plot_sources_raw(ica, raw, picks, exclude, start, stop, show, title,
block):
"""Function for plotting the ICA components as raw array."""
color = _handle_default('color', (0., 0., 0.))
orig_data = ica._transform_raw(raw, 0, len(raw.times)) * 0.2
if picks is None:
picks = range(len(orig_data))
types = ['misc' for _ in picks]
picks = list(sorted(picks))
eog_chs = pick_types(raw.info, meg=False, eog=True, ref_meg=False)
ecg_chs = pick_types(raw.info, meg=False, ecg=True, ref_meg=False)
data = [orig_data[pick] for pick in picks]
c_names = ['ICA %03d' % x for x in range(len(orig_data))]
for eog_idx in eog_chs:
c_names.append(raw.ch_names[eog_idx])
types.append('eog')
for ecg_idx in ecg_chs:
c_names.append(raw.ch_names[ecg_idx])
types.append('ecg')
extra_picks = np.append(eog_chs, ecg_chs).astype(int)
if len(extra_picks) > 0:
eog_ecg_data, _ = raw[extra_picks, :]
for idx in range(len(eog_ecg_data)):
if idx < len(eog_chs):
eog_ecg_data[idx] /= 150e-6 # scaling for eog
else:
eog_ecg_data[idx] /= 5e-4 # scaling for ecg
data = np.append(data, eog_ecg_data, axis=0)
for idx in range(len(extra_picks)):
picks = np.append(picks, ica.n_components_ + idx)
if title is None:
title = 'ICA components'
info = create_info([c_names[x] for x in picks], raw.info['sfreq'])
info['bads'] = [c_names[x] for x in exclude]
if start is None:
start = 0
if stop is None:
stop = start + 20
stop = min(stop, raw.times[-1])
duration = stop - start
if duration <= 0:
raise RuntimeError('Stop must be larger than start.')
t_end = int(duration * raw.info['sfreq'])
times = raw.times[0:t_end]
bad_color = (1., 0., 0.)
inds = list(range(len(picks)))
data = np.array(data)
n_channels = min([20, len(picks)])
params = dict(raw=raw, orig_data=data, data=data[:, 0:t_end],
ch_start=0, t_start=start, info=info, duration=duration,
ica=ica, n_channels=n_channels, times=times, types=types,
n_times=raw.n_times, bad_color=bad_color, picks=picks)
_prepare_mne_browse_raw(params, title, 'w', color, bad_color, inds,
n_channels)
params['scale_factor'] = 1.0
params['plot_fun'] = partial(_plot_raw_traces, params=params, inds=inds,
color=color, bad_color=bad_color)
params['update_fun'] = partial(_update_data, params)
params['pick_bads_fun'] = partial(_pick_bads, params=params)
params['label_click_fun'] = partial(_label_clicked, params=params)
_layout_figure(params)
# callbacks
callback_key = partial(_plot_raw_onkey, params=params)
params['fig'].canvas.mpl_connect('key_press_event', callback_key)
callback_scroll = partial(_plot_raw_onscroll, params=params)
params['fig'].canvas.mpl_connect('scroll_event', callback_scroll)
callback_pick = partial(_mouse_click, params=params)
params['fig'].canvas.mpl_connect('button_press_event', callback_pick)
callback_resize = partial(_helper_raw_resize, params=params)
params['fig'].canvas.mpl_connect('resize_event', callback_resize)
callback_close = partial(_close_event, params=params)
params['fig'].canvas.mpl_connect('close_event', callback_close)
params['fig_proj'] = None
params['event_times'] = None
params['update_fun']()
params['plot_fun']()
try:
plt_show(show, block=block)
except TypeError: # not all versions have this
plt_show(show)
return params['fig']
def _update_data(params):
"""Function for preparing the data on horizontal shift of the viewport."""
sfreq = params['info']['sfreq']
start = int(params['t_start'] * sfreq)
end = int((params['t_start'] + params['duration']) * sfreq)
params['data'] = params['orig_data'][:, start:end]
params['times'] = params['raw'].times[start:end]
def _pick_bads(event, params):
"""Function for selecting components on click."""
bads = params['info']['bads']
params['info']['bads'] = _select_bads(event, params, bads)
params['update_fun']()
params['plot_fun']()
def _close_event(events, params):
"""Function for excluding the selected components on close."""
info = params['info']
c_names = ['ICA %03d' % x for x in range(params['ica'].n_components_)]
exclude = [c_names.index(x) for x in info['bads'] if x.startswith('ICA')]
params['ica'].exclude = exclude
def _plot_sources_epochs(ica, epochs, picks, exclude, start, stop, show,
title, block):
"""Function for plotting the components as epochs."""
data = ica._transform_epochs(epochs, concatenate=True)
eog_chs = pick_types(epochs.info, meg=False, eog=True, ref_meg=False)
ecg_chs = pick_types(epochs.info, meg=False, ecg=True, ref_meg=False)
c_names = ['ICA %03d' % x for x in range(ica.n_components_)]
ch_types = np.repeat('misc', ica.n_components_)
for eog_idx in eog_chs:
c_names.append(epochs.ch_names[eog_idx])
ch_types = np.append(ch_types, 'eog')
for ecg_idx in ecg_chs:
c_names.append(epochs.ch_names[ecg_idx])
ch_types = np.append(ch_types, 'ecg')
extra_picks = np.append(eog_chs, ecg_chs).astype(int)
if len(extra_picks) > 0:
eog_ecg_data = np.concatenate(epochs.get_data()[:, extra_picks],
axis=1)
data = np.append(data, eog_ecg_data, axis=0)
scalings = _handle_default('scalings_plot_raw')
scalings['misc'] = 5.0
info = create_info(ch_names=c_names, sfreq=epochs.info['sfreq'],
ch_types=ch_types)
info['projs'] = list()
info['bads'] = [c_names[x] for x in exclude]
if title is None:
title = 'ICA components'
if picks is None:
picks = list(range(ica.n_components_))
if start is None:
start = 0
if stop is None:
stop = start + 20
stop = min(stop, len(epochs.events))
for idx in range(len(extra_picks)):
picks = np.append(picks, ica.n_components_ + idx)
n_epochs = stop - start
if n_epochs <= 0:
raise RuntimeError('Stop must be larger than start.')
params = {'ica': ica,
'epochs': epochs,
'info': info,
'orig_data': data,
'bads': list(),
'bad_color': (1., 0., 0.),
't_start': start * len(epochs.times)}
params['label_click_fun'] = partial(_label_clicked, params=params)
_prepare_mne_browse_epochs(params, projs=list(), n_channels=20,
n_epochs=n_epochs, scalings=scalings,
title=title, picks=picks,
order=['misc', 'eog', 'ecg'])
params['plot_update_proj_callback'] = _update_epoch_data
_update_epoch_data(params)
params['hsel_patch'].set_x(params['t_start'])
callback_close = partial(_close_epochs_event, params=params)
params['fig'].canvas.mpl_connect('close_event', callback_close)
try:
plt_show(show, block=block)
except TypeError: # not all versions have this
plt_show(show)
return params['fig']
def _update_epoch_data(params):
"""Function for preparing the data on horizontal shift."""
start = params['t_start']
n_epochs = params['n_epochs']
end = start + n_epochs * len(params['epochs'].times)
data = params['orig_data'][:, start:end]
types = params['types']
for pick, ind in enumerate(params['inds']):
params['data'][pick] = data[ind] / params['scalings'][types[pick]]
params['plot_fun']()
def _close_epochs_event(events, params):
"""Function for excluding the selected components on close."""
info = params['info']
exclude = [info['ch_names'].index(x) for x in info['bads']
if x.startswith('ICA')]
params['ica'].exclude = exclude
def _label_clicked(pos, params):
"""Function for plotting independent components on click to label."""
import matplotlib.pyplot as plt
offsets = np.array(params['offsets']) + params['offsets'][0]
line_idx = np.searchsorted(offsets, pos[1]) + params['ch_start']
if line_idx >= len(params['picks']):
return
ic_idx = [params['picks'][line_idx]]
types = list()
info = params['ica'].info
if len(pick_types(info, meg=False, eeg=True, ref_meg=False)) > 0:
types.append('eeg')
if len(pick_types(info, meg='mag', ref_meg=False)) > 0:
types.append('mag')
if len(pick_types(info, meg='grad', ref_meg=False)) > 0:
types.append('grad')
ica = params['ica']
data = np.dot(ica.mixing_matrix_[:, ic_idx].T,
ica.pca_components_[:ica.n_components_])
data = np.atleast_2d(data)
fig, axes = _prepare_trellis(len(types), max_col=3)
for ch_idx, ch_type in enumerate(types):
try:
data_picks, pos, merge_grads, _, _ = _prepare_topo_plot(ica,
ch_type,
None)
except Exception as exc:
logger.warning(exc)
plt.close(fig)
return
this_data = data[:, data_picks]
ax = axes[ch_idx]
if merge_grads:
from ..channels.layout import _merge_grad_data
for ii, data_ in zip(ic_idx, this_data):
ax.set_title('IC #%03d ' % ii + ch_type, fontsize=12)
data_ = _merge_grad_data(data_) if merge_grads else data_
plot_topomap(data_.flatten(), pos, axis=ax, show=False)
ax.set_yticks([])
ax.set_xticks([])
ax.set_frame_on(False)
tight_layout(fig=fig)
fig.subplots_adjust(top=0.95)
fig.canvas.draw()
plt_show(True)
|
bsd-3-clause
| 7,818,122,220,882,212,000
| 36.397311
| 79
| 0.593475
| false
| 3.590914
| false
| false
| false
|
kernsuite-debian/lofar
|
MAC/Deployment/data/Coordinates/calc_coordinates.py
|
1
|
5787
|
#!/usr/bin/env python
# coding: iso-8859-15
import sys
import pgdb
import pg
from copy import deepcopy
from optparse import OptionParser
import getpass
from database import getDBname, getDBhost, getDBport, getDBuser
INTRO = """
Conversion between ETRS89 and ITRS2000 coordinates based on
Memo : Specifications for reference frame fixing in the analysis of a
EUREF GPS campaign
By Claude Boucher and Zuheir Altamimi
which is available from EUREF
In this utility I use the translational coefficients obtained by method "A" in
section 4 and the rotational coefficients in section 5, both for the 2000 (00)
reference frame.
"""
def subtract(a, b):
return [x - y for x, y in zip(a, b)]
def print_help():
print("Usage: calc_coordinates <stationname> <objecttype> date")
print(" <objecttype>: LBA|HBA|HBA0|HBA1|marker")
print(" <date> : yyyy.yy e.g. 2008.75 for Oct 1st 2008")
def solve(m, y):
"""
solve Mx=y. The algorithm is Gauss-Jordan elimination
without pivoting, which is allowed in this case as M is
dominated by the diagonal.
"""
dim = len(y)
a = deepcopy(m)
sol = deepcopy(y)
if (len(a) != len(a[0])) or len(a[0]) != len(y):
        raise ValueError('Incompatible dimensions')
for row in range(dim):
scale = 1./float(a[row][row])
a[row] = [x*scale for x in a[row]]
sol[row] = scale*float(sol[row])
for ix in range(dim):
if ix != row:
factor = float(a[ix][row])
a[ix] = subtract(a[ix], [factor*float(x) for x in a[row]])
a[ix][row] = 0.0
sol[ix] -= factor*float(sol[row])
return sol
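def _example_solve():
    """Illustrative only (not part of the original utility): solve a small,
    diagonally dominant system with the Gauss-Jordan routine above.
    M = [[4, 1], [1, 3]], y = [1, 2] gives x = [1/11, 7/11].
    """
    m = [[4.0, 1.0], [1.0, 3.0]]
    y = [1.0, 2.0]
    return solve(m, y)  # ~[0.0909, 0.6364]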
def convert(xetrs, date_years, trans):
"""
Solve equation:
        [X]          [T0]   [[   1      -R2*dt    R1*dt ]  [X]
        [Y]       -  [T1] =  [  R2*dt     1      -R0*dt ]  [Y]
        [Z]_ETRS     [T2]    [ -R1*dt    R0*dt      1   ]] [Z]_ITRS2000
"""
#
# get translate parameters from database
# ref-frame = trans[0]
# TOO = trans[1:4] = Tx,Ty,Tz
# mas = trans[5:8] = Rx,Ry,Rz
# diagonal(sf) = trans[4] + 1 = sf
#
t00 = [float(t) for t in trans[1:4]] # meters
rdot00 = [float(t) for t in trans[5:8]] # mas
# print "T00=[%e %e %e] Rdot00=[%e %e %e]" % (t00[0], t00[1], t00[2],
# rdot00[0], rdot00[1], rdot00[2])
dt = date_years - 1989.0
# print 'date_years=%f dt=%f' %(date_years, dt)
sf = float(trans[4]) + 1.
# print 'sf=',sf
matrix = [[sf, -rdot00[2]*dt, rdot00[1]*dt],
[rdot00[2]*dt, sf, -rdot00[0]*dt],
[-rdot00[1]*dt, rdot00[0]*dt, sf]]
xshifted = subtract(xetrs, t00)
# print "Matrix=", matrix
return solve(matrix, xshifted)
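def _example_convert():
    """Illustrative only (not part of the original utility): convert a made-up
    ETRS89 position to ITRS2000. The tuple mimics the layout read by convert():
    (frame, Tx, Ty, Tz, scale, Rx, Ry, Rz), translations in metres and rotation
    rates in mas. All numbers below are placeholders, not real EUREF values.
    """
    xetrs = [3826577.066, 461022.948, 5064892.786]  # fictitious position (m)
    trans = ('EXAMPLE-FRAME', 0.054, 0.051, -0.048, 0.0, 0.000081, 0.00049, -0.000792)
    return convert(xetrs, 2008.75, trans)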
#
# MAIN
#
if __name__ == '__main__':
parser = OptionParser("""Usage: %prog [options] <stationname> <objecttype> date
<objecttype>: LBA|HBA|HBA0|HBA1|marker
<date> : yyyy.yy e.g. 2008.75 for Oct 1st 2008""")
parser.add_option("-D", "--database",
dest="dbName",
type="string",
default=getDBname(),
help="Name of StationCoordinates database to use")
parser.add_option("-H", "--host",
dest="dbHost",
type="string",
default=getDBhost(),
help="Hostname of StationCoordinates database")
parser.add_option("-P", "--port",
dest="dbPort",
type="int",
default=getDBport(),
help="Port of StationCoordinates database")
parser.add_option("-U", "--user",
dest="dbUser",
type="string",
default=getDBuser(),
help="Username of StationCoordinates database")
# parse arguments
(options, args) = parser.parse_args()
dbName = options.dbName
dbHost = options.dbHost
dbPort = options.dbPort
dbUser = options.dbUser
# print sys.argv
if len(args) != 3:
parser.print_help()
sys.exit(1)
station_name = str(args[0]).upper()
object_type = str(args[1]).upper()
date_years = float(args[2])
dbPassword = getpass.getpass()
host = "{}:{}".format(dbHost, dbPort)
db1 = pgdb.connect(user=dbUser, host=host, database=dbName, password=dbPassword)
cursor = db1.cursor()
# calling stored procedures only works from the pg module for some reason.
db2 = pg.connect(user=dbUser, host=dbHost, dbname=dbName, port=dbPort, passwd=dbPassword)
cursor.execute("select * from get_transformation_info('ITRF2005')")
trans = cursor.fetchone()
cursor.execute("select * from get_ref_objects(%s, %s)", (str(sys.argv[1]).upper(), str(sys.argv[2]).upper()))
print("\n%s %s %8.3f" %(str(sys.argv[1]).upper(), str(sys.argv[2]).upper(),float(sys.argv[3])))
while (1):
record = cursor.fetchone()
if record is None:
print('record even = None')
break
# print record
XEtrs = [float(record[4]),
float(record[5]),
float(record[6])]
# print 'XEtrs=',XEtrs
XItrs2000 = convert(XEtrs, date_years, trans)
# write output to generated_coord ??
print("%s %d %14.6f %14.6f %14.6f" %(str(record[1]), record[2], XItrs2000[0], XItrs2000[1],XItrs2000[2]))
db2.query("select * from add_gen_coord('%s','%s',%s,%s,%s,%s,%s,'%s')" %\
(record[0], record[1], record[2], XItrs2000[0], XItrs2000[1], XItrs2000[2], date_years, 'ITRF2005'))
#record = None
db1.close()
db2.close()
sys.exit(0)
|
gpl-3.0
| -5,707,530,155,973,449,000
| 31.880682
| 122
| 0.544496
| false
| 3.190187
| false
| false
| false
|
zoni/pushover-cli
|
pushover_cli.py
|
1
|
2324
|
#!/usr/bin/env python
# Copyright (c) 2013 Nick Groenen <nick@groenen.me>
import argparse
import chump
def main():
parser = argparse.ArgumentParser(description="Simple pushover client")
parser.add_argument('--token', required=True, help="your application's API token")
parser.add_argument('--user', required=True, help="the user/group key (not e-mail address) of your user (or you)")
parser.add_argument('--message', required=True, help="your message")
parser.add_argument('--title', default=None, help="your message's title, otherwise your app's name is used")
parser.add_argument('--url', default=None, help="a supplementary URL to show with your message")
parser.add_argument('--url-title', default=None, help="a title for your supplementary URL, otherwise just the URL is shown")
parser.add_argument('--device', default=None, help="your user's device name to send the message directly to that device, rather than all of the user's devices")
    parser.add_argument('--priority', type=int, default=0, help="send as -1 to always send as a quiet notification, 1 to display as high-priority and bypass the user's quiet hours, or 2 to also require confirmation from the user")
    parser.add_argument('--callback', default=None, help="a publicly-accessible URL the Pushover servers will send a request to when the user has acknowledged your notification")
    parser.add_argument('--retry', type=int, default=30, help="how often (in seconds) to repeat the notification to the user in case of an emergency priority")
    parser.add_argument('--expire', type=int, default=86400, help="how many seconds your notification will continue to be retried for (every retry seconds) in case of an emergency priority")
parser.add_argument('--sound', default=None, help="the name of one of the sounds supported by device clients to override the user's default sound choice")
args = parser.parse_args()
app = chump.Application(args.token)
user = app.get_user(args.user)
user.send_message(
args.message,
title=args.title,
url=args.url,
url_title=args.url_title,
device=args.device,
priority=args.priority,
callback=args.callback,
retry=args.retry,
expire=args.expire,
sound=args.sound,
)
if __name__ == "__main__":
main()
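# Example invocation (illustrative only; the token and user key below are
# placeholders, not real credentials):
#
#   pushover_cli.py --token a1b2c3placeholder --user u4d5e6placeholder \
#       --message "Backup finished" --title "cron" --priority 1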
|
mit
| -6,797,070,299,818,647,000
| 53.046512
| 220
| 0.70568
| false
| 3.905882
| false
| false
| false
|
aminhp93/learning_python
|
src/comments/migrations/0001_initial.py
|
1
|
1264
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-31 16:25
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('contenttypes', '0002_remove_content_type_name'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('timestamp', models.DateTimeField(auto_now_add=True)),
('object_id', models.PositiveIntegerField()),
('content_type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType')),
('parent', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='comments.Comment')),
('user', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
mit
| 909,350,399,107,684,600
| 38.5
| 137
| 0.631329
| false
| 4.199336
| false
| false
| false
|
jhoenicke/mempool
|
eth/txpool_parse.py
|
1
|
2468
|
#!/usr/bin/env python3
import json
import sys
import time
from subprocess import PIPE, Popen
FEELIMIT = [0.0001, 1, 2, 3, 4, 5, 6, 7, 8, 10,
12, 14, 17, 20, 25, 30, 40, 50, 60, 70, 80, 100,
120, 140, 170, 200, 250, 300, 400, 500, 600, 700, 800, 1000,
1200, 1400, 1700, 2000, 2500, 3000, 4000, 5000, 6000, 7000, 8000, 10000]
sizes = [0] * len(FEELIMIT)
count = [0] * len(FEELIMIT)
fees = [0] * len(FEELIMIT)
found = False
lastfrom = ""
lastgprice = 0
def parse_txdata(obj):
global sizes, count, fees, found, lastfrom, lastgprice
try:
firstval = next(iter(obj.values()));
if "gasPrice" in firstval:
            # The effective gas price is the price miners use to decide
            # whether to mine a transaction. It is the minimum of the
            # transaction's own gas price and the effective gas price of
            # the previous unconfirmed transaction with a smaller nonce.
            # effgprice starts at a very large value so that it does not
            # affect the gas price of the first transaction.
effgprice = 1e18;
# sort the txes by nonce
for k in sorted(obj.keys(), key=int):
tx = obj[k]
gprice = int(tx["gasPrice"], 0)
gas = int(tx["gas"], 0)
size = gas
gprice = gprice / 1000000000
effgprice = min(effgprice, gprice)
found = True
for i, limit in enumerate(FEELIMIT):
if (effgprice >= limit and
(i == len(FEELIMIT) - 1 or effgprice < FEELIMIT[i+1])):
sizes[i] += size
count[i] += 1
                        # fees accumulated in gwei (gprice is gwei per gas unit here)
fees[i] += round(gprice * gas)
break
return None
return obj
except:
return obj
def dump_data(timestamp, sizes, count, fees):
sizesstr = ",".join(str(x) for x in sizes)
countstr = ",".join(str(x) for x in count)
feesstr = ",".join(str(x) for x in fees)
print("[{:d},[{}],[{}],[{}]],"
.format(timestamp, countstr, sizesstr, feesstr))
def main():
global sizes, count, fees, found
timestamp = int(time.time())
try:
output = json.load(sys.stdin, object_hook=parse_txdata)
except:
pass
if found:
dump_data(timestamp, sizes, count, fees)
main()
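# Illustrative note (not part of the original script): the expected stdin is a
# JSON object of pending transactions keyed by sender address and nonce, e.g.
# the sketch below (addresses and values are made up; fields are trimmed to
# the "gasPrice" and "gas" hex strings that parse_txdata() actually reads):
#
#   {
#     "0xabc...": {
#       "0": {"gasPrice": "0x3b9aca00", "gas": "0x5208"},
#       "1": {"gasPrice": "0x77359400", "gas": "0x5208"}
#     }
#   }
#
# Running `cat txpool.json | ./txpool_parse.py` then prints one line of the
# form [timestamp,[counts],[sizes],[fees]], ready to append to the mempool log.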
|
agpl-3.0
| 7,091,031,645,175,694,000
| 32.808219
| 84
| 0.522285
| false
| 3.54089
| false
| false
| false
|
zegra1989/pytree
|
bplustree.py
|
1
|
17009
|
# -*- coding:utf-8 -*-
# use UTF-8
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class NameNode(object):
def __init__(self, degree, optimize=3):
super(NameNode, self).__init__()
self.num = 0
self.degree = degree
self.threshold = degree*2
self.keys = [None for _ in xrange(self.threshold)]
self.pnodes = [None for _ in xrange(self.threshold)]
self.isleaf = True
def pointer(self):
return self
def __str__(self):
return "num:{0} keys:{1}".format(
self.num, self.keys[:self.num])
class DataNode(object):
"""docstring for DataNode"""
F_INCREASE = 0
F_DECREASE = 1
def __init__(self, max_length=10, optimize=3):
super(DataNode, self).__init__()
self.data = None
self.max_length = max_length
base, mode = divmod(self.max_length, 2)
if mode > 0:
base += 1
self.min_length = base
self.num = 0
        # key of the most recent insert
        self.last_insert_pos = None
        # flag: True while inserts keep increasing, False while decreasing
        self.is_increase = None
        # number of consecutive inserts in the same direction
        self.n_directions = 0
        # the split optimization only kicks in after n_optimize consecutive
        # inserts in the same direction
self.n_optimize = optimize
self.prev = None
self.next = None
def link(self, prev_node=None, next_node=None):
if prev_node is not None:
tmp = self.prev
self.prev = prev_node
prev_node.prev = tmp
prev_node.next = self
if prev_node.prev is not None:
prev_node.prev.next = prev_node
if next_node is not None:
tmp = self.next
self.next = next_node
next_node.next = tmp
next_node.prev = self
if next_node.next is not None:
next_node.next.prev = next_node
def insert(self, data, doc):
raise NotImplementedError()
def update(self, data, doc):
raise NotImplementedError()
def pop(self, num=1):
raise NotImplementedError()
def isfull(self):
raise NotImplementedError()
def isguarenteed(self):
raise NotImplementedError()
def split(self, mode=None):
raise NotImplementedError()
def merge(self, datanode):
raise NotImplementedError()
@property
def low_key(self):
return self._low_key
class BPlusTree(object):
def __init__(self, degree):
super(BPlusTree, self).__init__()
self.degree = degree
self.threshold = degree*2
self.root = self.allocate_namenode()
def allocate_namenode(self):
raise NotImplementedError()
def deallocate_namenode(self, node):
raise NotImplementedError()
def allocate_datanode(self):
raise NotImplementedError()
def deallocate_datanode(self, node):
raise NotImplementedError()
def save_docs(self, metanode):
raise NotImplementedError()
def load_docs(self, metanode, ipos):
raise NotImplementedError()
def remove(self, key):
res = self.remove_key(self.root, key)
self.shrink()
return res
def shrink(self):
if self.root.num == 1 and self.root.isleaf is False:
old_root = self.root
self.root = old_root.pnodes[0]
self.deallocate_namenode(old_root)
def update(self, key, doc):
docs = self.search(key)
if docs is None:
            self.insert(key, doc)
            return 0
        # search() already returned the loaded DataNode; update it in place
docs.update(key, doc)
return 1
def select(self, key):
node = self.search(key)
if node is None:
return None
return node
def search(self, key, node=None):
if node is None:
node = self.root
ipos = node.num-1
while ipos >= 0 and key < node.keys[ipos]:
ipos -= 1
        # if ipos < 0, the key was not found
if ipos < 0:
return None
if node.isleaf is True:
return self.load_docs(node.pnodes[ipos])
return self.search(key, node.pnodes[ipos])
def split(self, parent, ipos, node):
if parent.isleaf is False:
new_node = self.allocate_namenode()
new_node.isleaf = node.isleaf
for i in xrange(0, self.degree):
new_node.keys[i] = node.keys[i+self.degree]
new_node.pnodes[i] = node.pnodes[i+self.degree]
new_node.num = node.num = self.degree
for i in xrange(parent.num-1, ipos-1, -1):
parent.keys[i+1] = parent.keys[i]
parent.pnodes[i+1] = parent.pnodes[i]
parent.keys[ipos+1] = new_node.keys[0]
parent.pnodes[ipos+1] = new_node.pointer()
parent.num += 1
return None
for i in xrange(parent.num-1, ipos-1, -1):
            # no out-of-bounds here: insert() guarantees there is room
parent.keys[i+1] = parent.keys[i]
parent.pnodes[i+1] = parent.pnodes[i]
        # optimized split strategy
        if node.n_directions > node.n_optimize:
            # avoid the sequential-insert pattern behind MySQL Bug #67718
if node.is_increase is True:
                # consecutive increasing inserts
new_node = node.split(mode=DataNode.F_INCREASE)
ipos += 1
node.link(next_node=new_node)
else:
                # consecutive decreasing inserts
new_node = node.split(mode=DataNode.F_DECREASE)
parent.keys[ipos+1] = node.low_key
node.link(prev_node=new_node)
else:
            # default split strategy
new_node = node.split()
ipos += 1
node.link(next_node=new_node)
parent.keys[ipos] = new_node.low_key
parent.pnodes[ipos] = new_node
parent.num += 1
return None
def insert_nonfull(self, node, key, doc):
ipos = node.num-1
while ipos >= 0 and key < node.keys[ipos]:
ipos -= 1
        # if ipos < 0, the key is smaller than every key currently in this node
if ipos < 0:
node.keys[0] = key
ipos = 0
if node.isleaf is True:
datanode = node.pnodes[ipos]
if datanode is None:
datanode = self.allocate_datanode()
node.keys[ipos] = key
node.pnodes[ipos] = datanode
node.num += 1
            # no need to link the DataNode list here: this branch only runs once, at initialization
if datanode.isfull() is True:
if datanode.is_increase is True and datanode.last_insert_pos > key:
datanode.is_increase = False
datanode.n_directions = 1
elif datanode.is_increase is False and datanode.last_insert_pos < key:
datanode.is_increase = True
datanode.n_directions = 1
self.split(node, ipos, datanode)
if node.keys[ipos+1] < key:
ipos += 1
datanode = node.pnodes[ipos]
datanode.insert(key, doc)
node.keys[ipos] = datanode.low_key
return None
child = node.pnodes[ipos]
if child.num == self.threshold:
self.split(node, ipos, child)
if node.keys[ipos+1] is not None and node.keys[ipos+1] < key:
child = node.pnodes[ipos+1]
return self.insert_nonfull(child, key, doc)
def insert(self, key, doc):
if self.root.num != self.threshold:
return self.insert_nonfull(self.root, key, doc)
old_root = self.root
new_root = self.allocate_namenode()
new_root.isleaf = False
new_root.keys[0] = old_root.keys[0]
new_root.pnodes[0] = old_root.pointer()
new_root.num += 1
self.root = new_root
self.split(new_root, 0, old_root)
return self.insert_nonfull(new_root, key, doc)
def merge(self, node, ipos):
"""
        Merge the child at key index ipos of this node with its left/right sibling.
        ipos is the position of the key in node.keys.
"""
        # the child is the rightmost one, so it has no right sibling; merge with the left one instead
if ipos == node.num-1:
ipos -= 1
child = node.pnodes[ipos]
rchild = node.pnodes[ipos+1]
if node.isleaf is True:
child.merge(rchild)
self.deallocate_datanode(rchild)
else:
irpos = 0
while irpos < rchild.num:
child.keys[child.num+irpos] = rchild.keys[irpos]
child.pnodes[child.num+irpos] = rchild.pnodes[irpos]
irpos += 1
child.num += rchild.num
self.deallocate_namenode(rchild)
inpos = ipos+1
while inpos < node.num-1:
node.keys[inpos] = node.keys[inpos+1]
node.pnodes[inpos] = node.pnodes[inpos+1]
inpos += 1
node.num -= 1
return ipos
def guarantee(self, node, ipos):
"""
        Ensure that node.pnodes[ipos] holds at least t (= degree) keys.
"""
child = node.pnodes[ipos]
if child.num > self.degree:
return ipos
        # if ipos == 0, child has no left sibling
if ipos > 0:
lbrother = node.pnodes[ipos-1]
if lbrother.num > self.degree:
icpos = child.num
while icpos > 0:
child.keys[icpos] = child.keys[icpos-1]
child.pnodes[icpos] = child.pnodes[icpos-1]
icpos -= 1
child.keys[0] = lbrother.keys[lbrother.num-1]
child.pnodes[0] = lbrother.pnodes[lbrother.num-1]
child.num += 1
node.keys[ipos] = child.keys[0]
lbrother.num -= 1
return ipos
        # if ipos == node.num-1, child has no right sibling
if ipos < node.num-1:
rbrother = node.pnodes[ipos+1]
if rbrother.num > self.degree:
child.keys[child.num] = rbrother.keys[0]
child.pnodes[child.num] = rbrother.pnodes[0]
child.num += 1
irpos = 0
while irpos < rbrother.num-1:
rbrother.keys[irpos] = rbrother.keys[irpos+1]
rbrother.pnodes[irpos] = rbrother.pnodes[irpos+1]
irpos += 1
node.keys[ipos+1] = rbrother.keys[0]
rbrother.num -= 1
return ipos
return self.merge(node, ipos)
def remove_key(self, node, key):
ipos = node.num-1
while ipos >= 0 and key < node.keys[ipos]:
ipos -= 1
        # if ipos < 0, the key to delete was not found
if ipos < 0:
return None
if node.isleaf is False:
icpos = self.guarantee(node, ipos)
child = node.pnodes[icpos]
self.remove_key(child, key)
node.keys[icpos] = node.pnodes[icpos].keys[0]
return 0
datanode = node.pnodes[ipos]
if datanode.isguarenteed() is True:
datanode.remove(key)
node.keys[ipos] = datanode.low_key
return datanode.low_key
if node.num == 1:
datanode.remove(key)
if datanode.num > 0:
node.keys[ipos] = datanode.low_key
else:
node.num = 0
node.pnodes[0] = None
self.deallocate_datanode(datanode)
return 0
if ipos > 0:
lbrother = node.pnodes[ipos-1]
if lbrother.isguarenteed() is True:
lkey, ldoc = lbrother.pop()
datanode.insert(lkey, ldoc)
node.keys[ipos] = lkey
datanode.remove(key)
node.keys[ipos] = datanode.low_key
return datanode.low_key
if ipos < node.num-1:
rbrother = node.pnodes[ipos+1]
if rbrother.isguarenteed() is True:
rkey, rdoc = rbrother.shift()
datanode.insert(rkey, rdoc)
node.keys[ipos+1] = rbrother.low_key
datanode.remove(key)
node.keys[ipos] = datanode.low_key
return datanode.low_key
ipos = self.merge(node, ipos)
datanode = node.pnodes[ipos]
datanode.remove(key)
node.keys[ipos] = datanode.low_key
return datanode.low_key
def traverse(self, callback, node=None):
pass
def print_node(self, node, string, depth=0):
pass
def __str__(self):
strings = ["*****************************"]
self.print_node(self.root, strings)
return "\n".join(strings).strip() + "\n*****************************\n"
################################################
class MemDataNode(DataNode):
"""docstring for MemDataNode"""
def __init__(self, max_length=4):
super(MemDataNode, self).__init__(max_length)
self.data = {}
def insert(self, key, doc):
if isinstance(doc, list,) is True and len(doc) == 1:
doc = doc[0]
self.data[key] = [doc]
self._low_key = min(self.data.keys())
if self.is_increase is True:
if self.last_insert_pos < key:
self.n_directions += 1
else:
self.is_increase = False
self.n_directions = 1
else:
if self.last_insert_pos > key:
self.n_directions += 1
else:
self.is_increase = True
self.n_directions = 1
self.last_insert_pos = key
self.num += 1
def update(self, key, doc):
docs = self.data.get(key, None)
if docs is not None:
docs.append(doc)
else:
self.data[key] = [doc]
self.num += 1
self._low_key = min(self.data.keys())
def remove(self, key):
del self.data[key]
self.num -= 1
if len(self.data) > 0:
self._low_key = min(self.data.keys())
else:
self._low_key = None
def isfull(self):
return self.num == self.max_length
def isguarenteed(self):
return self.num > self.min_length
def pop(self):
key = sorted(self.data)[-1]
doc = self.data.pop(key)
if len(self.data) == 0:
self._low_key = None
self.num -= 1
return key, doc
def shift(self):
key = sorted(self.data)[0]
doc = self.data.pop(key)
if len(self.data) == 0:
self._low_key = None
else:
self._low_key = min(self.data.keys())
self.num -= 1
return key, doc
def split(self, mode=None):
new_node = MemDataNode(self.max_length)
if mode is DataNode.F_INCREASE:
key, doc = self.pop()
new_node.insert(key, doc)
self.num -= 1
elif mode is DataNode.F_DECREASE:
key, doc = self.shift()
new_node.insert(key, doc)
self.num -= 1
else:
for key in sorted(self.data)[self.min_length:]:
new_node.insert(key, self.data.pop(key))
self.num -= 1
return new_node
def merge(self, datanode):
self.data.update(datanode.data)
self.num = len(self.data)
def __str__(self):
keys = sorted(self.data.keys())
values = map(lambda x: self.data[x], keys)
return "num:{0} keys:{1} docs:{2}, increase:{3}".format(
len(self.data), keys, values, self.n_directions)
class MemBPlusTree(BPlusTree):
"""docstring for MemBPlusTree"""
def __init__(self, degree):
super(MemBPlusTree, self).__init__(degree)
def allocate_namenode(self):
return NameNode(self.degree)
def deallocate_namenode(self, node):
pass
def allocate_datanode(self):
return MemDataNode()
def deallocate_datanode(self, node):
pass
def load_docs(self, datanode):
return datanode
def print_node(self, node, strings, depth=0):
if node is None:
return
strings.append(">"*depth + str(node))
if node.isleaf is False:
strings.append("")
for ipos in xrange(node.num):
self.print_node(node.pnodes[ipos], strings, depth+1)
strings.append("")
else:
for ipos in xrange(node.num):
strings.append(">"*(depth+1) + str(node.pnodes[ipos]))
def __str__(self):
strings = ["*****************************"]
self.print_node(self.root, strings)
return "\n".join(strings).strip() + "\n*****************************\n"
|
mit
| 5,075,025,887,707,922,000
| 28.057895
| 86
| 0.513796
| false
| 3.376758
| false
| false
| false
|
julienmalard/Tinamit
|
tinamit/mod/var.py
|
1
|
3075
|
import numpy as np
import xarray as xr
from tinamit.config import _
class Variable(object):
"""La clase más general para variables de modelos en Tinamït."""
def __init__(símismo, nombre, unid, ingr, egr, inic=0, líms=None, info=''):
"""
Parameters
----------
        nombre: str
            The name of the variable.
        unid: str or None
            The units of the variable.
        ingr: bool
            Whether the variable is an input to the model.
        egr: bool
            Whether the variable is an output of the model.
        inic: int or float or np.ndarray
            The initial value of the variable.
        líms: tuple
            The bounds of the variable.
        info: str
            Detailed description of the variable.
"""
if not (ingr or egr):
raise ValueError(_('Si no es variable ingreso, debe ser egreso.'))
símismo.nombre = nombre
símismo.unid = unid
símismo.ingr = ingr
símismo.egr = egr
símismo.inic = _a_np(inic)
símismo.dims = símismo.inic.shape
símismo.líms = _proc_líms(líms)
símismo.info = info
símismo._val = símismo.inic.astype(float)
def poner_val(símismo, val):
"""
        Sets the value of the variable.
        Parameters
        ----------
        val: int or float or np.ndarray
            The new value.
"""
if isinstance(val, np.ndarray) and val.size == 1:
val = val[0]
if isinstance(val, np.ndarray):
            existen = np.invert(np.isnan(val))  # do not overwrite with missing (NaN) values
símismo._val[existen] = val[existen]
elif not np.isnan(val):
símismo._val[:] = val
def obt_val(símismo):
"""
        Returns the value of the variable.
"""
        return símismo._val  # discourage direct modification of `símismo._val`
def reinic(símismo):
"""
        Resets the variable to its pre-simulation value.
"""
símismo._val[:] = símismo.inic
def __iadd__(símismo, otro):
símismo.poner_val(símismo._val + otro)
return símismo
    def __imul__(símismo, otro):
        símismo.poner_val(símismo._val * otro)
        return símismo
    def __imod__(símismo, otro):
        símismo.poner_val(símismo._val % otro)
        return símismo
    def __ifloordiv__(símismo, otro):
        símismo.poner_val(símismo._val // otro)
        return símismo
    def __ipow__(símismo, otro):
        símismo.poner_val(símismo._val ** otro)
        return símismo
def __str__(símismo):
return símismo.nombre
def _a_np(val):
if isinstance(val, xr.DataArray):
val = val.values
if isinstance(val, np.ndarray):
if val.shape:
return val
return np.array([val])
elif isinstance(val, (int, float, np.number)):
return np.array([val])
else:
return np.array(val)
def _proc_líms(líms):
if líms is None:
return -np.inf, np.inf
else:
return -np.inf if líms[0] is None else líms[0], np.inf if líms[1] is None else líms[1]
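if __name__ == '__main__':
    # Minimal illustrative sketch, not part of the original module: a scalar
    # input/output variable updated in place. NaN entries are ignored by
    # poner_val(), so the existing value is preserved.
    var = Variable('caudal', unid='m3/s', ingr=True, egr=True, inic=1.5)
    var.poner_val(2.0)
    print(var, var.obt_val())  # caudal [2.]
    var.poner_val(np.nan)      # NaN means "no new value": 2.0 is kept
    print(var.obt_val())       # [2.]
    var += 0.5
    print(var.obt_val())       # [2.5]
    var.reinic()
    print(var.obt_val())       # [1.5]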
|
gpl-3.0
| -3,288,465,236,079,383,000
| 26.198198
| 94
| 0.556476
| false
| 3.074338
| false
| false
| false
|
InfoAgeTech/django-umanage
|
umanage/accounts/views.py
|
1
|
3423
|
from __future__ import unicode_literals
from django.conf import settings
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
from django_core.utils.loading import get_class_from_settings_full_path
from django_core.views.mixins.auth import LoginRequiredViewMixin
from ..exceptions import UManageSettingImproperlyConfigured
from .forms import UserAccountForm
from inspect import isfunction
class AccountView(LoginRequiredViewMixin, TemplateView):
template_name = 'umanage/accounts/account_view.html'
def get_context_data(self, **kwargs):
context = super(AccountView, self).get_context_data(**kwargs)
user = self.request.user
settings_key = 'UMANAGE_USER_ACCOUNT_DISPLAY_FIELDS'
user_fields_to_display = getattr(settings,
settings_key,
('first_name', 'last_name', 'email'))
if not isinstance(user_fields_to_display, (tuple, list)):
raise UManageSettingImproperlyConfigured(settings_key)
fields_to_display = []
for field_name in user_fields_to_display:
label = None
if isinstance(field_name, (list, tuple)):
label = field_name[0]
field_name = field_name[1]
try:
val = getattr(user, field_name)
if isfunction(val):
# it's a function, call the function and get the results
val = val()
if not label:
field = user._meta.get_field(field_name)
label = field.verbose_name
except AttributeError:
raise UManageSettingImproperlyConfigured(
settings_key,
message=_('"{0}" is not a valid field on the User model. '
'Check the "{1}" config '
'setting.').format(field_name, settings_key)
)
fields_to_display.append((label.title(), val))
context['fields_to_display'] = fields_to_display
return context
class AccountEditView(LoginRequiredViewMixin, FormView):
template_name = 'umanage/accounts/account_edit.html'
form_class = UserAccountForm
def dispatch(self, *args, **kwargs):
settings_key = 'UMANAGE_USER_ACCOUNT_EDIT_FORM'
if hasattr(settings, settings_key):
try:
self.form_class = get_class_from_settings_full_path(settings_key)
except:
msg = _('{0} setting path is either incorrect or the app is '
'not installed. Please check the '
'configuration.').format(settings_key)
raise UManageSettingImproperlyConfigured(settings_key, msg)
return super(AccountEditView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super(AccountEditView, self).get_form_kwargs()
kwargs['instance'] = self.request.user
kwargs['request'] = self.request
return kwargs
def form_valid(self, form):
form.save()
return super(AccountEditView, self).form_valid(form)
def get_success_url(self):
return reverse('umanage_account_view')
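# Illustrative configuration sketch (not part of the original module): the
# snippet shows the two accepted item formats for
# UMANAGE_USER_ACCOUNT_DISPLAY_FIELDS -- a plain field name, or a
# (label, field_name) pair -- and points UMANAGE_USER_ACCOUNT_EDIT_FORM at a
# custom form class. The dotted path and labels are made-up examples.
#
#   # settings.py
#   UMANAGE_USER_ACCOUNT_DISPLAY_FIELDS = (
#       'email',
#       ('Given name', 'first_name'),
#       ('Family name', 'last_name'),
#   )
#   UMANAGE_USER_ACCOUNT_EDIT_FORM = 'myapp.forms.MyAccountForm'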
|
mit
| -5,431,782,645,010,916,000
| 36.206522
| 81
| 0.603856
| false
| 4.428202
| true
| false
| false
|
erdc/proteus
|
proteus/mprans/Dissipation.py
|
1
|
70664
|
from __future__ import division
from builtins import range
from past.utils import old_div
import proteus
from proteus.mprans.cDissipation import *
from proteus.mprans.cDissipation2D import *
import numpy as np
from proteus import Profiling as prof
from proteus import cfemIntegrals
from . import cArgumentsDict
"""
NOTES:
Hardwired Numerics include:
lagging all terms from Navier-Stokes, Kappa equations
same solution space for velocity from Navier-Stokes and Dissipation
equations
This can be removed by saving gradient calculations in N-S and lagging
rather than passing degrees of freedom between models
"""
class SubgridError(proteus.SubgridError.SGE_base):
def __init__(self, coefficients, nd):
proteus.SubgridError.SGE_base.__init__(self, coefficients, nd, lag=False)
def initializeElementQuadrature(self, mesh, t, cq):
pass
def updateSubgridErrorHistory(self, initializationPhase=False):
pass
def calculateSubgridError(self, q):
pass
class ShockCapturing(proteus.ShockCapturing.ShockCapturing_base):
def __init__(self, coefficients, nd, shockCapturingFactor=0.25, lag=True, nStepsToDelay=None):
proteus.ShockCapturing.ShockCapturing_base.__init__(self, coefficients, nd, shockCapturingFactor, lag)
self.nStepsToDelay = nStepsToDelay
self.nSteps = 0
if self.lag:
prof.logEvent("Kappa.ShockCapturing: lagging requested but must lag the first step; switching lagging off and delaying")
self.nStepsToDelay = 1
self.lag = False
def initializeElementQuadrature(self, mesh, t, cq):
self.mesh = mesh
self.numDiff = []
self.numDiff_last = []
for ci in range(self.nc):
self.numDiff.append(cq[('numDiff', ci, ci)])
self.numDiff_last.append(cq[('numDiff', ci, ci)])
def updateShockCapturingHistory(self):
self.nSteps += 1
if self.lag:
for ci in range(self.nc):
self.numDiff_last[ci][:] = self.numDiff[ci]
if self.lag == False and self.nStepsToDelay is not None and self.nSteps > self.nStepsToDelay:
prof.logEvent("Dissipation.ShockCapturing: switched to lagged shock capturing")
self.lag = True
self.numDiff_last = []
for ci in range(self.nc):
self.numDiff_last.append(self.numDiff[ci].copy())
prof.logEvent("Dissipation: max numDiff %e" % (proteus.Comm.globalMax(self.numDiff_last[0].max()),))
class NumericalFlux(proteus.NumericalFlux.Advection_DiagonalUpwind_Diffusion_IIPG_exterior):
def __init__(self, vt, getPointwiseBoundaryConditions,
getAdvectiveFluxBoundaryConditions,
getDiffusiveFluxBoundaryConditions):
proteus.NumericalFlux.Advection_DiagonalUpwind_Diffusion_IIPG_exterior.__init__(self, vt, getPointwiseBoundaryConditions,
getAdvectiveFluxBoundaryConditions,
getDiffusiveFluxBoundaryConditions)
class Coefficients(proteus.TransportCoefficients.TC_base):
"""Basic k-epsilon model for incompressible flow from Hutter etal
Chaper 11 or k-omega (Wilcox 1998).
"""
# Solves for just dissipation variable (epsilon, or omega) assuming
# kappa (intensity) computed independently and lagged in time
# \bar{\vec v} = <\vec v> Reynolds-averaged (mean) velocity
# \vec v^{'} = turbulent fluctuation
# assume \vec v = <\vec v> + \vec v^{'}, with <\vec v^{'}> = 0
# Reynolds averaged NS equations
# \deld \bar{\vec v} = 0
# \pd{\bar{\vec v}}{t} + \deld \left(\bar{\vec v} \outer \bar{\vec v}\right)
# -\nu \deld \ten \bar{D} + \frac{1}{\rho}\grad \bar p
# - \frac{1}{rho}\deld \ten{R} = 0
# Reynolds stress term
# \ten R = -\rho <\vec v^{'}\outer \vec v^{'}>
# \frac{1}{\rho}\ten{R} = 2 \nu_t \bar{D} - \frac{2}{3}k\ten{I}
# D_{ij}(\vec v) = \frac{1}{2} \left( \pd{v_i}{x_j} + \pd{v_j}{x_i})
# \ten D \bar{\ten D} = D(<\vec v>), \ten D^{'} = \ten D(\vec v^{'})
# k-epsilon tranport equations
# \pd{k}{t} + \deld (k\bar{\vec v})
# - \deld\left[\left(\frac{\nu_t}{\sigma_k} + \nu\right)\grad k \right]
# - 4\nu_t \Pi_{D} + \epsilon = 0
# \pd{\varepsilon}{t} + \deld (\varepsilon \bar{\vec v})
# - \deld\left[\left(\frac{\nu_t}{\sigma_\varepsilon} + \nu\right)\grad \varepsilon \right]
# - 4c_1 k \Pi_{D} + c_2 \frac{\epsilon^2}{k} = 0
# k -- turbulent kinetic energy = <\vec v^{'}\dot \vec v^{'}>
# \varepsilon -- turbulent dissipation rate = 4 \nu <\Pi_{D^{'}}>
# \nu -- kinematic viscosity (\mu/\rho)
# \nu_t -- turbulent viscosity = c_mu \frac{k^2}{\varepsilon}
# \Pi_{\ten A} = \frac{1}{2}tr(\ten A^2) = 1/2 \ten A\cdot \ten A
# \ten D \cdot \ten D = \frac{1}{4}\left[ (4 u_x^2 + 4 v_y^2 +
# 1/2 (u_y + v_x)^2 \right]
# 4 \Pi_{D} = 2 \frac{1}{4}\left[ (4 u_x^2 + 4 v_y^2 +
# 1/2 (u_y + v_x)^2 \right]
# = \left[ (2 u_x^2 + 2 v_y^2 + (u_y + v_x)^2 \right]
# \sigma_k -- Prandtl number \approx 1
# \sigma_e -- c_{\mu}/c_e
# c_{\mu} = 0.09, c_1 = 0.126, c_2 = 1.92, c_{\varepsilon} = 0.07
# """
from proteus.ctransportCoefficients import kEpsilon_k_3D_Evaluate_sd
from proteus.ctransportCoefficients import kEpsilon_k_2D_Evaluate_sd
def __init__(self,
VOS_model=None, # Solid model
V_model=None, # Fluid model
LS_model=None,
RD_model=None,
kappa_model=None,
ME_model=None,
SED_model=None,
dissipation_model_flag=1, # default K-Epsilon, 2 --> K-Omega 1998, 3 --> K-Omega 1988
c_mu=0.09,
c_1=0.126,
c_2=1.92,
c_e=0.07,
sigma_e=1.29,
rho_0=998.2,
nu_0=1.004e-6,
rho_1=1.205,
nu_1=1.500e-5,
g=[0.0, -9.8],
nd=3,
epsFact=0.01,
useMetrics=0.0,
sc_uref=1.0,
sc_beta=1.0,
default_kappa=1.0e-3,
closure=None,
nullSpace='NoNullSpace',
initialize=True):
self.useMetrics = useMetrics
self.dissipation_model_flag = dissipation_model_flag # default K-Epsilon, 2 ==> K-Omega 1998, 3 --> K-Omega 1988
self.variableNames = ['epsilon']
self.nd = nd
self.rho_0 = rho_0
self.nu_0 = nu_0
self.rho_1 = rho_1
self.rho = rho_0
self.nu_1 = nu_1
self.c_mu = c_mu
self.c_1 = c_1
self.c_2 = c_2
self.c_e = c_e
self.sigma_e = sigma_e
self.g = g
self.epsFact = epsFact
self.flowModelIndex = V_model
self.modelIndex = ME_model
self.RD_modelIndex = RD_model
self.LS_modelIndex = LS_model
self.VOS_modelIndex = VOS_model
self.SED_modelIndex = SED_model
self.kappa_modelIndex = kappa_model
self.sc_uref = sc_uref
self.sc_beta = sc_beta
self.nullSpace = nullSpace
# for debugging model
self.default_kappa = default_kappa
self.closure = closure
if initialize:
self.initialize()
def initialize(self):
if self.dissipation_model_flag >= 2:
self.variableNames = ['omega']
#
nc = 1
mass = {0: {0: 'linear'}}
advection = {0: {0: 'linear'}}
hamiltonian = {}
potential = {0: {0: 'u'}}
diffusion = {0: {0: {0: 'nonlinear', }}}
reaction = {0: {0: 'nonlinear'}}
if self.nd == 2:
sdInfo = {(0, 0): (np.array([0, 1, 2], dtype='i'),
np.array([0, 1], dtype='i'))}
else:
sdInfo = {(0, 0): (np.array([0, 1, 2, 3], dtype='i'),
np.array([0, 1, 2], dtype='i'))}
proteus.TransportCoefficients.TC_base.__init__(self,
nc,
mass,
advection,
diffusion,
potential,
reaction,
hamiltonian,
self.variableNames,
sparseDiffusionTensors=sdInfo)
closure = self.closure
try:
self.aDarcy=closure.aDarcy
self.betaForch=closure.betaForch
self.grain=closure.grain
self.packFraction=closure.packFraction
self.packMargin=closure.packMargin
self.maxFraction=closure.maxFraction
self.frFraction=closure.frFraction
self.sigmaC=closure.sigmaC
self.C3e=closure.C3e
self.C4e=closure.C4e
self.eR=closure.eR
self.fContact=closure.fContact
self.mContact=closure.mContact
self.nContact=closure.nContact
self.angFriction=closure.angFriction
self.vos_limiter = closure.vos_limiter
self.mu_fr_limiter = closure.mu_fr_limiter
self.sedFlag = 1
prof.logEvent("INFO: Loading parameters for sediment closure",2)
except:
self.aDarcy=-1.
self.betaForch=-1.
self.grain=-1.
self.packFraction=-1.
self.packMargin=-1.
self.maxFraction=-1.
self.frFraction=-1.
self.sigmaC=-1.
self.C3e=-1.
self.C4e=-1.
self.eR=-1.
self.fContact=-1.
self.mContact=-1.
self.nContact=-1.
self.angFriction=-1.
self.vos_limiter = -1.
self.mu_fr_limiter = -1.
self.sedFlag=0
assert self.VOS_modelIndex == None
assert self.SED_modelIndex == None
prof.logEvent("Sediment module is off. Loading dummy parameters",2)
def initializeMesh(self, mesh):
self.eps = self.epsFact * mesh.h
def attachModels(self, modelList):
assert self.modelIndex is not None and self.modelIndex < len(
modelList), "Dissipation: invalid index for self model allowed range: [0,%s]" % len(modelList)
# self
self.model = modelList[self.modelIndex]
# redistanced level set
if self.RD_modelIndex is not None:
self.rdModel = modelList[self.RD_modelIndex]
# level set
if self.LS_modelIndex is not None:
self.lsModel = modelList[self.LS_modelIndex]
self.q_phi = modelList[self.LS_modelIndex].q[('u', 0)]
self.ebqe_phi = modelList[self.LS_modelIndex].ebqe[('u', 0)]
if ('u', 0) in modelList[self.LS_modelIndex].ebq:
self.ebq_phi = modelList[self.LS_modelIndex].ebq[('u', 0)]
else:
self.ebq_phi = None
else:
self.q_phi =-np.ones( modelList[self.kappa_modelIndex].q[('u', 0)].shape, 'd')
#self.ebq_phi =-np.ones( modelList[self.dissipation_modelIndex].ebq[('u', 0)].shape, 'd')
self.ebqe_phi = -np.ones( modelList[self.kappa_modelIndex].ebqe[('u', 0)].shape, 'd')
# flow model
self.u_old_dof = np.copy(self.model.u[0].dof)
assert self.flowModelIndex is not None, "Dissipation: invalid index for flow model allowed range: [0,%s]" % len(modelList)
# print "flow model index------------",self.flowModelIndex,modelList[self.flowModelIndex].q.has_key(('velocity',0))
if self.flowModelIndex is not None: # keep for debugging for now
self.model.ebqe['n'][:] = modelList[self.flowModelIndex].ebqe['n']
if ('velocity', 0) in modelList[self.flowModelIndex].q:
self.q_v = modelList[self.flowModelIndex].q[('velocity', 0)]
self.ebqe_v = modelList[self.flowModelIndex].ebqe[('velocity', 0)]
else:
self.q_v = modelList[self.flowModelIndex].q[('f', 0)]
self.ebqe_v = modelList[self.flowModelIndex].ebqe[('f', 0)]
if ('velocity', 0) in modelList[self.flowModelIndex].ebq:
self.ebq_v = modelList[self.flowModelIndex].ebq[('velocity', 0)]
else:
if ('f', 0) in modelList[self.flowModelIndex].ebq:
self.ebq_v = modelList[self.flowModelIndex].ebq[('f', 0)]
#
import copy
self.q_grad_u = modelList[self.flowModelIndex].q[('grad(u)', 1)]
self.q_grad_v = modelList[self.flowModelIndex].q[('grad(u)', 2)]
#
self.ebqe_grad_u = modelList[self.flowModelIndex].ebqe[('grad(u)', 1)]
self.ebqe_grad_v = modelList[self.flowModelIndex].ebqe[('grad(u)', 2)]
if ('grad(u)', 1) in modelList[self.flowModelIndex].ebq:
self.ebq_grad_u = modelList[self.flowModelIndex].ebq[('grad(u)', 1)]
if ('grad(u)', 2) in modelList[self.flowModelIndex].ebq:
self.ebq_grad_v = modelList[self.flowModelIndex].ebq[('grad(u)', 2)]
#
# now allocate the 3D variables
if self.nd == 2:
self.q_grad_w = self.q_grad_v.copy()
self.ebqe_grad_w = self.ebqe_grad_v.copy()
if ('grad(u)', 2) in modelList[self.flowModelIndex].ebq:
self.ebq_grad_w = self.ebq_grad_v.copy()
else:
self.q_grad_w = modelList[self.flowModelIndex].q[('grad(u)', 3)]
self.ebqe_grad_w = modelList[self.flowModelIndex].ebqe[('grad(u)', 3)]
if ('grad(u)', 3) in modelList[self.flowModelIndex].ebq:
self.ebq_grad_w = modelList[self.flowModelIndex].ebq[('grad(u)', 3)]
#
self.velocity_dof_u = modelList[self.flowModelIndex].u[1].dof
self.velocity_dof_v = modelList[self.flowModelIndex].u[2].dof
if self.nd == 2:
self.velocity_dof_w = self.velocity_dof_v.copy()
else:
self.velocity_dof_w = modelList[self.flowModelIndex].u[3].dof
if hasattr(modelList[self.flowModelIndex].coefficients, 'q_porosity'):
self.q_porosity = modelList[self.flowModelIndex].coefficients.q_porosity
else:
self.q_porosity = np.ones(self.q[('u', 0)].shape, 'd')
if hasattr(modelList[self.flowModelIndex].coefficients, 'ebqe_porosity'):
self.ebqe_porosity = modelList[self.flowModelIndex].coefficients.ebqe_porosity
else:
self.ebqe_porosity = np.ones( modelList[self.flowModelIndex].ebqe[('velocity', 0)].shape, 'd')
else:
self.velocity_dof_u = np.zeros(self.model.u[0].dof.shape, 'd')
self.velocity_dof_v = np.zeros(self.model.u[0].dof.shape, 'd')
if self.nd == 2:
self.velocity_dof_w = self.velocity_dof_v.copy()
else:
self.velocity_dof_w = np.zeros(self.model.u[0].dof.shape, 'd')
self.q_porosity = np.ones(self.q[('u', 0)].shape, 'd')
self.ebqe_porosity = np.ones(self.ebqe[('u', 0)].shape, 'd')
#
#assert self.kappa_modelIndex is not None and self.kappa_modelIndex < len(modelList), "Dissipation: invalid index for dissipation model allowed range: [0,%s]" % len(modelList)
if self.kappa_modelIndex is not None: # keep for debugging for now
# assume have q,ebqe always
self.q_kappa = modelList[self.kappa_modelIndex].q[('u', 0)]
self.ebqe_kappa = modelList[self.kappa_modelIndex].ebqe[('u', 0)]
self.q_grad_kappa = modelList[self.kappa_modelIndex].q[('grad(u)', 0)]
if ('u', 0) in modelList[self.kappa_modelIndex].ebq:
self.ebq_kappa = modelList[self.kappa_modelIndex].ebq[('u', 0)]
else:
self.q_kappa = np.zeros(self.model.q[('u', 0)].shape, 'd')
self.q_kappa.fill(self.default_kappa)
self.ebqe_kappa = np.zeros(self.model.ebqe[('u', 0)].shape, 'd')
self.ebqe_kappa.fill(self.default_kappa)
self.q_grad_kappa = np.zeros(self.model.q[('grad(u)', 0)].shape, 'd')
if ('u', 0) in self.model.ebq:
self.ebq_kappa = np.zeros(self.model.ebq[('u', 0)].shape, 'd')
self.ebq_kappa.fill(self.default_kappa)
#
if self.VOS_modelIndex is not None:
            self.vosModel = modelList[self.VOS_modelIndex]
self.q_vos = modelList[self.VOS_modelIndex].q[('u', 0)]
self.grad_vos = modelList[self.VOS_modelIndex].q[('grad(u)', 0)]
self.ebqe_vos = modelList[self.VOS_modelIndex].ebqe[('u', 0)]
self.ebqe_grad_vos = modelList[self.VOS_modelIndex].ebqe[('grad(u)', 0)]
else:
self.q_vos = self.model.q[('u', 0)]
self.grad_vos = self.model.q[('u', 0)]
self.ebqe_vos = self.model.ebqe[('u', 0)]
self.ebqe_grad_vos = self.model.ebqe[('u', 0)]
if self.SED_modelIndex is not None:
self.rho_s=modelList[self.SED_modelIndex].coefficients.rho_s
self.vs=modelList[self.SED_modelIndex].q[('u', 0)]
self.ebqe_vs=modelList[self.SED_modelIndex].ebqe[('u', 0)]
else:
self.rho_s=self.rho_0
self.vs=self.q_v
self.ebqe_vs=self.ebqe_v
#
def initializeElementQuadrature(self, t, cq):
if self.flowModelIndex is None:
self.q_v = np.ones(cq[('f', 0)].shape, 'd')
self.q_grad_u = np.ones(cq[('grad(u)', 0)].shape, 'd')
self.q_grad_v = np.ones(cq[('grad(u)', 0)].shape, 'd')
if self.nd == 2:
self.q_grad_w = self.q_grad_v.copy()
else:
self.q_grad_w = np.ones(cq[('grad(u)', 0)].shape, 'd')
if self.kappa_modelIndex is None:
self.q_kappa = np.ones(cq[('u', 0)].shape, 'd')
self.q_kappa.fill(self.default_kappa)
self.q_grad_kappa = np.zeros(cq[('grad(u)', 0)].shape, 'd')
def initializeElementBoundaryQuadrature(self, t, cebq, cebq_global):
if self.flowModelIndex is None:
self.ebq_v = np.ones(cebq[('f', 0)].shape, 'd')
self.ebq_grad_u = np.ones(cebq[('grad(u)', 0)].shape, 'd')
self.ebq_grad_v = np.ones(cebq[('grad(u)', 0)].shape, 'd')
if self.nd == 2:
self.ebq_grad_w = self.ebq_grad_v.copy()
else:
self.ebq_grad_w = np.ones(cebq[('grad(u)', 0)].shape, 'd')
if self.kappa_modelIndex is None:
self.ebq_kappa = np.ones(cebq[('u', 0)].shape, 'd')
self.ebq_kappa.fill(self.default_kappa)
def initializeGlobalExteriorElementBoundaryQuadrature(self, t, cebqe):
if self.flowModelIndex is None:
self.ebqe_v = np.ones(cebqe[('f', 0)].shape, 'd')
self.ebqe_grad_u = np.ones(cebqe[('grad(u)', 0)].shape, 'd')
self.ebqe_grad_v = np.ones(cebqe[('grad(u)', 0)].shape, 'd')
self.ebqe_grad_w = np.ones(cebqe[('grad(u)', 0)].shape, 'd')
if self.kappa_modelIndex is None:
self.ebqe_kappa = np.ones(cebqe[('u', 0)].shape, 'd')
self.ebqe_kappa.fill(self.default_kappa)
def preStep(self, t, firstStep=False):
copyInstructions = {}
return copyInstructions
def postStep(self, t, firstStep=False):
self.u_old_dof = np.copy(self.model.u[0].dof)
for eN in range(self.model.q[('u',0)].shape[0]):
for k in range(self.model.q[('u',0)].shape[1]):
self.model.q[('u',0)][eN,k] = max( self.model.q[('u',0)][eN,k], 1e-10)
if ('u', 0) in self.model.ebq:
for eN in range(self.model.ebq[('u',0)].shape[0]):
for k in range(self.model.ebq[('u',0)].shape[1]):
for l in range(len(self.model.ebq[('u',0)][eN,k])):
self.model.ebq[('u',0)][eN,k,l] = max( self.model.ebq[('u',0)][eN,k,l], 1e-10)
for eN in range(self.model.ebqe[('u',0)].shape[0]):
for k in range(self.model.ebqe[('u',0)].shape[1]):
self.model.ebqe[('u',0)][eN,k] = max( self.model.ebqe[('u',0)][eN,k], 1e-10)
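# Editor's note (not part of the original source): a vectorized sketch of the
# clipping loops above, assuming the quadrature arrays are plain numpy arrays,
# would be
#
#     self.model.q[('u', 0)][:] = np.maximum(self.model.q[('u', 0)], 1.0e-10)
#     if ('u', 0) in self.model.ebq:
#         self.model.ebq[('u', 0)][:] = np.maximum(self.model.ebq[('u', 0)], 1.0e-10)
#     self.model.ebqe[('u', 0)][:] = np.maximum(self.model.ebqe[('u', 0)], 1.0e-10)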
copyInstructions = {}
return copyInstructions
def updateToMovingDomain(self, t, c):
# in a moving domain simulation the velocity coming in is already for the moving domain
pass
def evaluate(self, t, c):
# mwf debug
# print "Dissipationcoeficients eval t=%s " % t
if c[('f', 0)].shape == self.q_v.shape:
v = self.q_v
phi = self.q_phi
grad_u = self.q_grad_u
grad_v = self.q_grad_v
grad_w = self.q_grad_w
kappa = self.q_kappa
elif c[('f', 0)].shape == self.ebqe_v.shape:
v = self.ebqe_v
phi = self.ebqe_phi
grad_u = self.ebqe_grad_u
grad_v = self.ebqe_grad_v
grad_w = self.ebqe_grad_w
kappa = self.ebqe_kappa
elif ((self.ebq_v is not None and self.ebq_phi is not None and self.ebq_grad_u is not None and self.ebq_grad_v is not None and self.ebq_grad_w is not None and self.ebq_kappa is not None) and c[('f', 0)].shape == self.ebq_v.shape):
v = self.ebq_v
phi = self.ebq_phi
grad_u = self.ebq_grad_u
grad_v = self.ebq_grad_v
grad_w = self.ebq_grad_w
kappa = self.ebq_kappa
else:
v = None
phi = None
grad_u = None
grad_v = None
grad_w = None
if v is not None:
if self.nd == 2:
self.kEpsilon_epsilon_2D_Evaluate_sd(self.sigma_e,
self.c_1,
self.c_2,
self.c_mu,
self.c_e,
self.nu,
v,
grad_u,
grad_v,
c[('u', 0)],
kappa,
c[('m', 0)],
c[('dm', 0, 0)],
c[('f', 0)],
c[('df', 0, 0)],
c[('a', 0, 0)],
c[('da', 0, 0, 0)],
c[('r', 0)],
c[('dr', 0, 0)])
else:
self.kEpsilon_epsilon_3D_Evaluate_sd(self.sigma_e,
self.c_1,
self.c_2,
self.c_mu,
self.c_e,
self.nu,
v,
grad_u,
grad_v,
grad_w,
c[('u', 0)],
kappa,
c[('m', 0)],
c[('dm', 0, 0)],
c[('f', 0)],
c[('df', 0, 0)],
c[('a', 0, 0)],
c[('da', 0, 0, 0)],
c[('r', 0)],
c[('dr', 0, 0)])
class LevelModel(proteus.Transport.OneLevelTransport):
nCalls = 0
def __init__(self,
uDict,
phiDict,
testSpaceDict,
matType,
dofBoundaryConditionsDict,
dofBoundaryConditionsSetterDict,
coefficients,
elementQuadrature,
elementBoundaryQuadrature,
fluxBoundaryConditionsDict=None,
advectiveFluxBoundaryConditionsSetterDict=None,
diffusiveFluxBoundaryConditionsSetterDictDict=None,
stressTraceBoundaryConditionsSetterDict=None,
stabilization=None,
shockCapturing=None,
conservativeFluxDict=None,
numericalFluxType=None,
TimeIntegrationClass=None,
massLumping=False,
reactionLumping=False,
options=None,
name='defaultName',
reuse_trial_and_test_quadrature=True,
sd = True,
movingDomain=False,
bdyNullSpace=False):
#
# set the objects describing the method and boundary conditions
#
self.bdyNullSpace=bdyNullSpace
self.movingDomain=movingDomain
self.tLast_mesh=None
#
self.name = name
self.sd = sd
self.Hess = False
self.lowmem = True
self.timeTerm = True # allow turning off the time derivative
# self.lowmem=False
self.testIsTrial = True
self.phiTrialIsTrial = True
self.u = uDict
self.ua = {} # analytical solutions
self.phi = phiDict
self.dphi = {}
self.matType = matType
# try to reuse test and trial information across components if spaces are the same
self.reuse_test_trial_quadrature = reuse_trial_and_test_quadrature # True#False
if self.reuse_test_trial_quadrature:
for ci in range(1, coefficients.nc):
assert self.u[ci].femSpace.__class__.__name__ == self.u[0].femSpace.__class__.__name__, "to reuse_test_trial_quad all femSpaces must be the same!"
# Simplicial Mesh
self.mesh = self.u[0].femSpace.mesh # assume the same mesh for all components for now
self.testSpace = testSpaceDict
self.dirichletConditions = dofBoundaryConditionsDict
self.dirichletNodeSetList = None # explicit Dirichlet conditions for now, no Dirichlet BC constraints
self.coefficients = coefficients
self.coefficients.initializeMesh(self.mesh)
self.nc = self.coefficients.nc
self.stabilization = stabilization
self.shockCapturing = shockCapturing
self.conservativeFlux = conservativeFluxDict # no velocity post-processing for now
self.fluxBoundaryConditions = fluxBoundaryConditionsDict
self.advectiveFluxBoundaryConditionsSetterDict = advectiveFluxBoundaryConditionsSetterDict
self.diffusiveFluxBoundaryConditionsSetterDictDict = diffusiveFluxBoundaryConditionsSetterDictDict
# determine whether the stabilization term is nonlinear
self.stabilizationIsNonlinear = False
# cek come back
if self.stabilization is not None:
for ci in range(self.nc):
if ci in coefficients.mass:
for flag in list(coefficients.mass[ci].values()):
if flag == 'nonlinear':
self.stabilizationIsNonlinear = True
if ci in coefficients.advection:
for flag in list(coefficients.advection[ci].values()):
if flag == 'nonlinear':
self.stabilizationIsNonlinear = True
if ci in coefficients.diffusion:
for diffusionDict in list(coefficients.diffusion[ci].values()):
for flag in list(diffusionDict.values()):
if flag != 'constant':
self.stabilizationIsNonlinear = True
if ci in coefficients.potential:
for flag in list(coefficients.potential[ci].values()):
if flag == 'nonlinear':
self.stabilizationIsNonlinear = True
if ci in coefficients.reaction:
for flag in list(coefficients.reaction[ci].values()):
if flag == 'nonlinear':
self.stabilizationIsNonlinear = True
if ci in coefficients.hamiltonian:
for flag in list(coefficients.hamiltonian[ci].values()):
if flag == 'nonlinear':
self.stabilizationIsNonlinear = True
# determine if we need element boundary storage
self.elementBoundaryIntegrals = {}
for ci in range(self.nc):
self.elementBoundaryIntegrals[ci] = ((self.conservativeFlux is not None) or
(numericalFluxType is not None) or
(self.fluxBoundaryConditions[ci] == 'outFlow') or
(self.fluxBoundaryConditions[ci] == 'mixedFlow') or
(self.fluxBoundaryConditions[ci] == 'setFlow'))
#
# calculate some dimensions
#
self.nSpace_global = self.u[0].femSpace.nSpace_global # assume same space dim for all variables
self.nDOF_trial_element = [u_j.femSpace.max_nDOF_element for u_j in list(self.u.values())]
self.nDOF_phi_trial_element = [phi_k.femSpace.max_nDOF_element for phi_k in list(self.phi.values())]
self.n_phi_ip_element = [phi_k.femSpace.referenceFiniteElement.interpolationConditions.nQuadraturePoints for phi_k in list(self.phi.values())]
self.nDOF_test_element = [femSpace.max_nDOF_element for femSpace in list(self.testSpace.values())]
self.nFreeDOF_global = [dc.nFreeDOF_global for dc in list(self.dirichletConditions.values())]
self.nVDOF_element = sum(self.nDOF_trial_element)
self.nFreeVDOF_global = sum(self.nFreeDOF_global)
#
proteus.NonlinearSolvers.NonlinearEquation.__init__(self, self.nFreeVDOF_global)
#
# build the quadrature point dictionaries from the input (this
# is just for convenience so that the input doesn't have to be
# complete)
#
elementQuadratureDict = {}
elemQuadIsDict = isinstance(elementQuadrature, dict)
if elemQuadIsDict: # set terms manually
for I in self.coefficients.elementIntegralKeys:
if I in elementQuadrature:
elementQuadratureDict[I] = elementQuadrature[I]
else:
elementQuadratureDict[I] = elementQuadrature['default']
else:
for I in self.coefficients.elementIntegralKeys:
elementQuadratureDict[I] = elementQuadrature
if self.stabilization is not None:
for I in self.coefficients.elementIntegralKeys:
if elemQuadIsDict:
if I in elementQuadrature:
elementQuadratureDict[('stab',) + I[1:]] = elementQuadrature[I]
else:
elementQuadratureDict[('stab',) + I[1:]] = elementQuadrature['default']
else:
elementQuadratureDict[('stab',) + I[1:]] = elementQuadrature
if self.shockCapturing is not None:
for ci in self.shockCapturing.components:
if elemQuadIsDict:
if ('numDiff', ci, ci) in elementQuadrature:
elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature[('numDiff', ci, ci)]
else:
elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature['default']
else:
elementQuadratureDict[('numDiff', ci, ci)] = elementQuadrature
if massLumping:
for ci in list(self.coefficients.mass.keys()):
elementQuadratureDict[('m', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
for I in self.coefficients.elementIntegralKeys:
elementQuadratureDict[('stab',) + I[1:]] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
if reactionLumping:
for ci in list(self.coefficients.mass.keys()):
elementQuadratureDict[('r', ci)] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
for I in self.coefficients.elementIntegralKeys:
elementQuadratureDict[('stab',) + I[1:]] = Quadrature.SimplexLobattoQuadrature(self.nSpace_global, 1)
elementBoundaryQuadratureDict = {}
if isinstance(elementBoundaryQuadrature, dict): # set terms manually
for I in self.coefficients.elementBoundaryIntegralKeys:
if I in elementBoundaryQuadrature:
elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature[I]
else:
elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature['default']
else:
for I in self.coefficients.elementBoundaryIntegralKeys:
elementBoundaryQuadratureDict[I] = elementBoundaryQuadrature
#
# find the union of all element quadrature points and
# build a quadrature rule for each integral that has a
# weight at each point in the union
# mwf include tag telling me which indices are which quadrature rule?
(self.elementQuadraturePoints, self.elementQuadratureWeights,
self.elementQuadratureRuleIndeces) = proteus.Quadrature.buildUnion(elementQuadratureDict)
self.nQuadraturePoints_element = self.elementQuadraturePoints.shape[0]
self.nQuadraturePoints_global = self.nQuadraturePoints_element * self.mesh.nElements_global
#
# Repeat the same thing for the element boundary quadrature
#
(self.elementBoundaryQuadraturePoints,
self.elementBoundaryQuadratureWeights,
self.elementBoundaryQuadratureRuleIndeces) = proteus.Quadrature.buildUnion(elementBoundaryQuadratureDict)
self.nElementBoundaryQuadraturePoints_elementBoundary = self.elementBoundaryQuadraturePoints.shape[0]
self.nElementBoundaryQuadraturePoints_global = (self.mesh.nElements_global *
self.mesh.nElementBoundaries_element *
self.nElementBoundaryQuadraturePoints_elementBoundary)
# if isinstance(self.u[0].femSpace,C0_AffineLinearOnSimplexWithNodalBasis):
# print self.nQuadraturePoints_element
# if self.nSpace_global == 3:
# assert(self.nQuadraturePoints_element == 5)
# elif self.nSpace_global == 2:
# assert(self.nQuadraturePoints_element == 6)
# elif self.nSpace_global == 1:
# assert(self.nQuadraturePoints_element == 3)
#
# print self.nElementBoundaryQuadraturePoints_elementBoundary
# if self.nSpace_global == 3:
# assert(self.nElementBoundaryQuadraturePoints_elementBoundary == 4)
# elif self.nSpace_global == 2:
# assert(self.nElementBoundaryQuadraturePoints_elementBoundary == 4)
# elif self.nSpace_global == 1:
# assert(self.nElementBoundaryQuadraturePoints_elementBoundary == 1)
#
# storage dictionaries
self.scalars_element = set()
#
# simplified allocations for test==trial and also check if space is mixed or not
#
self.q = {}
self.ebq = {}
self.ebq_global = {}
self.ebqe = {}
self.phi_ip = {}
# mesh
#self.q['x'] = np.zeros((self.mesh.nElements_global,self.nQuadraturePoints_element,3),'d')
self.ebqe['x'] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary, 3), 'd')
self.ebqe['n'] = np.zeros(
(self.mesh.nExteriorElementBoundaries_global,
self.nElementBoundaryQuadraturePoints_elementBoundary,
self.nSpace_global),
'd')
self.q[('u', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
self.q[('grad(u)', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
#diffusion, isotropic
self.q[('a', 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
self.q[('da', 0, 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element, self.nSpace_global), 'd')
# linear potential
self.q[('phi', 0)] = self.q[('u', 0)]
self.q[('grad(phi)', 0)] = self.q[('grad(u)', 0)]
self.q[('dphi', 0, 0)] = np.ones((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
# mass
self.q[('m', 0)] = self.q[('u', 0)]
self.q[('m_last', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
self.q[('m_tmp', 0)] = self.q[('u', 0)]
self.q[('cfl', 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
self.q[('numDiff', 0, 0)] = np.zeros((self.mesh.nElements_global, self.nQuadraturePoints_element), 'd')
self.ebqe[('u', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.ebqe[('grad(u)', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global,
self.nElementBoundaryQuadraturePoints_elementBoundary, self.nSpace_global), 'd')
self.ebqe[('advectiveFlux_bc_flag', 0)] = np.zeros(
(self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
self.ebqe[('advectiveFlux_bc', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.ebqe[('advectiveFlux', 0)] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.ebqe[('diffusiveFlux_bc_flag', 0, 0)] = np.zeros(
(self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'i')
self.ebqe[('diffusiveFlux_bc', 0, 0)] = np.zeros(
(self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.ebqe[('penalty')] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.points_elementBoundaryQuadrature = set()
self.scalars_elementBoundaryQuadrature = set([('u', ci) for ci in range(self.nc)])
self.vectors_elementBoundaryQuadrature = set()
self.tensors_elementBoundaryQuadrature = set()
self.inflowBoundaryBC = {}
self.inflowBoundaryBC_values = {}
self.inflowFlux = {}
for cj in range(self.nc):
self.inflowBoundaryBC[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global,), 'i')
self.inflowBoundaryBC_values[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nDOF_trial_element[cj]), 'd')
self.inflowFlux[cj] = np.zeros((self.mesh.nExteriorElementBoundaries_global, self.nElementBoundaryQuadraturePoints_elementBoundary), 'd')
self.internalNodes = set(range(self.mesh.nNodes_global))
# identify the internal nodes; this ought to be in the mesh class
# \todo move this to mesh
for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
eN_global = self.mesh.elementBoundaryElementsArray[ebN, 0]
ebN_element = self.mesh.elementBoundaryLocalElementBoundariesArray[ebN, 0]
for i in range(self.mesh.nNodes_element):
if i != ebN_element:
I = self.mesh.elementNodesArray[eN_global, i]
self.internalNodes -= set([I])
self.nNodes_internal = len(self.internalNodes)
self.internalNodesArray = np.zeros((self.nNodes_internal,), 'i')
for nI, n in enumerate(self.internalNodes):
self.internalNodesArray[nI] = n
#
del self.internalNodes
self.internalNodes = None
prof.logEvent("Updating local to global mappings", 2)
self.updateLocal2Global()
prof.logEvent("Building time integration object", 2)
prof.logEvent(prof.memory("inflowBC, internalNodes,updateLocal2Global", "OneLevelTransport"), level=4)
# mwf for interpolating subgrid error for gradients etc
if self.stabilization and self.stabilization.usesGradientStabilization:
self.timeIntegration = TimeIntegrationClass(self, integrateInterpolationPoints=True)
else:
self.timeIntegration = TimeIntegrationClass(self)
if options is not None:
self.timeIntegration.setFromOptions(options)
prof.logEvent(prof.memory("TimeIntegration", "OneLevelTransport"), level=4)
prof.logEvent("Calculating numerical quadrature formulas", 2)
self.calculateQuadrature()
self.setupFieldStrides()
comm = proteus.Comm.get()
self.comm = comm
if comm.size() > 1:
assert numericalFluxType is not None and numericalFluxType.useWeakDirichletConditions, "You must use a numerical flux to apply weak boundary conditions for parallel runs"
prof.logEvent(prof.memory("stride+offset", "OneLevelTransport"), level=4)
if numericalFluxType is not None:
if options is None or options.periodicDirichletConditions is None:
self.numericalFlux = numericalFluxType(self,
dofBoundaryConditionsSetterDict,
advectiveFluxBoundaryConditionsSetterDict,
diffusiveFluxBoundaryConditionsSetterDictDict)
else:
self.numericalFlux = numericalFluxType(self,
dofBoundaryConditionsSetterDict,
advectiveFluxBoundaryConditionsSetterDict,
diffusiveFluxBoundaryConditionsSetterDictDict,
options.periodicDirichletConditions)
else:
self.numericalFlux = None
# set penalty terms
# cek todo move into numerical flux initialization
if 'penalty' in self.ebq_global:
for ebN in range(self.mesh.nElementBoundaries_global):
for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
self.ebq_global['penalty'][ebN, k] = old_div(self.numericalFlux.penalty_constant, \
(self.mesh.elementBoundaryDiametersArray[ebN]**self.numericalFlux.penalty_power))
# penalty term
# cek move to Numerical flux initialization
if 'penalty' in self.ebqe:
for ebNE in range(self.mesh.nExteriorElementBoundaries_global):
ebN = self.mesh.exteriorElementBoundariesArray[ebNE]
for k in range(self.nElementBoundaryQuadraturePoints_elementBoundary):
self.ebqe['penalty'][ebNE, k] = old_div(self.numericalFlux.penalty_constant, \
self.mesh.elementBoundaryDiametersArray[ebN]**self.numericalFlux.penalty_power)
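# Editor's note: both penalty loops above evaluate the interior-penalty scaling
#
#     penalty = penalty_constant / h_face**penalty_power
#
# with h_face the element boundary diameter, so the penalty term grows as the
# mesh is refined.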
prof.logEvent(prof.memory("numericalFlux", "OneLevelTransport"), level=4)
self.elementEffectiveDiametersArray = self.mesh.elementInnerDiametersArray
# use post processing tools to get conservative fluxes, None by default
from proteus import PostProcessingTools
self.velocityPostProcessor = PostProcessingTools.VelocityPostProcessingChooser(self)
prof.logEvent(prof.memory("velocity postprocessor", "OneLevelTransport"), level=4)
# helper for writing out data storage
from proteus import Archiver
self.elementQuadratureDictionaryWriter = Archiver.XdmfWriter()
self.elementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
self.exteriorElementBoundaryQuadratureDictionaryWriter = Archiver.XdmfWriter()
# TODO get rid of this
# mwf can I use the numericalFlux's flag information?
for ci, fbcObject in list(self.fluxBoundaryConditionsObjectsDict.items()):
self.ebqe[('advectiveFlux_bc_flag', ci)] = np.zeros(self.ebqe[('advectiveFlux_bc', ci)].shape, 'i')
for t, g in list(fbcObject.advectiveFluxBoundaryConditionsDict.items()):
if ci in self.coefficients.advection:
self.ebqe[('advectiveFlux_bc', ci)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
self.ebqe[('advectiveFlux_bc_flag', ci)][t[0], t[1]] = 1
for ck, diffusiveFluxBoundaryConditionsDict in list(fbcObject.diffusiveFluxBoundaryConditionsDictDict.items()):
self.ebqe[('diffusiveFlux_bc_flag', ck, ci)] = np.zeros(self.ebqe[('diffusiveFlux_bc', ck, ci)].shape, 'i')
for t, g in list(diffusiveFluxBoundaryConditionsDict.items()):
self.ebqe[('diffusiveFlux_bc', ck, ci)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
self.ebqe[('diffusiveFlux_bc_flag', ck, ci)][t[0], t[1]] = 1
if hasattr(self.numericalFlux, 'setDirichletValues'):
self.numericalFlux.setDirichletValues(self.ebqe)
if not hasattr(self.numericalFlux, 'isDOFBoundary'):
self.numericalFlux.isDOFBoundary = {0: np.zeros(self.ebqe[('u', 0)].shape, 'i')}
if not hasattr(self.numericalFlux, 'ebqe'):
self.numericalFlux.ebqe = {('u', 0): np.zeros(self.ebqe[('u', 0)].shape, 'd')}
# TODO how to handle redistancing calls for calculateCoefficients,calculateElementResidual etc
self.globalResidualDummy = None
compKernelFlag = 0
if self.nSpace_global == 2:
self.dissipation = cDissipation2D_base(self.nSpace_global,
self.nQuadraturePoints_element,
self.u[0].femSpace.elementMaps.localFunctionSpace.dim,
self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim,
self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim,
self.nElementBoundaryQuadraturePoints_elementBoundary,
compKernelFlag,
self.coefficients.aDarcy,
self.coefficients.betaForch,
self.coefficients.grain,
self.coefficients.packFraction,
self.coefficients.packMargin,
self.coefficients.maxFraction,
self.coefficients.frFraction,
self.coefficients.sigmaC,
self.coefficients.C3e,
self.coefficients.C4e,
self.coefficients.eR,
self.coefficients.fContact,
self.coefficients.mContact,
self.coefficients.nContact,
self.coefficients.angFriction,
self.coefficients.vos_limiter,
self.coefficients.mu_fr_limiter)
else:
self.dissipation = cDissipation_base(self.nSpace_global,
self.nQuadraturePoints_element,
self.u[0].femSpace.elementMaps.localFunctionSpace.dim,
self.u[0].femSpace.referenceFiniteElement.localFunctionSpace.dim,
self.testSpace[0].referenceFiniteElement.localFunctionSpace.dim,
self.nElementBoundaryQuadraturePoints_elementBoundary,
compKernelFlag,
self.coefficients.aDarcy,
self.coefficients.betaForch,
self.coefficients.grain,
self.coefficients.packFraction,
self.coefficients.packMargin,
self.coefficients.maxFraction,
self.coefficients.frFraction,
self.coefficients.sigmaC,
self.coefficients.C3e,
self.coefficients.C4e,
self.coefficients.eR,
self.coefficients.fContact,
self.coefficients.mContact,
self.coefficients.nContact,
self.coefficients.angFriction,
self.coefficients.vos_limiter,
self.coefficients.mu_fr_limiter)
self.forceStrongConditions = False
if self.forceStrongConditions:
self.dirichletConditionsForceDOF = DOFBoundaryConditions(self.u[0].femSpace, dofBoundaryConditionsSetterDict[0], weakDirichletConditions=False)
if self.movingDomain:
self.MOVING_DOMAIN = 1.0
else:
self.MOVING_DOMAIN = 0.0
# cek hack
self.movingDomain = False
self.MOVING_DOMAIN = 0.0
if self.mesh.nodeVelocityArray is None:
self.mesh.nodeVelocityArray = np.zeros(self.mesh.nodeArray.shape, 'd')
# mwf these are getting called by redistancing classes,
def calculateCoefficients(self):
pass
def calculateElementResidual(self):
if self.globalResidualDummy is not None:
self.getResidual(self.u[0].dof, self.globalResidualDummy)
def getResidual(self, u, r):
"""
Calculate the element residuals and add in to the global residual
"""
import pdb
import copy
r.fill(0.0)
# Load the unknowns into the finite element dof
self.timeIntegration.calculateCoefs()
# print "***************max/min(m_last)*********************",max(self.timeIntegration.m_last[0].flat[:]),min(self.timeIntegration.m_last[0].flat[:])
# print "***************max/min(m_last)*********************",max(-self.timeIntegration.dt*self.timeIntegration.beta_bdf[0].flat[:]),min(-self.timeIntegration.dt*self.timeIntegration.beta_bdf[0].flat[:]),
self.timeIntegration.calculateU(u)
self.setUnknowns(self.timeIntegration.u)
# cek can put in logic to skip if BC's don't depend on t or u
# Dirichlet boundary conditions
# if hasattr(self.numericalFlux,'setDirichletValues'):
self.numericalFlux.setDirichletValues(self.ebqe)
# flux boundary conditions
for t, g in list(self.fluxBoundaryConditionsObjectsDict[0].advectiveFluxBoundaryConditionsDict.items()):
self.ebqe[('advectiveFlux_bc', 0)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
self.ebqe[('advectiveFlux_bc_flag', 0)][t[0], t[1]] = 1
for ck, diffusiveFluxBoundaryConditionsDict in list(self.fluxBoundaryConditionsObjectsDict[0].diffusiveFluxBoundaryConditionsDictDict.items()):
for t, g in list(diffusiveFluxBoundaryConditionsDict.items()):
self.ebqe[('diffusiveFlux_bc', ck, 0)][t[0], t[1]] = g(self.ebqe[('x')][t[0], t[1]], self.timeIntegration.t)
self.ebqe[('diffusiveFlux_bc_flag', ck, 0)][t[0], t[1]] = 1
# self.shockCapturing.lag=True
if self.forceStrongConditions:
for dofN, g in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.items()):
self.u[0].dof[dofN] = g(self.dirichletConditionsForceDOF.DOFBoundaryPointDict[dofN], self.timeIntegration.t)
#
# mwf debug
#import pdb
# pdb.set_trace()
argsDict = cArgumentsDict.ArgumentsDict()
argsDict["mesh_trial_ref"] = self.u[0].femSpace.elementMaps.psi
argsDict["mesh_grad_trial_ref"] = self.u[0].femSpace.elementMaps.grad_psi
argsDict["mesh_dof"] = self.mesh.nodeArray
argsDict["mesh_velocity_dof"] = self.mesh.nodeVelocityArray
argsDict["MOVING_DOMAIN"] = self.MOVING_DOMAIN
argsDict["mesh_l2g"] = self.mesh.elementNodesArray
argsDict["dV_ref"] = self.elementQuadratureWeights[('u', 0)]
argsDict["u_trial_ref"] = self.u[0].femSpace.psi
argsDict["u_grad_trial_ref"] = self.u[0].femSpace.grad_psi
argsDict["u_test_ref"] = self.u[0].femSpace.psi
argsDict["u_grad_test_ref"] = self.u[0].femSpace.grad_psi
argsDict["mesh_trial_trace_ref"] = self.u[0].femSpace.elementMaps.psi_trace
argsDict["mesh_grad_trial_trace_ref"] = self.u[0].femSpace.elementMaps.grad_psi_trace
argsDict["dS_ref"] = self.elementBoundaryQuadratureWeights[('u', 0)]
argsDict["u_trial_trace_ref"] = self.u[0].femSpace.psi_trace
argsDict["u_grad_trial_trace_ref"] = self.u[0].femSpace.grad_psi_trace
argsDict["u_test_trace_ref"] = self.u[0].femSpace.psi_trace
argsDict["u_grad_test_trace_ref"] = self.u[0].femSpace.grad_psi_trace
argsDict["normal_ref"] = self.u[0].femSpace.elementMaps.boundaryNormals
argsDict["boundaryJac_ref"] = self.u[0].femSpace.elementMaps.boundaryJacobians
argsDict["nElements_global"] = self.mesh.nElements_global
argsDict["nu_0"] = self.coefficients.nu_0
argsDict["nu_1"] = self.coefficients.nu_1
argsDict["sigma_e"] = self.coefficients.sigma_e
argsDict["c_mu"] = self.coefficients.c_mu
argsDict["c_1"] = self.coefficients.c_1
argsDict["c_2"] = self.coefficients.c_2
argsDict["c_e"] = self.coefficients.c_e
argsDict["rho_0"] = self.coefficients.rho_0
argsDict["rho_1"] = self.coefficients.rho_1
argsDict["sedFlag"] = self.coefficients.sedFlag
argsDict["q_vos"] = self.coefficients.q_vos
argsDict["q_vos_gradc"] = self.coefficients.grad_vos
argsDict["ebqe_q_vos"] = self.coefficients.ebqe_vos
argsDict["ebqe_q_vos_gradc"] = self.coefficients.ebqe_grad_vos
argsDict["rho_f"] = self.coefficients.rho_0
argsDict["rho_s"] = self.coefficients.rho_s
argsDict["vs"] = self.coefficients.vs
argsDict["ebqe_vs"] = self.coefficients.ebqe_vs
argsDict["g"] = self.coefficients.g
argsDict["dissipation_model_flag"] = self.coefficients.dissipation_model_flag
argsDict["useMetrics"] = self.coefficients.useMetrics
argsDict["alphaBDF"] = self.timeIntegration.alpha_bdf
argsDict["lag_shockCapturing"] = self.shockCapturing.lag
argsDict["shockCapturingDiffusion"] = self.shockCapturing.shockCapturingFactor
argsDict["sc_uref"] = self.coefficients.sc_uref
argsDict["sc_alpha"] = self.coefficients.sc_beta
argsDict["u_l2g"] = self.u[0].femSpace.dofMap.l2g
argsDict["elementDiameter"] = self.mesh.elementDiametersArray
argsDict["u_dof"] = self.u[0].dof
argsDict["u_dof_old"] = self.coefficients.u_old_dof
argsDict["velocity"] = self.coefficients.q_v
argsDict["phi_ls"] = self.coefficients.q_phi
argsDict["q_kappa"] = self.coefficients.q_kappa
argsDict["q_grad_kappa"] = self.coefficients.q_grad_kappa
argsDict["q_porosity"] = self.coefficients.q_porosity
argsDict["velocity_dof_u"] = self.coefficients.velocity_dof_u
argsDict["velocity_dof_v"] = self.coefficients.velocity_dof_v
argsDict["velocity_dof_w"] = self.coefficients.velocity_dof_w
argsDict["q_m"] = self.timeIntegration.m_tmp[0]
argsDict["q_u"] = self.q[('u', 0)]
argsDict["q_grad_u"] = self.q[('grad(u)', 0)]
argsDict["q_m_betaBDF"] = self.timeIntegration.beta_bdf[0]
argsDict["cfl"] = self.q[('cfl', 0)]
argsDict["q_numDiff_u"] = self.shockCapturing.numDiff[0]
argsDict["q_numDiff_u_last"] = self.shockCapturing.numDiff_last[0]
argsDict["ebqe_penalty_ext"] = self.ebqe['penalty']
argsDict["offset_u"] = self.offset[0]
argsDict["stride_u"] = self.stride[0]
argsDict["globalResidual"] = r
argsDict["nExteriorElementBoundaries_global"] = self.mesh.nExteriorElementBoundaries_global
argsDict["exteriorElementBoundariesArray"] = self.mesh.exteriorElementBoundariesArray
argsDict["elementBoundaryElementsArray"] = self.mesh.elementBoundaryElementsArray
argsDict["elementBoundaryLocalElementBoundariesArray"] = self.mesh.elementBoundaryLocalElementBoundariesArray
argsDict["ebqe_velocity_ext"] = self.coefficients.ebqe_v
argsDict["isDOFBoundary_u"] = self.numericalFlux.isDOFBoundary[0]
argsDict["ebqe_bc_u_ext"] = self.numericalFlux.ebqe[('u', 0)]
argsDict["isAdvectiveFluxBoundary_u"] = self.ebqe[('advectiveFlux_bc_flag', 0)]
argsDict["ebqe_bc_advectiveFlux_u_ext"] = self.ebqe[('advectiveFlux_bc', 0)]
argsDict["isDiffusiveFluxBoundary_u"] = self.ebqe[('diffusiveFlux_bc_flag', 0, 0)]
argsDict["ebqe_bc_diffusiveFlux_u_ext"] = self.ebqe[('diffusiveFlux_bc', 0, 0)]
argsDict["ebqe_phi"] = self.coefficients.ebqe_phi
argsDict["epsFact"] = self.coefficients.epsFact
argsDict["ebqe_kappa"] = self.coefficients.ebqe_kappa
argsDict["ebqe_porosity"] = self.coefficients.ebqe_porosity
argsDict["ebqe_u"] = self.ebqe[('u', 0)]
argsDict["ebqe_flux"] = self.ebqe[('advectiveFlux', 0)]
self.dissipation.calculateResidual(argsDict)
if self.forceStrongConditions:
for dofN, g in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.items()):
r[dofN] = 0
if self.stabilization:
self.stabilization.accumulateSubgridMassHistory(self.q)
prof.logEvent("Global residual", level=9, data=r)
# mwf decide if this is reasonable for keeping solver statistics
self.nonlinear_function_evaluations += 1
if self.globalResidualDummy is None:
self.globalResidualDummy = np.zeros(r.shape, 'd')
def getJacobian(self, jacobian):
cfemIntegrals.zeroJacobian_CSR(self.nNonzerosInJacobian,
jacobian)
argsDict = cArgumentsDict.ArgumentsDict()
argsDict["mesh_trial_ref"] = self.u[0].femSpace.elementMaps.psi
argsDict["mesh_grad_trial_ref"] = self.u[0].femSpace.elementMaps.grad_psi
argsDict["mesh_dof"] = self.mesh.nodeArray
argsDict["mesh_velocity_dof"] = self.mesh.nodeVelocityArray
argsDict["MOVING_DOMAIN"] = self.MOVING_DOMAIN
argsDict["mesh_l2g"] = self.mesh.elementNodesArray
argsDict["dV_ref"] = self.elementQuadratureWeights[('u', 0)]
argsDict["u_trial_ref"] = self.u[0].femSpace.psi
argsDict["u_grad_trial_ref"] = self.u[0].femSpace.grad_psi
argsDict["u_test_ref"] = self.u[0].femSpace.psi
argsDict["u_grad_test_ref"] = self.u[0].femSpace.grad_psi
argsDict["mesh_trial_trace_ref"] = self.u[0].femSpace.elementMaps.psi_trace
argsDict["mesh_grad_trial_trace_ref"] = self.u[0].femSpace.elementMaps.grad_psi_trace
argsDict["dS_ref"] = self.elementBoundaryQuadratureWeights[('u', 0)]
argsDict["u_trial_trace_ref"] = self.u[0].femSpace.psi_trace
argsDict["u_grad_trial_trace_ref"] = self.u[0].femSpace.grad_psi_trace
argsDict["u_test_trace_ref"] = self.u[0].femSpace.psi_trace
argsDict["u_grad_test_trace_ref"] = self.u[0].femSpace.grad_psi_trace
argsDict["normal_ref"] = self.u[0].femSpace.elementMaps.boundaryNormals
argsDict["boundaryJac_ref"] = self.u[0].femSpace.elementMaps.boundaryJacobians
argsDict["nElements_global"] = self.mesh.nElements_global
argsDict["nu_0"] = self.coefficients.nu_0
argsDict["nu_1"] = self.coefficients.nu_1
argsDict["sigma_e"] = self.coefficients.sigma_e
argsDict["c_mu"] = self.coefficients.c_mu
argsDict["c_1"] = self.coefficients.c_1
argsDict["c_2"] = self.coefficients.c_2
argsDict["c_e"] = self.coefficients.c_e
argsDict["rho_0"] = self.coefficients.rho_0
argsDict["rho_1"] = self.coefficients.rho_1
argsDict["dissipation_model_flag"] = self.coefficients.dissipation_model_flag
argsDict["useMetrics"] = self.coefficients.useMetrics
argsDict["alphaBDF"] = self.timeIntegration.alpha_bdf
argsDict["lag_shockCapturing"] = self.shockCapturing.lag
argsDict["shockCapturingDiffusion"] = self.shockCapturing.shockCapturingFactor
argsDict["u_l2g"] = self.u[0].femSpace.dofMap.l2g
argsDict["elementDiameter"] = self.mesh.elementDiametersArray
argsDict["u_dof"] = self.u[0].dof
argsDict["u_dof_old"] = self.coefficients.u_old_dof
argsDict["velocity"] = self.coefficients.q_v
argsDict["phi_ls"] = self.coefficients.q_phi
argsDict["q_kappa"] = self.coefficients.q_kappa
argsDict["q_grad_kappa"] = self.coefficients.q_grad_kappa
argsDict["q_porosity"] = self.coefficients.q_porosity
argsDict["sedFlag"] = self.coefficients.sedFlag
argsDict["q_vos"] = self.coefficients.q_vos
argsDict["q_vos_gradc"] = self.coefficients.grad_vos
argsDict["ebqe_q_vos"] = self.coefficients.ebqe_vos
argsDict["ebqe_q_vos_gradc"] = self.coefficients.ebqe_grad_vos
argsDict["rho_f"] = self.coefficients.rho_0
argsDict["rho_s"] = self.coefficients.rho_s
argsDict["vs"] = self.coefficients.vs
argsDict["ebqe_vs"] = self.coefficients.ebqe_vs
argsDict["g"] = self.coefficients.g
argsDict["velocity_dof_u"] = self.coefficients.velocity_dof_u
argsDict["velocity_dof_v"] = self.coefficients.velocity_dof_v
argsDict["velocity_dof_w"] = self.coefficients.velocity_dof_w
argsDict["q_m_betaBDF"] = self.timeIntegration.beta_bdf[0]
argsDict["cfl"] = self.q[('cfl', 0)]
argsDict["q_numDiff_u_last"] = self.shockCapturing.numDiff_last[0]
argsDict["ebqe_penalty_ext"] = self.ebqe['penalty']
argsDict["csrRowIndeces_u_u"] = self.csrRowIndeces[(0, 0)]
argsDict["csrColumnOffsets_u_u"] = self.csrColumnOffsets[(0, 0)]
argsDict["globalJacobian"] = jacobian.getCSRrepresentation()[2]
argsDict["nExteriorElementBoundaries_global"] = self.mesh.nExteriorElementBoundaries_global
argsDict["exteriorElementBoundariesArray"] = self.mesh.exteriorElementBoundariesArray
argsDict["elementBoundaryElementsArray"] = self.mesh.elementBoundaryElementsArray
argsDict["elementBoundaryLocalElementBoundariesArray"] = self.mesh.elementBoundaryLocalElementBoundariesArray
argsDict["ebqe_velocity_ext"] = self.coefficients.ebqe_v
argsDict["isDOFBoundary_u"] = self.numericalFlux.isDOFBoundary[0]
argsDict["ebqe_bc_u_ext"] = self.numericalFlux.ebqe[('u', 0)]
argsDict["isAdvectiveFluxBoundary_u"] = self.ebqe[('advectiveFlux_bc_flag', 0)]
argsDict["ebqe_bc_advectiveFlux_u_ext"] = self.ebqe[('advectiveFlux_bc', 0)]
argsDict["isDiffusiveFluxBoundary_u"] = self.ebqe[('diffusiveFlux_bc_flag', 0, 0)]
argsDict["ebqe_bc_diffusiveFlux_u_ext"] = self.ebqe[('diffusiveFlux_bc', 0, 0)]
argsDict["csrColumnOffsets_eb_u_u"] = self.csrColumnOffsets_eb[(0, 0)]
argsDict["ebqe_phi"] = self.coefficients.ebqe_phi
argsDict["epsFact"] = self.coefficients.epsFact
argsDict["ebqe_kappa"] = self.coefficients.ebqe_kappa
argsDict["ebqe_porosity"] = self.coefficients.ebqe_porosity
self.dissipation.calculateJacobian(argsDict) # VRANS
# Load the Dirichlet conditions directly into the Jacobian
if self.forceStrongConditions:
scaling = 1.0 # probably want to add some scaling to match non-dirichlet diagonals in linear system
for dofN in list(self.dirichletConditionsForceDOF.DOFBoundaryConditionsDict.keys()):
global_dofN = dofN
for i in range(self.rowptr[global_dofN], self.rowptr[global_dofN + 1]):
if (self.colind[i] == global_dofN):
# print "RBLES forcing residual cj = %s dofN= %s global_dofN= %s was self.nzval[i]= %s now =%s " % (cj,dofN,global_dofN,self.nzval[i],scaling)
self.nzval[i] = scaling
else:
self.nzval[i] = 0.0
# print "RBLES zeroing residual cj = %s dofN= %s global_dofN= %s " % (cj,dofN,global_dofN)
prof.logEvent("Jacobian ", level=10, data=jacobian)
# mwf decide if this is reasonable for solver statistics
self.nonlinear_function_jacobian_evaluations += 1
return jacobian
def calculateElementQuadrature(self):
"""
Calculate the physical location and weights of the quadrature rules
and the shape information at the quadrature points.
This function should be called only when the mesh changes.
"""
# self.u[0].femSpace.elementMaps.getValues(self.elementQuadraturePoints,
# self.q['x'])
self.u[0].femSpace.elementMaps.getBasisValuesRef(self.elementQuadraturePoints)
self.u[0].femSpace.elementMaps.getBasisGradientValuesRef(self.elementQuadraturePoints)
self.u[0].femSpace.getBasisValuesRef(self.elementQuadraturePoints)
self.u[0].femSpace.getBasisGradientValuesRef(self.elementQuadraturePoints)
self.coefficients.initializeElementQuadrature(self.timeIntegration.t, self.q)
if self.stabilization is not None:
self.stabilization.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
self.stabilization.initializeTimeIntegration(self.timeIntegration)
if self.shockCapturing is not None:
self.shockCapturing.initializeElementQuadrature(self.mesh, self.timeIntegration.t, self.q)
def calculateElementBoundaryQuadrature(self):
pass
def calculateExteriorElementBoundaryQuadrature(self):
"""
Calculate the physical location and weights of the quadrature rules
and the shape information at the quadrature points on global element boundaries.
This function should be called only when the mesh changes.
"""
#
# get physical locations of element boundary quadrature points
#
# assume all components live on the same mesh
self.u[0].femSpace.elementMaps.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
self.u[0].femSpace.elementMaps.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
self.u[0].femSpace.getBasisValuesTraceRef(self.elementBoundaryQuadraturePoints)
self.u[0].femSpace.getBasisGradientValuesTraceRef(self.elementBoundaryQuadraturePoints)
self.u[0].femSpace.elementMaps.getValuesGlobalExteriorTrace(self.elementBoundaryQuadraturePoints,
self.ebqe['x'])
self.fluxBoundaryConditionsObjectsDict = dict([(cj, proteus.FemTools.FluxBoundaryConditions(self.mesh,
self.nElementBoundaryQuadraturePoints_elementBoundary,
self.ebqe[('x')],
getAdvectiveFluxBoundaryConditions=self.advectiveFluxBoundaryConditionsSetterDict[cj],
getDiffusiveFluxBoundaryConditions=self.diffusiveFluxBoundaryConditionsSetterDictDict[cj]))
for cj in list(self.advectiveFluxBoundaryConditionsSetterDict.keys())])
self.coefficients.initializeGlobalExteriorElementBoundaryQuadrature(self.timeIntegration.t, self.ebqe)
def estimate_mt(self):
pass
def calculateSolutionAtQuadrature(self):
pass
def calculateAuxiliaryQuantitiesAfterStep(self):
pass
|
mit
| -5,397,842,456,893,396,000
| 53.566795
| 238
| 0.570205
| false
| 3.718571
| true
| false
| false
|
jaekor91/xd-elg-scripts
|
produce-DECaLS-DR3-Tractor-DEEP2f234.py
|
1
|
4558
|
# Loading modules
import numpy as np
from os import listdir
from os.path import isfile, join
from astropy.io import ascii, fits
from astropy.wcs import WCS
import numpy.lib.recfunctions as rec
from xd_elg_utils import *
import sys
large_random_constant = -999119283571
deg2arcsec=3600
data_directory = "./"
# True if tractor files have been already downloaded.
tractor_file_downloaded = True
##############################################################################
if not tractor_file_downloaded: # If the tractor files are not downloaded.
print("1. Generate download scripts for relevant Tractor files.")
print("This step generates three files that the user can use to download\n\
the relevant tractor files.")
print("To identify relevant bricks use survey-bricks-dr3.fits which the user\n\
should have downloaded. Approximate field ranges.\n\
\n\
Field 2\n\
RA bounds: [251.3, 253.7]\n\
DEC bounds: [34.6, 35.3]\n\
\n\
Field 3\n\
RA bounds: [351.25, 353.8]\n\
DEC bounds: [-.2, .5]\n\
\n\
Field 4\n\
RA bounds: [36.4, 38]\n\
DEC bounds: [.3, 1.0]\n\
")
fits_bricks = fits.open(data_directory+"survey-bricks-dr3.fits")[1].data
ra = fits_bricks['ra'][:]
dec = fits_bricks['dec'][:]
br_name = fits_bricks['brickname'][:]
# Getting the brick names near the ranges specified below.
tol = 0.25
f2_bricks = return_bricknames(ra, dec, br_name,[251.3, 253.7],[34.6, 35.3],tol)
f3_bricks = return_bricknames(ra, dec, br_name,[351.25, 353.8],[-.2, .5],tol)
f4_bricks = return_bricknames(ra, dec, br_name,[36.4,38.],[.3, 1.0],tol)
bricks = [f2_bricks, f3_bricks, f4_bricks]
print("Generating download scripts. DR3-DEEP2f**-tractor-download.sh")
portal_address = "http://portal.nersc.gov/project/cosmo/data/legacysurvey/dr3/tractor/"
postfix = ".fits\n"
prefix = "wget "
for i in range(3):
f = open("DR3-DEEP2f%d-tractor-download.sh"%(i+2),"w")
for brick in bricks[i]:
tractor_directory = brick[:3]
brick_address = tractor_directory+"/tractor-"+brick+postfix
download_command = prefix + portal_address + brick_address
f.write(download_command)
f.close()
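# Illustration (editor's addition, not in the original script): for a
# hypothetical brick named "2513p347", the loop above would write a line of
# the form
#
#     wget http://portal.nersc.gov/project/cosmo/data/legacysurvey/dr3/tractor/251/tractor-2513p347.fits
#
# i.e. prefix + portal_address + brick[:3] + "/tractor-" + brick + postfix.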
print("Completed")
print("Exiting the program. Please download the necessary files using the script\n\
and re-run the program with tractor_file_downloaded=True.")
sys.exit()
else:
print("Proceeding using the downloaded tractor files.")
print("Within data_directory, Tractor files should be \n\
saved in directories in \DR3-f**\.")
##############################################################################
print("2. Combine all Tractor files by field, append Tycho-2 stellar mask column, \n\
and mask objects using DEEP2 window functions.")
print("2a. Combining the tractor files: Impose mask conditions (brick_primary==True\n\
and flux inverse variance positive).")
# Field 2
DR3f2 = combine_tractor(data_directory+"DR3-f2/")
# Field 3
DR3f3 = combine_tractor(data_directory+"DR3-f3/")
# Field 4
DR3f4 = combine_tractor(data_directory+"DR3-f4/")
print("Completed.")
print("2b. Append Tycho2 stark mask field.")
# Field 2
DR3f2 = apply_tycho(DR3f2,"tycho2.fits",galtype="ELG")
# Field 3
DR3f3 = apply_tycho(DR3f3,"tycho2.fits",galtype="ELG")
# Field 4
DR3f4 = apply_tycho(DR3f4,"tycho2.fits",galtype="ELG")
print("Completed.")
print("2c. Impose DEEP2 window functions.")
# Field 2
idx = np.logical_or(window_mask(DR3f2["ra"], DR3f2["dec"], "windowf.21.fits"), window_mask(DR3f2["ra"], DR3f2["dec"], "windowf.22.fits"))
DR3f2_trimmed = DR3f2[idx]
# Field 3
idx = np.logical_or.reduce((window_mask(DR3f3["ra"], DR3f3["dec"], "windowf.31.fits"), window_mask(DR3f3["ra"], DR3f3["dec"], "windowf.32.fits"),window_mask(DR3f3["ra"], DR3f3["dec"], "windowf.33.fits")))
DR3f3_trimmed = DR3f3[idx]
# Field 4
idx = np.logical_or(window_mask(DR3f4["ra"], DR3f4["dec"], "windowf.41.fits"), window_mask(DR3f4["ra"], DR3f4["dec"], "windowf.42.fits"))
DR3f4_trimmed = np.copy(DR3f4[idx])
print("Completed.")
##############################################################################
print("3. Save the trimmed catalogs.")
# Field 2
cols = fits.ColDefs(DR3f2_trimmed)
tbhdu = fits.BinTableHDU.from_columns(cols)
tbhdu.writeto('DECaLS-DR3-Tractor-DEEP2f2.fits', clobber=True)
# Field 3
cols = fits.ColDefs(DR3f3_trimmed)
tbhdu = fits.BinTableHDU.from_columns(cols)
tbhdu.writeto('DECaLS-DR3-Tractor-DEEP2f3.fits', clobber=True)
# Field 4
cols = fits.ColDefs(DR3f4_trimmed)
tbhdu = fits.BinTableHDU.from_columns(cols)
tbhdu.writeto('DECaLS-DR3-Tractor-DEEP2f4.fits', clobber=True)
print("Completed.")
|
gpl-3.0
| -1,165,579,882,566,895,900
| 33.793893
| 204
| 0.667617
| false
| 2.737538
| false
| false
| false
|
GoogleCloudPlatform/sap-deployment-automation
|
third_party/github.com/ansible/awx/awx/main/managers.py
|
1
|
10952
|
# Copyright (c) 2015 Ansible, Inc.
# All Rights Reserved.
import sys
import logging
import os
from django.db import models
from django.conf import settings
from awx.main.utils.filters import SmartFilter
from awx.main.utils.pglock import advisory_lock
__all__ = ['HostManager', 'InstanceManager', 'InstanceGroupManager']
logger = logging.getLogger('awx.main.managers')
class HostManager(models.Manager):
"""Custom manager class for Hosts model."""
def active_count(self):
"""Return count of active, unique hosts for licensing.
Construction of query involves:
- remove any ordering specified in model's Meta
- Exclude hosts sourced from another Tower
- Restrict the query to only return the name column
- Only consider results that are unique
- Return the count of this query
"""
return self.order_by().exclude(inventory_sources__source='tower').values('name').distinct().count()
def org_active_count(self, org_id):
"""Return count of active, unique hosts used by an organization.
Construction of query involves:
- remove any ordering specified in model's Meta
- Exclude hosts sourced from another Tower
- Consider only hosts where the canonical inventory is owned by the organization
- Restrict the query to only return the name column
- Only consider results that are unique
- Return the count of this query
"""
return self.order_by().exclude(
inventory_sources__source='tower'
).filter(inventory__organization=org_id).values('name').distinct().count()
def get_queryset(self):
"""When the parent instance of the host query set has a `kind=smart` and a `host_filter`
set. Use the `host_filter` to generate the queryset for the hosts.
"""
qs = super(HostManager, self).get_queryset()
if (hasattr(self, 'instance') and
hasattr(self.instance, 'host_filter') and
hasattr(self.instance, 'kind')):
if self.instance.kind == 'smart' and self.instance.host_filter is not None:
q = SmartFilter.query_from_string(self.instance.host_filter)
if self.instance.organization_id:
q = q.filter(inventory__organization=self.instance.organization_id)
# If we are using host_filters, disable the core_filters, this allows
# us to access all of the available Host entries, not just the ones associated
# with a specific FK/relation.
#
# If we don't disable this, a filter of {'inventory': self.instance} gets automatically
# injected by the related object mapper.
self.core_filters = {}
qs = qs & q
return qs.order_by('name', 'pk').distinct('name')
return qs
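# Illustrative usage (editor's addition; the inventory and filter string are
# assumptions, not taken from this file):
#
#     inv = Inventory(kind='smart', host_filter='name__icontains=web')
#     hosts = inv.hosts.all()   # resolved through HostManager.get_queryset()
#
# The manager builds the queryset from SmartFilter.query_from_string(
# 'name__icontains=web'), scopes it to inv.organization_id when set, and
# returns hosts deduplicated by name via .order_by('name', 'pk').distinct('name').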
def get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping):
# Create IG mapping by union of all groups their instances are members of
ig_ig_mapping = {}
for group_name in ig_instance_mapping.keys():
ig_ig_set = set()
for instance_hostname in ig_instance_mapping[group_name]:
ig_ig_set |= instance_ig_mapping[instance_hostname]
else:
ig_ig_set.add(group_name)  # for-else runs whenever the loop ends without break, so the group itself is always included (covers groups with no instances)
ig_ig_mapping[group_name] = ig_ig_set
return ig_ig_mapping
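# Worked example (editor's addition, not part of the original module):
#
#     instance_ig_mapping = {'node1': {'tower', 'backup'}}
#     ig_instance_mapping = {'tower': {'node1'}, 'backup': {'node1'}, 'empty': set()}
#     get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping)
#     # -> {'tower': {'tower', 'backup'}, 'backup': {'tower', 'backup'}, 'empty': {'empty'}}
#
# Each group maps to the union of the groups its instances belong to, plus itself.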
class InstanceManager(models.Manager):
"""A custom manager class for the Instance model.
Provides "table-level" methods including getting the currently active
instance or role.
"""
def me(self):
"""Return the currently active instance."""
# If we are running unit tests, return a stub record.
if settings.IS_TESTING(sys.argv) or hasattr(sys, '_called_from_test'):
return self.model(id=1,
hostname='localhost',
uuid='00000000-0000-0000-0000-000000000000')
node = self.filter(hostname=settings.CLUSTER_HOST_ID)
if node.exists():
return node[0]
raise RuntimeError("No instance found with the current cluster host id")
def register(self, uuid=None, hostname=None, ip_address=None):
if not uuid:
uuid = settings.SYSTEM_UUID
if not hostname:
hostname = settings.CLUSTER_HOST_ID
with advisory_lock('instance_registration_%s' % hostname):
if settings.AWX_AUTO_DEPROVISION_INSTANCES:
# detect any instances with the same IP address.
# if one exists, set it to None
inst_conflicting_ip = self.filter(ip_address=ip_address).exclude(hostname=hostname)
if inst_conflicting_ip.exists():
for other_inst in inst_conflicting_ip:
other_hostname = other_inst.hostname
other_inst.ip_address = None
other_inst.save(update_fields=['ip_address'])
logger.warning("IP address {0} conflict detected, ip address unset for host {1}.".format(ip_address, other_hostname))
instance = self.filter(hostname=hostname)
if instance.exists():
instance = instance.get()
if instance.ip_address != ip_address:
instance.ip_address = ip_address
instance.save(update_fields=['ip_address'])
return (True, instance)
else:
return (False, instance)
instance = self.create(uuid=uuid,
hostname=hostname,
ip_address=ip_address,
capacity=0)
return (True, instance)
def get_or_register(self):
if settings.AWX_AUTO_DEPROVISION_INSTANCES:
from awx.main.management.commands.register_queue import RegisterQueue
pod_ip = os.environ.get('MY_POD_IP')
registered = self.register(ip_address=pod_ip)
RegisterQueue('tower', None, 100, 0, []).register()
return registered
else:
return (False, self.me())
def active_count(self):
"""Return count of active Tower nodes for licensing."""
return self.all().count()
def my_role(self):
# NOTE: TODO: Likely to repurpose this once standalone ramparts are a thing
return "tower"
def all_non_isolated(self):
return self.exclude(rampart_groups__controller__isnull=False)
class InstanceGroupManager(models.Manager):
"""A custom manager class for the Instance model.
Used for global capacity calculations
"""
def capacity_mapping(self, qs=None):
"""
Another entry-point to Instance manager method by same name
"""
if qs is None:
qs = self.all().prefetch_related('instances')
instance_ig_mapping = {}
ig_instance_mapping = {}
# Create dictionaries that represent basic m2m memberships
for group in qs:
ig_instance_mapping[group.name] = set(
instance.hostname for instance in group.instances.all() if
instance.capacity != 0
)
for inst in group.instances.all():
if inst.capacity == 0:
continue
instance_ig_mapping.setdefault(inst.hostname, set())
instance_ig_mapping[inst.hostname].add(group.name)
# Get IG capacity overlap mapping
ig_ig_mapping = get_ig_ig_mapping(ig_instance_mapping, instance_ig_mapping)
return instance_ig_mapping, ig_ig_mapping
@staticmethod
def zero_out_group(graph, name, breakdown):
if name not in graph:
graph[name] = {}
graph[name]['consumed_capacity'] = 0
if breakdown:
graph[name]['committed_capacity'] = 0
graph[name]['running_capacity'] = 0
def capacity_values(self, qs=None, tasks=None, breakdown=False, graph=None):
"""
Returns a dictionary of capacity values for all IGs
"""
if qs is None: # Optionally BYOQS - bring your own queryset
qs = self.all().prefetch_related('instances')
instance_ig_mapping, ig_ig_mapping = self.capacity_mapping(qs=qs)
if tasks is None:
tasks = self.model.unifiedjob_set.related.related_model.objects.filter(
status__in=('running', 'waiting'))
if graph is None:
graph = {group.name: {} for group in qs}
for group_name in graph:
self.zero_out_group(graph, group_name, breakdown)
for t in tasks:
# TODO: dock capacity for isolated job management tasks running in queue
impact = t.task_impact
if t.status == 'waiting' or not t.execution_node:
# Subtract capacity from any peer groups that share instances
if not t.instance_group:
impacted_groups = []
elif t.instance_group.name not in ig_ig_mapping:
# Waiting job in group with 0 capacity has no collateral impact
impacted_groups = [t.instance_group.name]
else:
impacted_groups = ig_ig_mapping[t.instance_group.name]
for group_name in impacted_groups:
if group_name not in graph:
self.zero_out_group(graph, group_name, breakdown)
graph[group_name]['consumed_capacity'] += impact
if breakdown:
graph[group_name]['committed_capacity'] += impact
elif t.status == 'running':
# Subtract capacity from all groups that contain the instance
if t.execution_node not in instance_ig_mapping:
if not t.is_containerized:
logger.warning('Detected %s running inside lost instance, '
'may still be waiting for reaper.', t.log_format)
if t.instance_group:
impacted_groups = [t.instance_group.name]
else:
impacted_groups = []
else:
impacted_groups = instance_ig_mapping[t.execution_node]
for group_name in impacted_groups:
if group_name not in graph:
self.zero_out_group(graph, group_name, breakdown)
graph[group_name]['consumed_capacity'] += impact
if breakdown:
graph[group_name]['running_capacity'] += impact
else:
logger.error('Programming error, %s not in ["running", "waiting"]', t.log_format)
return graph
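# Shape of the returned structure (editor's sketch; group names and numbers are
# hypothetical, and it assumes this manager is installed as InstanceGroup.objects):
#
#     InstanceGroup.objects.capacity_values(breakdown=True)
#     # -> {'tower': {'consumed_capacity': 30,
#     #               'committed_capacity': 10,   # from waiting tasks
#     #               'running_capacity': 20}}    # from running tasks
#
# Without breakdown=True only 'consumed_capacity' is filled in.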
|
apache-2.0
| 6,251,329,385,971,334,000
| 42.633466
| 141
| 0.582816
| false
| 4.444805
| false
| false
| false
|
mferenca/HMS-ecommerce
|
ecommerce/extensions/basket/app.py
|
1
|
1028
|
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from oscar.apps.basket import app
from oscar.core.loading import get_class
class BasketApplication(app.BasketApplication):
single_item_view = get_class('basket.views', 'BasketSingleItemView')
summary_view = get_class('basket.views', 'BasketSummaryView')
def get_urls(self):
urls = [
url(r'^$', self.summary_view.as_view(), name='summary'),
url(r'^add/(?P<pk>\d+)/$', self.add_view.as_view(), name='add'),
url(r'^vouchers/add/$', self.add_voucher_view.as_view(), name='vouchers-add'),
url(r'^vouchers/(?P<pk>\d+)/remove/$', self.remove_voucher_view.as_view(), name='vouchers-remove'),
url(r'^saved/$', login_required(self.saved_view.as_view()), name='saved'),
url(r'^single-item/$', login_required(self.single_item_view.as_view()), name='single-item'),
]
return self.post_process_urls(urls)
application = BasketApplication()
|
agpl-3.0
| 4,891,847,817,165,289,000
| 43.695652
| 111
| 0.644942
| false
| 3.381579
| false
| false
| false
|
azoft-dev-team/imagrium
|
src/pages/bottom_navigation.py
|
1
|
1266
|
from src.core.page import ResourceLoader, Page
from src.core.r import Resource
from src.pages.explore import Explore
from src.pages.me.me import Me
class BottomNavigation(Page):
meNavIconInactive = ResourceLoader(Resource.meNavIconInactive)
meNavIconActive = ResourceLoader(Resource.meNavIconActive)
exploreNavIconInactive = ResourceLoader(Resource.exploreNavIconInactive)
exploreNavIconActive = ResourceLoader(Resource.exploreNavIconActive)
def __init__(self, box, settings):
super(Page, self).__init__(box, settings)
self.box = box
self.settings = settings
# It is necessary to assign a search area to all class fields
self.meNavIconInactive = self.box
self.meNavIconActive = self.box
self.exploreNavIconInactive = self.box
self.exploreNavIconActive = self.box
def actionGoMe(self, inactive=True):
if inactive:
self.meNavIconInactive.click()
else:
self.meNavIconActive.click()
return Me.load(self.box, self.settings)
def actionGoExplore(self, inactive=True):
if inactive:
self.exploreNavIconInactive.click()
else:
self.exploreNavIconActive.click()
return Explore.load(self.box, self.settings)
class BottomNavigationiOS(BottomNavigation):
pass
|
mit
| 1,978,720,015,273,471,700
| 30.65
| 76
| 0.691943
| false
| 3.95625
| false
| false
| false
|
cdent/tiddlywebplugins.policyfilter
|
test/test_filter.py
|
1
|
2381
|
from tiddlyweb.filters import FilterError, recursive_filter, parse_for_filters
from tiddlyweb.model.tiddler import Tiddler
from tiddlyweb.model.bag import Bag
from tiddlyweb.model.recipe import Recipe
from tiddlyweb.store import Store
from tiddlywebplugins.policyfilter import init
from tiddlyweb.config import config
import pytest
def setup_module(module):
init(config)
environ = {
'tiddlyweb.config': config,
'tiddlyweb.usersign': {'name': 'cdent', 'roles': ['COW', 'MOO']}
}
module.store = Store(config['server_store'][0],
config['server_store'][1],
environ)
environ['tiddlyweb.store'] = module.store
module.environ = environ
def test_filtering_bags():
bag1 = Bag('bag1')
bag1.policy.create = ['cdent']
bag2 = Bag('bag2')
bag2.policy.create = ['R:COW']
bag3 = Bag('bag3')
bag3.policy.create = []
bag4 = Bag('bag4')
bag4.policy.create = ['NONE']
bags = [bag1, bag2, bag3, bag4]
for bag in bags:
store.put(bag)
found_bags = list(filter('select=policy:create', bags))
assert len(found_bags) == 3
names = [bag.name for bag in found_bags]
assert 'bag1' in names
assert 'bag2' in names
assert 'bag3' in names
assert 'bag4' not in names
def test_filter_recipes():
recipe1 = Recipe('recipe1')
recipe1.policy.create = ['cdent']
recipe2 = Recipe('recipe2')
recipe2.policy.create = ['R:COW']
recipe3 = Recipe('recipe3')
recipe3.policy.create = []
recipe4 = Recipe('recipe4')
recipe4.policy.create = ['NONE']
recipes = [recipe1, recipe2, recipe3, recipe4]
for recipe in recipes:
store.put(recipe)
found_recipes = list(filter('select=policy:create', recipes))
assert len(found_recipes) == 3
names = [recipe.name for recipe in found_recipes]
assert 'recipe1' in names
assert 'recipe2' in names
assert 'recipe3' in names
assert 'recipe4' not in names
def test_filter_tiddlers():
"""
This should error.
"""
tiddler1 = Tiddler('tiddler1', 'bag1')
tiddler1.text = 'foo'
store.put(tiddler1)
with pytest.raises(AttributeError):
found_tiddlers = list(filter('select=policy:create', [tiddler1]))
def filter(filter_string, entities):
return recursive_filter(parse_for_filters(
filter_string, environ)[0], entities)
|
bsd-3-clause
| -800,240,816,916,698,600
| 25.164835
| 78
| 0.649727
| false
| 3.162019
| true
| false
| false
|
mjonescase/flask-truss
|
flask_truss/manage.py
|
1
|
1920
|
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
from flask_truss.factory import create_app
from flask_truss.conf.app import Config
from flask_truss.async.base import celery_instance
from flask_truss.models.base import db
config = Config()
app = create_app(config)
manager = Manager(app)
migrate = Migrate(app, db)
@manager.shell
def make_shell_context():
"""IPython session with app loaded"""
return dict(app=app)
@manager.option('-n', '--nose_arguments', dest='nose_arguments', required=False,
help="List of arguments to pass to nose. First argument MUST be ''",
default=['', '--with-coverage', '--cover-package=flask_truss'])
def test(nose_arguments):
"""Run nosetests with the given arguments and report coverage"""
assert nose_arguments[0] == ''
import nose
from nose.plugins.cover import Coverage
nose.main(argv=nose_arguments, addplugins=[Coverage()])
@manager.command
def runserver():
"""Run the Flask development server with the config's settings"""
app.run(port=config.PORT, debug=config.DEBUG, threaded=config.THREADED)
@manager.option('-Q', '--queues', dest='queues', required=False, default='celery',
help="Comma separated names of queues")
@manager.option('-c', '--concurrency', dest='concurrency', required=False, type=int, default=0,
help="Number of processes/threads the worker uses")
@manager.option('-l', '--loglevel', dest='loglevel', required=False, default='INFO',
help="DEBUG, INFO, WARN, ERROR, CRITICAL, FATAL")
def worker(queues, concurrency, loglevel=None):
"""Run a celery worker process locally"""
worker = celery_instance.Worker(queues=queues, concurrency=concurrency, loglevel=loglevel, **app.config)
worker.start()
manager.add_command('db', MigrateCommand)
if __name__ == "__main__":
manager.run()
|
mit
| -6,636,964,529,650,362,000
| 33.285714
| 108
| 0.690625
| false
| 3.80198
| true
| false
| false
|
slongfield/StereoCensus
|
verilog/census/argmin_gen.py
|
1
|
3581
|
# argmin_gen.py
#
# Takes in a single argument, the number of inputs, and generates a verilog
# argmin tree, using the argmin_helper.
#
# Copyright (c) 2016, Stephen Longfield, stephenlongfield.com
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
# Header is a format string, expecting number of inputs as an argument.
_HEADER = """
`ifndef CENSUS_ARGMIN_{0}_V_
`define CENSUS_ARGMIN_{0}_V_
module argmin_{0}#(
parameter WIDTH=1
) (
input wire clk,
input wire rst,
input wire [WIDTH*{0}-1:0] inp,
output wire [WIDTH-1:0] outp,
output wire [$clog2({0})-1:0] outp_addr
);
localparam ADDR_WIDTH = $clog2({0});
"""
_FOOTER = """
endmodule
`endif // CENSUS_ARGMIN_V_
"""
_STAGE = """
argmin_helper#(.WIDTH(WIDTH), .ADDR_WIDTH(ADDR_WIDTH), .NUM_INP({num_inp}),
.NUM_OUTP({num_outp}), .STAGE({stage}))
ah_{stage}(clk, rst, {inp}, {inp_addr}, {outp}, {outp_addr});
"""
parser = argparse.ArgumentParser()
parser.add_argument(
"--num_inputs",
help="number of inputs in the generated argmin",
type=int,
required=True)
def get_args():
"""get_args parses the args with argparse.
Returns:
      num_inputs (int): Number of inputs passed on the command line.
"""
args = parser.parse_args()
return args.num_inputs
def generate_argmin(num_inputs):
"""generate_argmin generates an argmin function
    Args:
      num_inputs (int): The number of inputs to the generated argmin tree.
Returns:
argmin (string): verilog that computes the argmin function
"""
lines = []
lines.append(_HEADER.format(num_inputs))
# Pretend the inputs were the outputs from some imaginary previous stage.
prev_output = "inp"
prev_output_addr = "0"
stage = 0
input_size = num_inputs
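    # Build a reduction tree: each argmin_helper stage cuts the number of surviving
    # candidates to ceil(n/2) until a single value/address pair remains.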
while (input_size > 1):
output_size = input_size // 2 + input_size % 2
outp_name = "data_{}".format(stage)
outp_addr = "addr_{}".format(stage)
# Create some new output ports
lines.append(" wire [WIDTH*{}-1:0] {};".format(output_size,
outp_name))
lines.append(" wire [ADDR_WIDTH*{}-1:0] {};".format(output_size,
outp_addr))
lines.append(_STAGE.format(num_inp=input_size, num_outp=output_size,
stage=stage, inp=prev_output,
inp_addr=prev_output_addr,
outp=outp_name, outp_addr=outp_addr))
stage += 1
input_size = output_size
prev_output = outp_name
prev_output_addr = outp_addr
# Set up the outputs
lines.append(" assign outp = {};".format(prev_output))
lines.append(" assign outp_addr = {};".format(prev_output_addr))
lines.append(_FOOTER)
return "\n".join(lines)
def run():
num_inputs = get_args()
print(generate_argmin(num_inputs))
if __name__ == '__main__':
run()
|
gpl-3.0
| -7,825,781,145,096,044,000
| 27.879032
| 78
| 0.60458
| false
| 3.545545
| false
| false
| false
|
yaukwankiu/armor
|
geometry/fractal.py
|
1
|
1840
|
import time
import numpy as np
from .. import defaultParameters as dp
def hausdorffDim(a, epsilon=2):
"""
#codes from
# hausdorffDimensionTest.py
# http://en.wikipedia.org/wiki/Hausdorff_dimension
# http://en.wikipedia.org/wiki/Minkowski-Bouligand_dimension
"""
dims = []
arr1 = (a.matrix>0) # turn it to 0-1 if it's not that form already
height, width = arr1.shape
arr2 = arr1[::epsilon, ::epsilon].copy()
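    # Box-counting: accumulate every offset of each epsilon x epsilon block into arr2,
    # so arr2.sum() counts the boxes containing at least one nonzero cell; dimH below is
    # log(occupied boxes) / log(sqrt(height*width)/epsilon).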
for i in range(0, epsilon):
for j in range(0, epsilon):
h, w = arr1[i::epsilon, j::epsilon].shape
arr2[0:h, 0:w] += arr1[i::epsilon, j::epsilon]
dimH = np.log(arr2.sum()) / np.log((height*width)**.5/epsilon)
return dimH
def hausdorffDimLocal(a, epsilon=1, I=50, J=50, display=True, imagePath=""):
height, width = a.matrix.shape
dimLocal = {}
a1 = a.hausdorffDim(epsilon)['a1']
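    # Tile the field into I x J windows and estimate the box-counting dimension of each
    # tile separately; the per-tile values are painted back into a2 below for display.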
for i in range(height//I):
for j in range(width//J):
aa1 = a1.getWindow(i*I, j*J, I, J)
# one epsilon for now, may extend to a list later 2014-07-29
dimH = hausdorffDim(aa1, epsilon)
aa1.name = str(dimH)
#aa1.show()
#time.sleep(1)
dimLocal[(i,j)] = dimH
#print dimH #debug
a2 = a.copy()
a2.matrix= a2.matrix.astype(float)
#a2.show() # debug
#time.sleep(5)
a2.name = "Local Hausdorff Dimensions for\n" + a.name
a2.imagePath = 'testing/' + str(time.time()) + '_local_hausdorff_dim_' + a.name[-19:] + '.png'
for i in range(height//I):
for j in range(width//J):
a2.matrix[i*I:(i+1)*I, j*J:(j+1)*J] = dimLocal[(i,j)]
a2.vmax=2
a2.vmin=0
a2.cmap='jet'
if imagePath !="":
a2.saveImage()
if display:
a2.show()
return {'a2': a2, 'dimLocal': dimLocal}
|
cc0-1.0
| -3,036,818,565,936,485,000
| 33.716981
| 98
| 0.561957
| false
| 2.88854
| false
| false
| false
|
GuessWhatGame/generic
|
preprocess_data/extract_img_features.py
|
1
|
3564
|
#!/usr/bin/env python
import numpy
import os
import tensorflow as tf
from multiprocessing import Pool
from tqdm import tqdm
import numpy as np
import h5py
from generic.data_provider.nlp_utils import DummyTokenizer
from generic.data_provider.iterator import Iterator
def extract_features(
img_input,
ft_output,
network_ckpt,
dataset_cstor,
dataset_args,
batchifier_cstor,
out_dir,
set_type,
batch_size,
no_threads,
gpu_ratio):
# CPU/GPU option
cpu_pool = Pool(no_threads, maxtasksperchild=1000)
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=gpu_ratio)
with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options, allow_soft_placement=True)) as sess:
saver = tf.train.Saver()
saver.restore(sess, network_ckpt)
for one_set in set_type:
print("Load dataset -> set: {}".format(one_set))
dataset_args["which_set"] = one_set
dataset = dataset_cstor(**dataset_args)
# hack dataset to only keep one game by image
image_id_set = {}
games = []
for game in dataset.games:
if game.image.id not in image_id_set:
games.append(game)
image_id_set[game.image.id] = 1
dataset.games = games
no_images = len(games)
#TODO find a more generic approach
if type(dataset.games[0].image.id) is int:
image_id_type = np.int64
else:
image_id_type = h5py.special_dtype(vlen=type(dataset.games[0].image.id))
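            # image ids may be ints or strings; string ids need h5py's variable-length
            # dtype so they can be stored in the idx2img dataset below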
source_name = os.path.basename(img_input.name[:-2])
dummy_tokenizer = DummyTokenizer()
batchifier = batchifier_cstor(tokenizer=dummy_tokenizer, sources=[source_name])
iterator = Iterator(dataset,
batch_size=batch_size,
pool=cpu_pool,
batchifier=batchifier)
############################
# CREATE FEATURES
############################
print("Start computing image features...")
if one_set == "all":
filepath = os.path.join(out_dir, "features.h5")
else:
filepath = os.path.join(out_dir, "{}_features.h5".format(one_set))
with h5py.File(filepath, 'w') as f:
ft_shape = [int(dim) for dim in ft_output.get_shape()[1:]]
ft_dataset = f.create_dataset('features', shape=[no_images] + ft_shape, dtype=np.float32)
idx2img = f.create_dataset('idx2img', shape=[no_images], dtype=image_id_type)
pt_hd5 = 0
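                # pt_hd5 is the write offset into the pre-allocated datasets: each batch
                # of features is written at this offset, and idx2img records the image id
                # that ended up in every row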
i = 0
for batch in tqdm(iterator):
i += 1
feat = sess.run(ft_output, feed_dict={img_input: numpy.array(batch[source_name])})
# Store dataset
batch_size = len(batch["raw"])
ft_dataset[pt_hd5: pt_hd5 + batch_size] = feat
# Store idx to image.id
for i, game in enumerate(batch["raw"]):
idx2img[pt_hd5 + i] = game.image.id
# update hd5 pointer
pt_hd5 += batch_size
print("Start dumping file: {}".format(filepath))
print("Finished dumping file: {}".format(filepath))
print("Done!")
|
apache-2.0
| 6,149,952,111,916,122,000
| 33.941176
| 105
| 0.518799
| false
| 4.054608
| false
| false
| false
|
dmr/Ldtools
|
ldtools/cli.py
|
1
|
5600
|
from __future__ import print_function
import logging
import pprint
import datetime
import sys
import argparse
from ldtools.utils import (
is_valid_url,
get_slash_url,
get_rdflib_uriref,
urllib2,
)
from ldtools.helpers import set_colored_logger
from ldtools.backends import __version__
from ldtools.origin import Origin
from ldtools.resource import Resource
logger = logging.getLogger("ldtools.cli")
def get_parser():
parser = argparse.ArgumentParser()
parser.add_argument(
'--version', action='version', version='%(prog)s ' + __version__,
help="Print current version")
parser.add_argument(
'-v', '--verbosity', action="store",
help='Adjust verbosity. 1 for every detail, 5 for silent',
default=2, type=int)
parser.add_argument(
'-d', '--depth', action="store", default=0, type=int,
help="Crawl discovered Origins x times")
follow_group = parser.add_mutually_exclusive_group()
follow_group.add_argument(
'--follow-all', action="store_true",
help="Follow all URIs discovered")
follow_group.add_argument(
'--follow-uris',
action="append", dest='follow_uris', default=[],
help="Follow the URIs specified")
print_group = parser.add_mutually_exclusive_group()
print_group.add_argument(
'--only-print-uris', action="store_true",
help='Only prints a short representation of Resources')
parser.add_argument(
'--only-print-uri-content', action="store_true",
        help='Only prints data retrieved from URIs and exits')
parser.add_argument(
'--socket-timeout', action="store", type=int,
help="Set the socket timeout")
parser.add_argument(
'-o', '--only-negotiate', action="store_true",
help='Only do content negotiation for given URIs and print the '
'response headers')
parser.add_argument(
'--GRAPH_SIZE_LIMIT', action="store", type=int,
help="Set maximum graph size that will be processed")
parser.add_argument('--print-all-resources', action="store_true")
def check_uri(url):
if not is_valid_url(url):
raise argparse.ArgumentTypeError("%r is not a valid URL" % url)
return url
parser.add_argument(
'origin_urls', action="store", nargs='+', type=check_uri,
help="Pass a list of URIs. ldtools will crawl them one by one")
return parser
def execute_ldtools(
verbosity,
origin_urls,
depth,
follow_all,
follow_uris,
socket_timeout,
GRAPH_SIZE_LIMIT,
print_all_resources,
only_print_uris,
only_print_uri_content,
only_negotiate
):
set_colored_logger(verbosity)
# customize Origin.objects.post_create_hook for performance reasons
def custom_post_create_hook(origin):
origin.timedelta = datetime.timedelta(minutes=5)
return origin
Origin.objects.post_create_hook = custom_post_create_hook
url_count = len(origin_urls)
if url_count > 1:
logger.info("Retrieving content of %s URLs" % url_count)
if follow_all:
only_follow_uris = None
logging.info("Following all URIs")
elif follow_uris:
only_follow_uris = follow_uris
logging.info("Following values matching: %s"
% ", ".join(only_follow_uris))
else:
only_follow_uris = []
if socket_timeout:
import socket
logger.info("Setting socket timeout to %s" % socket_timeout)
socket.setdefaulttimeout(socket_timeout)
kw = dict(raise_errors=False)
if GRAPH_SIZE_LIMIT:
kw["GRAPH_SIZE_LIMIT"] = GRAPH_SIZE_LIMIT
for url in origin_urls:
url = get_slash_url(url)
origin, created = Origin.objects.get_or_create(url)
logger.info("Retrieving content of %s" % origin.uri)
if only_negotiate or only_print_uri_content:
try:
data = origin.backend.GET(
uri=origin.uri,
httphandler=urllib2.HTTPHandler(debuglevel=1))
except Exception as exc:
print(exc)
continue
if only_print_uri_content:
print('\n', data, '\n')
else:
origin.GET(only_follow_uris=only_follow_uris, **kw)
if only_negotiate or only_print_uri_content:
sys.exit(0)
if depth:
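        # Each round re-fetches every Origin known so far, so origins discovered
        # in round N are retrieved in round N+1.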
for round in range(depth):
for origin in Origin.objects.all():
origin.GET(only_follow_uris=only_follow_uris, **kw)
for orig_url in origin_urls:
url = get_slash_url(orig_url)
origin = Origin.objects.get(url)
for r in origin.get_resources():
if r._uri == get_rdflib_uriref(orig_url):
logger.info(u"Printing all available information "
"about {0}".format(r._uri))
if hasattr(r, "_has_changes"):
delattr(r, "_has_changes")
if hasattr(r, "pk"):
delattr(r, "pk")
pprint.pprint(r.__dict__)
if print_all_resources:
all_resources = Resource.objects.all()
if (only_print_uris):
for resource in all_resources:
print(resource)
else:
for r in all_resources:
if hasattr(r, "_has_changes"):
delattr(r, "_has_changes")
if hasattr(r, "pk"):
delattr(r, "pk")
pprint.pprint(r.__dict__)
def main():
execute_ldtools(**get_parser().parse_args().__dict__)
|
bsd-2-clause
| -6,555,714,055,889,037,000
| 31
| 75
| 0.594286
| false
| 3.932584
| false
| false
| false
|
shaochangbin/crosswalk
|
app/tools/android/manifest_json_parser.py
|
1
|
8310
|
#!/usr/bin/env python
# Copyright (c) 2013, 2014 Intel Corporation. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Parse JSON-format manifest configuration file and
provide the specific fields, which have to be integrated with
packaging tool (e.g. make_apk.py) to generate the xml-format manifest file.
Sample usage from shell script:
python manifest_json_parser.py --jsonfile=/path/to/manifest.json
"""
import json
import optparse
import os
import re
import sys
def HandlePermissionList(permission_list):
"""This function is used to handle the permission list and return the string
of permissions.
Args:
permission_list: the permission list, e.g.["permission1", "permission2"].
Returns:
The string of permissions with ':' as separator.
e.g. "permission1:permission2".
"""
permissions = list(permission_list)
reg_permission = re.compile(r'^[a-zA-Z\.]*$')
for permission in permissions:
if not reg_permission.match(permission):
print('\'Permissions\' field error, only alphabets and '
'\'.\' are allowed.')
sys.exit(1)
return ':'.join(permissions)
class ManifestJsonParser(object):
""" The class is used to parse json-format manifest file, recompose the fields
and provide the field interfaces required by the packaging tool.
Args:
input_path: the full path of the json-format manifest file.
"""
def __init__(self, input_path):
self.input_path = input_path
input_file = open(self.input_path)
try:
input_src = input_file.read()
self.data_src = json.JSONDecoder().decode(input_src)
self.ret_dict = self._output_items()
except (TypeError, ValueError, IOError):
print('There is a parser error in manifest.json file.')
sys.exit(1)
except KeyError:
print('There is a field error in manifest.json file.')
sys.exit(1)
finally:
input_file.close()
def _output_items(self):
""" The manifest field items are reorganized and returned as a
dictionary to support single or multiple values of keys.
Returns:
A dictionary to the corresponding items. the dictionary keys are
described as follows, the value is set to "" if the value of the
key is not set.
app_name: The application name.
version: The version number.
icons: An array of icons.
app_url: The url of application, e.g. hosted app.
description: The description of application.
      app_root: The root path of the web app; this flag allows packaging a
                local web application as an apk.
      app_local_path: The relative path of the entry file based on app_root;
                      this flag should be used together with "--app-root".
permissions: The permission list.
required_version: The required crosswalk runtime version.
plugin: The plug-in information.
fullscreen: The fullscreen flag of the application.
launch_screen: The launch screen configuration.
"""
ret_dict = {}
if 'name' not in self.data_src:
print('Error: no \'name\' field in manifest.json file.')
sys.exit(1)
ret_dict['app_name'] = self.data_src['name']
if 'version' not in self.data_src:
print('Error: no \'version\' field in manifest.json file.')
sys.exit(1)
ret_dict['version'] = self.data_src['version']
if 'launch_path' in self.data_src:
app_url = self.data_src['launch_path']
elif ('app' in self.data_src and
'launch' in self.data_src['app'] and
'local_path' in self.data_src['app']['launch']):
app_url = self.data_src['app']['launch']['local_path']
else:
app_url = ''
if app_url.lower().startswith(('http://', 'https://')):
app_local_path = ''
else:
app_local_path = app_url
app_url = ''
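    # Only remote http(s) URLs are kept in app_url; anything else is treated as an entry
    # file relative to the app root, so at most one of the two fields is non-empty.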
file_path_prefix = os.path.split(self.input_path)[0]
if 'icons' in self.data_src:
ret_dict['icons'] = self.data_src['icons']
else:
ret_dict['icons'] = {}
app_root = file_path_prefix
ret_dict['description'] = ''
if 'description' in self.data_src:
ret_dict['description'] = self.data_src['description']
ret_dict['app_url'] = app_url
ret_dict['app_root'] = app_root
ret_dict['app_local_path'] = app_local_path
ret_dict['permissions'] = ''
if 'permissions' in self.data_src:
try:
permission_list = self.data_src['permissions']
ret_dict['permissions'] = HandlePermissionList(permission_list)
except (TypeError, ValueError, IOError):
print('\'Permissions\' field error in manifest.json file.')
sys.exit(1)
ret_dict['required_version'] = ''
if 'required_version' in self.data_src:
ret_dict['required_version'] = self.data_src['required_version']
ret_dict['plugin'] = ''
if 'plugin' in self.data_src:
ret_dict['plugin'] = self.data_src['plugin']
if 'display' in self.data_src and 'fullscreen' in self.data_src['display']:
ret_dict['fullscreen'] = 'true'
else:
ret_dict['fullscreen'] = ''
ret_dict['launch_screen_img'] = ''
if 'launch_screen' in self.data_src:
if 'default' not in self.data_src['launch_screen']:
print('Error: no \'default\' field for \'launch_screen\'.')
sys.exit(1)
default = self.data_src['launch_screen']['default']
if 'image' not in default:
print('Error: no \'image\' field for \'launch_screen.default\'.')
sys.exit(1)
ret_dict['launch_screen_img'] = default['image']
return ret_dict
def ShowItems(self):
"""Show the processed results, it is used for command-line
internal debugging."""
print("app_name: %s" % self.GetAppName())
print("version: %s" % self.GetVersion())
print("description: %s" % self.GetDescription())
print("icons: %s" % self.GetIcons())
print("app_url: %s" % self.GetAppUrl())
print("app_root: %s" % self.GetAppRoot())
print("app_local_path: %s" % self.GetAppLocalPath())
print("permissions: %s" % self.GetPermissions())
print("required_version: %s" % self.GetRequiredVersion())
print("plugins: %s" % self.GetPlugins())
print("fullscreen: %s" % self.GetFullScreenFlag())
print('launch_screen.default.image: %s' % self.GetLaunchScreenImg())
def GetAppName(self):
"""Return the application name."""
return self.ret_dict['app_name']
def GetVersion(self):
"""Return the version number."""
return self.ret_dict['version']
def GetIcons(self):
"""Return the icons."""
return self.ret_dict['icons']
def GetAppUrl(self):
"""Return the URL of the application."""
return self.ret_dict['app_url']
def GetDescription(self):
"""Return the description of the application."""
return self.ret_dict['description']
def GetAppRoot(self):
"""Return the root path of the local web application."""
return self.ret_dict['app_root']
def GetAppLocalPath(self):
"""Return the local relative path of the local web application."""
return self.ret_dict['app_local_path']
def GetPermissions(self):
"""Return the permissions."""
return self.ret_dict['permissions']
def GetRequiredVersion(self):
"""Return the required crosswalk runtime version."""
return self.ret_dict['required_version']
def GetPlugins(self):
"""Return the plug-in path and file name."""
return self.ret_dict['plugin']
def GetFullScreenFlag(self):
"""Return the set fullscreen flag of the application."""
return self.ret_dict['fullscreen']
def GetLaunchScreenImg(self):
"""Return the default img for launch_screen."""
return self.ret_dict['launch_screen_img']
def main(argv):
"""Respond to command mode and show the processed field values."""
parser = optparse.OptionParser()
info = ('The input json-format file name. Such as: '
'--jsonfile=manifest.json')
parser.add_option('-j', '--jsonfile', action='store', dest='jsonfile',
help=info)
opts, _ = parser.parse_args()
if len(argv) == 1:
parser.print_help()
return 0
json_parser = ManifestJsonParser(opts.jsonfile)
json_parser.ShowItems()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
bsd-3-clause
| -8,263,335,172,196,524,000
| 34.211864
| 80
| 0.644043
| false
| 3.708166
| false
| false
| false
|
morevnaproject/RenderChan
|
renderchan/core.py
|
1
|
56360
|
__author__ = 'Konstantin Dmitriev'
__version__ = '1.0-alpha1'
import sys
from renderchan.file import RenderChanFile
from renderchan.project import RenderChanProjectManager
from renderchan.module import RenderChanModuleManager, RenderChanModule
from renderchan.utils import mkdirs
from renderchan.utils import float_trunc
from renderchan.utils import sync
from renderchan.utils import touch
from renderchan.utils import copytree
from renderchan.utils import which
from renderchan.utils import is_true_string
import os, time
import shutil
import subprocess
import zipfile
#TODO: This class actually should be named something like RenderChanJob (this better reflects its purpose)
class RenderChan():
def __init__(self):
self.datadir = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), "templates")
self.available_renderfarm_engines = ("puli","afanasy")
self.renderfarm_engine = ""
self.renderfarm_host = "127.0.0.1"
self.renderfarm_port = 8004
print("RenderChan initialized.")
self.start_time = time.time()
self.projects = RenderChanProjectManager()
self.modules = RenderChanModuleManager()
self.loadedFiles = {}
# TODO: dry_run and force shouldn't be stored in RenderChan object. It's better to pass them as arguments to submit()
self.dry_run = False
self.force = False
self.track = False
# Action. Possible values - render (default), print, pack, clean
self.action = "render"
# Option, which determines if RenderChan should create placeholders for missing files
# TODO: This option is not possible to set via commandline at this moment.
# TODO: Allow to configure how to deal with missing files: create empty placeholder (default), create warning placeholder, none or raise exception.
self.recreateMissing = False
self.force_proxy = False
self.trackedFiles = {}
self.trackedFilesStack = []
self.graph = None # used by renderfarm
# == taskgroups bug / commented ==
# The following are the special taskgroups used for managing stereo rendering
#self.taskgroupLeft = None
#self.taskgroupRight = None
# FIXME: The childTask is a dirty workaround, which we need because of broken taskgroups functionality (search for "taskgroups bug" string to get the commented code)
self.childTask = None
self.AfanasyBlockClass=None
self.cgru_location = "/opt/cgru"
self.snapshot_path = None
self.post_script = None
self.ffmpeg_binary = ''
ffmpeg_path = RenderChanModule.findBinary(self,'ffmpeg')
avconv_path = RenderChanModule.findBinary(self,'avconv')
if which(ffmpeg_path) != None:
self.ffmpeg_binary = ffmpeg_path
elif which(avconv_path) != None:
self.ffmpeg_binary = avconv_path
if self.ffmpeg_binary == '':
raise Exception('ERROR: No ffmpeg binary found. Please install ffmpeg.')
def __del__(self):
if self.renderfarm_engine == "":
t = time.time()-self.start_time
hours = int(t/3600)
t = t - hours*3600
minutes = int(t/60)
t = t - minutes*60
seconds = int(t)
print()
print()
print("Execution time: %02d:%02d:%02d " % ( hours, minutes, seconds ))
print()
def setHost(self, host):
self.renderfarm_host=host
def setPort(self, port):
self.renderfarm_port=port
def setStereoMode(self, mode):
self.setProfile(self.projects.profile, mode)
def setProfile(self, profile, stereo=None):
"""
:type profile: str
"""
if stereo == None:
stereo=self.projects.stereo
if self.projects.active:
# Update root project
self.projects.active.config["stereo"]=stereo
self.projects.active.loadRenderConfig(profile)
# Update child projects
for key in self.projects.list.keys():
project = self.projects.list[key]
project.config=self.projects.active.config.copy()
project.loadRenderConfig(self.projects.profile)
# Reload module configuration
loaded_modules = project.dependencies[:]
project.dependencies = []
for module_name in loaded_modules:
module = self.modules.get(module_name)
project.registerModule(module)
self.projects.profile=profile
self.projects.stereo=stereo
def submit(self, filename, dependenciesOnly=False, allocateOnly=False, stereo=""):
"""
:param filename:
:type filename: str
:param dependenciesOnly:
:param allocateOnly:
:param stereo:
:return:
"""
taskfile = RenderChanFile(filename, self.modules, self.projects)
self.trackFileBegin(taskfile)
if taskfile.project == None:
print(file=sys.stderr)
print("ERROR: Can't render a file which is not a part of renderchan project.", file=sys.stderr)
print(file=sys.stderr)
self.trackFileEnd()
return 1
if not taskfile.module:
print(file=sys.stderr)
extension = os.path.splitext(taskfile.getPath())[1]
if extension:
print("ERROR: The '%s' file type was not recoginized." % extension, file=sys.stderr)
else:
print("ERROR: The provided file does not have an extension.", file=sys.stderr)
print(file=sys.stderr)
self.trackFileEnd()
return 1
if self.action =="print":
self.addToGraph(taskfile, dependenciesOnly, allocateOnly)
print()
for file in self.trackedFiles.values():
print("File: "+file["source"])
print()
# Close cache
for path in self.projects.list.keys():
self.projects.list[path].cache.close()
elif self.action =="pack":
self.addToGraph(taskfile, dependenciesOnly, allocateOnly)
list = []
for file in self.trackedFiles.values():
list.append(file["source"])
commonpath = os.path.commonpath(list)
#for i,c in enumerate(list):
# list[i]=c[len(commonpath)+1:]
# print(list[i])
print()
zipname = os.path.basename(taskfile.getPath())+'.zip'
if os.path.exists(os.path.join(os.getcwd(),zipname)):
print("ERROR: File "+os.path.join(os.getcwd(),zipname)+" already exists.")
exit()
with zipfile.ZipFile(zipname, 'x') as myzip:
for i,c in enumerate(list):
print("Zipping file: "+c)
myzip.write(c, c[len(commonpath)+1:])
print("Written "+os.path.join(os.getcwd(),zipname)+".")
print()
# Close cache
for path in self.projects.list.keys():
self.projects.list[path].cache.close()
elif self.action =="render":
if self.renderfarm_engine=="afanasy":
if not os.path.exists(os.path.join(self.cgru_location,"afanasy")):
print("ERROR: Cannot render with afanasy, afanasy not found at cgru directory '%s'." % self.cgru_location, file=sys.stderr)
self.trackFileEnd()
return 1
os.environ["CGRU_LOCATION"]=self.cgru_location
os.environ["AF_ROOT"]=os.path.join(self.cgru_location,"afanasy")
sys.path.insert(0, os.path.join(self.cgru_location,"lib","python"))
sys.path.insert(0, os.path.join(self.cgru_location,"afanasy","python"))
from af import Job as AfanasyJob
from af import Block as AfanasyBlock
self.AfanasyBlockClass=AfanasyBlock
self.graph = AfanasyJob('RenderChan - %s - %s' % (taskfile.localPath, taskfile.projectPath))
elif self.renderfarm_engine=="puli":
from puliclient import Graph
self.graph = Graph( 'RenderChan graph', poolName="default" )
last_task = None
if stereo in ("vertical","v","vertical-cross","vc","horizontal","h","horizontal-cross","hc"):
# Left eye graph
self.setStereoMode("left")
self.addToGraph(taskfile, dependenciesOnly, allocateOnly)
if self.renderfarm_engine!="":
self.childTask = taskfile.taskPost
# Right eye graph
self.setStereoMode("right")
self.addToGraph(taskfile, dependenciesOnly, allocateOnly)
# Stitching altogether
if self.renderfarm_engine=="":
self.job_merge_stereo(taskfile, stereo)
elif self.renderfarm_engine=="afanasy":
name = "StereoPost - %f" % ( time.time() )
block = self.AfanasyBlockClass(name, 'generic')
block.setCommand("renderchan-job-launcher \"%s\" --action merge --profile %s --stereo %s --compare-time %f --active-project \"%s\"" % ( taskfile.getPath(), self.projects.profile, stereo, time.time(), self.projects.active.path ))
if taskfile.taskPost!=None:
block.setDependMask(taskfile.taskPost)
block.setNumeric(1,1,100)
block.setCapacity(100)
self.graph.blocks.append(block)
last_task = name
elif self.renderfarm_engine=="puli":
runner = "puliclient.contrib.commandlinerunner.CommandLineRunner"
# Add parent task which composes results and places it into valid destination
command = "renderchan-job-launcher \"%s\" --action merge --profile %s --stereo %s --compare-time %f --active-project %s" % ( taskfile.getPath(), self.projects.profile, stereo, time.time(), self.projects.active.path)
stereoTask = self.graph.addNewTask( name="StereoPost: "+taskfile.localPath, runner=runner, arguments={ "args": command} )
# Dummy task
#decomposer = "puliclient.contrib.generic.GenericDecomposer"
#params={ "cmd":"echo", "start":1, "end":1, "packetSize":1, "prod":"test", "shot":"test" }
#dummyTask = self.graph.addNewTask( name="StereoDummy", arguments=params, decomposer=decomposer )
# == taskgroups bug / commented ==
#self.graph.addEdges( [(self.taskgroupLeft, self.taskgroupRight)] )
#self.graph.addEdges( [(self.taskgroupRight, stereoTask)] )
#self.graph.addChain( [self.taskgroupLeft, dummyTask, self.taskgroupRight, stereoTask] )
if taskfile.taskPost!=None:
self.graph.addEdges( [(taskfile.taskPost, stereoTask)] )
last_task = stereoTask
else:
if stereo in ("left","l"):
self.setStereoMode("left")
elif stereo in ("right","r"):
self.setStereoMode("right")
self.addToGraph(taskfile, dependenciesOnly, allocateOnly)
last_task = taskfile.taskPost
# Post-script
if self.post_script:
if stereo in ("vertical","v","horizontal","h"):
script_arg = os.path.splitext(taskfile.getRenderPath())[0]+"-stereo-"+stereo[0:1]+os.path.splitext(taskfile.getRenderPath())[1]
else:
script_arg = taskfile.getRenderPath()
if self.renderfarm_engine=="":
commandline=[self.post_script, script_arg]
subprocess.run("\"%s\" \"%s\"" % ( self.post_script, script_arg), shell=True, check=True)
elif self.renderfarm_engine=="afanasy":
name = "Post Script - %f" % ( time.time() )
block = self.AfanasyBlockClass(name, 'generic')
block.setCommand("\"%s\" \"%s\"" % ( self.post_script, script_arg))
if last_task!=None:
block.setDependMask(last_task)
block.setNumeric(1,1,100)
block.setCapacity(100)
self.graph.blocks.append(block)
# Snapshot
if self.snapshot_path:
if stereo in ("vertical","v","horizontal","h"):
snapshot_source = os.path.splitext(taskfile.getRenderPath())[0]+"-stereo-"+stereo[0:1]+os.path.splitext(taskfile.getRenderPath())[1]
else:
snapshot_source = taskfile.getProfileRenderPath()
if self.renderfarm_engine=="":
self.job_snapshot(snapshot_source, self.snapshot_path)
elif self.renderfarm_engine=="afanasy":
name = "Snapshot - %f" % ( time.time() )
block = self.AfanasyBlockClass(name, 'generic')
block.setCommand("renderchan-job-launcher \"%s\" --action snapshot --target-dir %s" % ( snapshot_source, self.snapshot_path))
if last_task!=None:
block.setDependMask(last_task)
block.setNumeric(1,1,100)
block.setCapacity(50)
self.graph.blocks.append(block)
elif self.renderfarm_engine=="puli":
runner = "puliclient.contrib.commandlinerunner.CommandLineRunner"
# Add parent task which composes results and places it into valid destination
command = "renderchan-job-launcher \"%s\" --action snapshot --target-dir %s" % ( snapshot_source, self.snapshot_path)
snapshotTask = self.graph.addNewTask( name="Snapshot: "+taskfile.localPath, runner=runner, arguments={ "args": command} )
if last_task!=None:
self.graph.addEdges( [(last_task, snapshotTask)] )
# Make sure to close cache before submitting job to renderfarm
for path in self.projects.list.keys():
self.projects.list[path].cache.close()
# Submit job to renderfarm
if self.renderfarm_engine=="afanasy":
# Wait a moment to make sure cache is closed properly
                # (this helps avoid issues with shared NFS drives)
time.sleep(1)
self.graph.output()
self.graph.send()
elif self.renderfarm_engine=="puli":
self.graph.submit(self.renderfarm_host, self.renderfarm_port)
else:
# TODO: Render our Graph
pass
self.trackFileEnd()
def addToGraph(self, taskfile, dependenciesOnly=False, allocateOnly=False):
"""
:type taskfile: RenderChanFile
"""
for path in self.loadedFiles.keys():
self.loadedFiles[path].isDirty=None
#self.loadedFiles={}
# == taskgroups bug / commented ==
# Prepare taskgroups if we do stereo rendering
#if self.projects.active.getConfig("stereo")=="left":
# self.taskgroupLeft = self.graph.addNewTaskGroup( name="TG Left: "+taskfile.getPath() )
#elif self.projects.active.getConfig("stereo")=="right":
# self.taskgroupRight = self.graph.addNewTaskGroup( name="TG Right: "+taskfile.getPath() )
if allocateOnly and dependenciesOnly:
if os.path.exists(taskfile.getRenderPath()):
self.parseDirectDependency(taskfile, None, self.dry_run, self.force)
else:
taskfile.endFrame = taskfile.startFrame + 2
self.parseRenderDependency(taskfile, allocateOnly, self.dry_run, self.force)
elif dependenciesOnly:
self.parseDirectDependency(taskfile, None, self.dry_run, self.force)
elif allocateOnly:
if os.path.exists(taskfile.getRenderPath()):
print("File is already allocated.")
sys.exit(0)
taskfile.dependencies=[]
taskfile.endFrame = taskfile.startFrame + 2
self.parseRenderDependency(taskfile, allocateOnly, self.dry_run, self.force)
else:
self.parseRenderDependency(taskfile, allocateOnly, self.dry_run, self.force)
self.childTask = None
def trackFileBegin(self, taskfile):
"""
:type taskfile: RenderChanFile
"""
if self.track:
key = taskfile.getPath()
if key not in self.trackedFiles:
trackedFile = {}
trackedFile["source"] = key
trackedFile["deps"] = []
trackedFile["backDeps"] = []
self.trackedFiles[key] = trackedFile;
if self.trackedFilesStack:
parentKey = self.trackedFilesStack[-1]
if parentKey != key:
if key not in self.trackedFiles[parentKey]["deps"]:
self.trackedFiles[parentKey]["deps"].append(key)
if parentKey not in self.trackedFiles[key]["backDeps"]:
self.trackedFiles[key]["backDeps"].append(parentKey)
self.trackedFilesStack.append(key)
if taskfile.project and key != taskfile.project.confPath and os.path.exists(taskfile.project.confPath):
projectKey = taskfile.project.confPath
if projectKey not in self.trackedFiles:
trackedFile = {}
trackedFile["source"] = projectKey
trackedFile["deps"] = []
trackedFile["backDeps"] = []
self.trackedFiles[projectKey] = trackedFile
if projectKey not in self.trackedFiles[key]["deps"]:
self.trackedFiles[key]["deps"].append(projectKey)
if key not in self.trackedFiles[projectKey]["backDeps"]:
self.trackedFiles[projectKey]["backDeps"].append(key)
def trackFileEnd(self):
if self.track:
self.trackedFilesStack.pop()
def parseRenderDependency(self, taskfile, allocateOnly, dryRun = False, force = False):
"""
:type taskfile: RenderChanFile
"""
# TODO: Re-implement this function in the same way as __not_used__syncProfileData() ?
self.trackFileBegin(taskfile)
isDirty = False
# First, let's ensure, that we are in sync with profile data
t=taskfile.project.switchProfile(taskfile.project.getProfileDirName())
checkTime=None
if os.path.exists(taskfile.getProfileRenderPath()+".sync"):
checkFile=os.path.join(taskfile.getProjectRoot(),"render","project.conf","profile.conf")
checkTime=float_trunc(os.path.getmtime(checkFile),1)
if os.path.exists(taskfile.getProfileRenderPath()):
source=taskfile.getProfileRenderPath()
dest=taskfile.getRenderPath()
sync(source,dest,checkTime)
source=os.path.splitext(taskfile.getProfileRenderPath())[0]+"-alpha."+taskfile.getFormat()
dest=os.path.splitext(taskfile.getRenderPath())[0]+"-alpha."+taskfile.getFormat()
sync(source,dest,checkTime)
else:
isDirty = True
t.unlock()
if not os.path.exists(taskfile.getProfileRenderPath()):
# If no rendering exists, then obviously rendering is required
isDirty = True
compareTime = None
if os.environ.get('DEBUG'):
print("DEBUG: Dirty = 1 (no rendering exists)")
else:
# Otherwise we have to check against the time of the last rendering
compareTime = float_trunc(os.path.getmtime(taskfile.getProfileRenderPath()),1)
# Get "dirty" status for the target file and all dependent tasks, submitted as dependencies
(isDirtyValue, tasklist, maxTime)=self.parseDirectDependency(taskfile, compareTime, dryRun, force)
isDirty = isDirty or isDirtyValue
# Mark this file as already parsed and thus its "dirty" value is known
taskfile.isDirty=isDirty
# If rendering is requested
if not dryRun and (isDirty or force):
# Make sure we have all directories created
mkdirs(os.path.dirname(taskfile.getProfileRenderPath()))
mkdirs(os.path.dirname(taskfile.getRenderPath()))
params = taskfile.getParams(self.force_proxy)
# Keep track of created files to allow merging them later
output_list = os.path.splitext( taskfile.getProfileRenderPath() )[0] + ".txt"
output_list_alpha = os.path.splitext( taskfile.getProfileRenderPath() )[0] + "-alpha.txt"
if taskfile.getPacketSize() > 0:
segments = self.decompose(taskfile.getStartFrame(), taskfile.getEndFrame(), taskfile.getPacketSize())
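                # The frame range is split into fixed-size packets that render as
                # independent chunks; the .txt lists written below use ffmpeg's
                # concat-demuxer syntax so job_merge() can stitch the chunks together.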
f = open(output_list, 'w')
fa = None
try:
if "extract_alpha" in params and is_true_string(params["extract_alpha"]):
fa = open(output_list_alpha, 'w')
for range in segments:
start=range[0]
end=range[1]
chunk_name = taskfile.getProfileRenderPath(start,end)
f.write("file '%s'\n" % (chunk_name))
if "extract_alpha" in params and is_true_string(params["extract_alpha"]):
alpha_output = os.path.splitext(chunk_name)[0] + "-alpha" + os.path.splitext(chunk_name)[1]
fa.write("file '%s'\n" % (alpha_output))
finally:
f.close()
if fa:
fa.close()
else:
segments=[ (None,None) ]
if allocateOnly:
# Make sure this file will be re-rendered next time
compare_time=taskfile.mtime-1000
else:
compare_time=maxTime
if self.renderfarm_engine=="":
for range in segments:
start=range[0]
end=range[1]
self.job_render(taskfile, taskfile.getFormat(), self.updateCompletion, start, end, compare_time)
self.job_merge(taskfile, taskfile.getFormat(), taskfile.project.getConfig("stereo"), compare_time)
elif self.renderfarm_engine=="afanasy":
# Render block
command = "renderchan-job-launcher \"%s\" --action render --format %s --profile %s --compare-time %s --active-project \"%s\"" % ( taskfile.getPath(), taskfile.getFormat(), self.projects.profile, compare_time, self.projects.active.path)
if self.projects.stereo!="":
command += " --stereo %s" % (self.projects.stereo)
if taskfile.getPacketSize()>0:
command += " --start @#@ --end @#@"
if taskfile.project.path == self.projects.active.path:
name = "%s - %f" % ( taskfile.localPath, time.time() )
else:
name = "%s - %s - %f" % ( taskfile.localPath, taskfile.projectPath, time.time() )
                # Afanasy uses its own algorithms to parse output for the modules it supports.
                # For example, it terminates the rendering process if Blender complains about a
                # missing library file.
                # This behaviour is not desirable, since it can confuse users: a file renders successfully
                # with RenderChan in standalone mode, but fails to render on the renderfarm. So, I have
                # disabled blocktype assignment below.
                # Food for thought: in the future we need to think about how to handle integrity checks on
                # our own.
                # Food for thought: should we make blocktype assignment an option?
#
#if taskfile.module.getName() in ("blender"):
# blocktype=taskfile.module.getName()
#else:
# blocktype="generic"
blocktype="generic"
block = self.AfanasyBlockClass(name, blocktype)
block.setCommand(command)
block.setErrorsTaskSameHost(-2)
if taskfile.getPacketSize()>0:
block.setNumeric(taskfile.getStartFrame(),taskfile.getEndFrame(),taskfile.getPacketSize())
else:
block.setNumeric(1,1,100)
if taskfile.module.getName() in ("flac","mp3","vorbis"):
block.setCapacity(50)
elif taskfile.module.getName() in ("krita"):
block.setCapacity(500)
depend_mask=[]
for dep_task in tasklist:
depend_mask.append(dep_task)
if self.childTask!=None:
depend_mask.append(self.childTask)
block.setDependMask("|".join(depend_mask))
command = "renderchan-job-launcher \"%s\" --action merge --format %s --profile %s --compare-time %s --active-project \"%s\"" % ( taskfile.getPath(), taskfile.getFormat(), self.projects.profile, compare_time, self.projects.active.path )
if self.projects.stereo!="":
command += " --stereo %s" % (self.projects.stereo)
self.graph.blocks.append(block)
# Post block
if taskfile.project.path == self.projects.active.path:
name_post = "Post %s - %f" % ( taskfile.localPath, time.time() )
else:
name_post = "Post %s - %s - %f" % ( taskfile.localPath, taskfile.projectPath, time.time() )
taskfile.taskPost = name_post
block = self.AfanasyBlockClass(name_post, "generic")
block.setNumeric(1,1,100)
block.setCommand(command)
block.setDependMask(name)
block.setErrorsTaskSameHost(-2)
block.setCapacity(50)
self.graph.blocks.append(block)
elif self.renderfarm_engine=="puli":
# Puli part here
graph_destination = self.graph
# == taskgroups bug / commented ==
#if self.projects.active.getConfig("stereo")=="left":
# graph_destination = self.taskgroupLeft
# name+=" (L)"
#elif self.projects.active.getConfig("stereo")=="right":
# graph_destination = self.taskgroupRight
# name+=" (R)"
#else:
# graph_destination = self.graph
runner = "puliclient.contrib.commandlinerunner.CommandLineRunner"
# Add parent task which composes results and places it into valid destination
command = "renderchan-job-launcher \"%s\" --action merge --format %s --profile %s --compare-time %s --active-project \"%s\"" % ( taskfile.getPath(), taskfile.getFormat(), self.projects.profile, compare_time, self.projects.active.path )
if self.projects.stereo!="":
command += " --stereo %s" % (self.projects.stereo)
taskfile.taskPost=graph_destination.addNewTask( name="Post: "+taskfile.localPath, runner=runner, arguments={ "args": command} )
# Add rendering segments
for range in segments:
start=range[0]
end=range[1]
if start!=None and end!=None:
segment_name = "Render: %s (%s-%s)" % (taskfile.localPath, start, end)
command = "renderchan-job-launcher \"%s\" --action render --format %s --profile %s --start %s --end %s --compare-time %s --active-project \"%s\"" % ( taskfile.getPath(), taskfile.getFormat(), self.projects.profile, start, end, compare_time, self.projects.active.path )
else:
segment_name = "Render: %s" % (taskfile.localPath)
command = "renderchan-job-launcher \"%s\" --action render --format %s --profile %s --compare-time %s --active-project \"%s\"" % ( taskfile.getPath(), taskfile.getFormat(), self.projects.profile, compare_time, self.projects.active.path )
if self.projects.stereo!="":
command += " --stereo %s" % (self.projects.stereo)
task=graph_destination.addNewTask( name=segment_name, runner=runner, arguments={ "args": command} )
self.graph.addEdges( [(task, taskfile.taskPost)] )
# Add edges for dependent tasks
for dep_task in tasklist:
self.graph.addEdges( [(dep_task, task)] )
if self.childTask!=None:
self.graph.addEdges( [(self.childTask, task)] )
self.trackFileEnd()
return isDirty
def parseDirectDependency(self, taskfile, compareTime, dryRun = False, force = False):
"""
:type taskfile: RenderChanFile
"""
self.trackFileBegin(taskfile)
isDirty=False
tasklist=[]
# maxTime is the maximum of modification times for all direct dependencies.
# It allows to compare with already rendered pieces and continue rendering
# if they are rendered AFTER the maxTime.
#
# But, if we have at least one INDIRECT dependency (i.e. render task) and it is submitted
# for rendering, then we can't compare with maxTime (because dependency will be rendered
# and thus rendering should take place no matter what).
maxTime = taskfile.getTime()
taskfile.pending=True # we need this to avoid circular dependencies
if not taskfile.isFrozen() or force:
deps = taskfile.getDependencies()
for path in deps:
path = os.path.abspath(path)
if path in self.loadedFiles.keys():
dependency = self.loadedFiles[path]
if dependency.pending:
# Avoid circular dependencies
print("Warning: Circular dependency detected for %s. Skipping." % (path))
continue
else:
dependency = RenderChanFile(path, self.modules, self.projects)
if not os.path.exists(dependency.getPath()):
if self.recreateMissing and dependency.projectPath!='':
# Let's look if we have a placeholder template
ext = os.path.splitext(path)[1]
placeholder = os.path.join(self.datadir, "missing", "empty" + ext)
if os.path.exists(placeholder):
print(" Creating an empty placeholder for %s..." % path)
mkdirs(os.path.dirname(path))
shutil.copy(placeholder, path)
t = time.mktime(time.strptime('01.01.1981 00:00:00', '%d.%m.%Y %H:%M:%S'))
os.utime(path,(t,t))
else:
print(" Skipping file %s..." % path)
else:
print(" Skipping file %s..." % path)
continue
self.loadedFiles[dependency.getPath()]=dependency
if dependency.project!=None and dependency.module!=None:
self.loadedFiles[dependency.getRenderPath()]=dependency
# Alpha
renderpath_alpha=os.path.splitext(dependency.getRenderPath())[0]+"-alpha."+dependency.getFormat()
self.loadedFiles[renderpath_alpha]=dependency
# Check if this is a rendering dependency
if path != dependency.getPath():
# We have a new task to render
if dependency.isDirty==None:
if dependency.module!=None:
dep_isDirty = self.parseRenderDependency(dependency, False, dryRun, force)
else:
raise Exception("No module to render file" + dependency.getPath())
else:
# The dependency was already submitted to graph
dep_isDirty = dependency.isDirty
if dep_isDirty:
# Let's return submitted task into tasklist
if dependency.taskPost not in tasklist:
tasklist.append(dependency.taskPost)
# Increase maxTime, because re-rendering of dependency will take place
maxTime=time.time()
isDirty = True
else:
# If no rendering requested, we still have to check if rendering result
# is newer than compareTime
#if os.path.exists(dependency.getRenderPath()): -- file is obviously exists, because isDirty==0
timestamp=float_trunc(os.path.getmtime(dependency.getProfileRenderPath()),1)
if compareTime is None:
isDirty = True
if os.environ.get('DEBUG'):
print("DEBUG: %s:" % taskfile.getPath())
print("DEBUG: Dirty = 1 (no compare time)")
print()
elif timestamp > compareTime:
isDirty = True
if os.environ.get('DEBUG'):
print("DEBUG: %s:" % taskfile.getPath())
print("DEBUG: Dirty = 1 (dependency timestamp is higher)")
print("DEBUG: compareTime = %f" % (compareTime))
print("DEBUG: dependency time = %f" % (timestamp))
print()
if timestamp>maxTime:
maxTime=timestamp
else:
# No, this is an ordinary dependency
(dep_isDirty, dep_tasklist, dep_maxTime) = self.parseDirectDependency(dependency, compareTime, dryRun, force)
isDirty = isDirty or dep_isDirty
maxTime = max(maxTime, dep_maxTime)
for task in dep_tasklist:
if task not in tasklist:
tasklist.append(task)
if not isDirty and not force:
timestamp = float_trunc(taskfile.getTime(), 1)
if compareTime is None:
if os.environ.get('DEBUG'):
print("DEBUG: %s:" % taskfile.getPath())
print("DEBUG: Dirty = 1 (no compare time)")
print()
isDirty = True
elif timestamp > compareTime:
isDirty = True
if os.environ.get('DEBUG'):
print("DEBUG: %s:" % taskfile.getPath())
print("DEBUG: Dirty = 1 (source timestamp is higher)")
print("DEBUG: compareTime = %f" % (compareTime))
print("DEBUG: source time = %f" % (timestamp))
print()
if timestamp>maxTime:
maxTime=timestamp
# Parse pack.lst and FILENAME.pack.lst files
if taskfile.projectPath:
deps = []
if self.action == "pack":
# pack.lst
check_path = os.path.dirname(taskfile.getPath())
while len(check_path) >= len(taskfile.projectPath):
path = os.path.join(check_path,"pack.lst")
if os.path.exists(path) and not path in self.loadedFiles.keys():
deps.append(path)
check_path = os.path.dirname(check_path)
# FILENAME.pack.lst
path = taskfile.getPath()+".pack.lst"
if os.path.exists(path) and not path in self.loadedFiles.keys():
deps.append(path)
for path in deps:
dependency = RenderChanFile(path, self.modules, self.projects)
self.loadedFiles[dependency.getPath()]=dependency
# NOTE: We don't need to modify dirty state of our taskfile, because
# packed data shouldn't trigger additional rendering. This is also why
# we don't store any returned values from parseDirectDependency().
# We still need to call parseDirectDependency() to make sure the
# dependencies of pack.lst will get added to self.trackedFiles.
self.parseDirectDependency(dependency, compareTime, dryRun, force)
taskfile.pending=False
self.trackFileEnd()
return (isDirty, list(tasklist), maxTime)
def updateCompletion(self, value):
print("Rendering: %s" % (value*100))
def __not_used__syncProfileData(self, renderpath):
if renderpath in self.loadedFiles.keys():
taskfile = self.loadedFiles[renderpath]
if taskfile.pending:
# Avoid circular dependencies
print("Warning: Circular dependency detected for %s. Skipping." % (renderpath))
return
else:
taskfile = RenderChanFile(renderpath, self.modules, self.projects)
if not os.path.exists(taskfile.getPath()):
print(" No source file for %s. Skipping." % renderpath)
return
self.loadedFiles[taskfile.getPath()]=taskfile
taskfile.pending=True # we need this to avoid circular dependencies
if taskfile.project!=None and taskfile.module!=None:
self.loadedFiles[taskfile.getRenderPath()]=taskfile
deps = taskfile.getDependencies()
for path in deps:
self.syncProfileData(path)
if renderpath != taskfile.getPath():
# TODO: Change parseRenderDependency() in the same way?
checkFile=os.path.join(taskfile.getProjectRoot(),"render","project.conf","profile.conf")
checkTime=float_trunc(os.path.getmtime(checkFile),1)
source=taskfile.getProfileRenderPath()
dest=taskfile.getRenderPath()
sync(source,dest,checkTime)
source=os.path.splitext(taskfile.getProfileRenderPath())[0]+"-alpha."+taskfile.getFormat()
dest=os.path.splitext(taskfile.getRenderPath())[0]+"-alpha."+taskfile.getFormat()
sync(source,dest,checkTime)
taskfile.pending=False
def job_render(self, taskfile, format, updateCompletion, start=None, end=None, compare_time=None):
"""
:type taskfile: RenderChanFile
"""
if start==None or end==None:
output = taskfile.getProfileRenderPath(0,0)
start=taskfile.getStartFrame()
end=taskfile.getEndFrame()
else:
output = taskfile.getProfileRenderPath(start,end)
if not os.path.exists(os.path.dirname(output)):
os.makedirs(os.path.dirname(output))
# Check if we really need to re-render
uptodate=False
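        # A '<output>.done' marker is touched after every successful render; if it is
        # at least as new as compare_time, the existing chunk is reused.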
if compare_time and not self.force:
if os.path.exists(output+".done") and os.path.exists(output):
if float_trunc(os.path.getmtime(output+".done"),1) >= compare_time:
# Hurray! No need to re-render that piece.
uptodate=True
if not uptodate:
# PROJECT LOCK
# Make sure our rendertree is in sync with current profile
locks=[]
for project in self.projects.list.values():
t=project.switchProfile(taskfile.project.getProfileDirName())
locks.append(t)
try:
if os.path.isdir(output):
shutil.rmtree(output)
if os.path.exists(output+".done"):
os.remove(output+".done")
# TODO: Create file lock here
params = taskfile.getParams(self.force_proxy)
taskfile.module.render(taskfile.getPath(),
output,
int(start),
int(end),
format,
updateCompletion,
params)
touch(output + ".done", compare_time)
if "extract_alpha" in params and is_true_string(params["extract_alpha"]):
alpha_output = os.path.splitext(output)[0] + "-alpha" + os.path.splitext(output)[1]
touch(alpha_output + ".done", compare_time)
# TODO: Release file lock here
except:
for lock in locks:
lock.unlock()
print("Unexpected error:", sys.exc_info()[0])
raise
# Releasing PROJECT LOCK
for lock in locks:
lock.unlock()
else:
print(" This chunk is already up to date. Skipping.")
updateCompletion(1.0)
def job_merge(self, taskfile, format, stereo, compare_time=None):
"""
:type taskfile: RenderChanFile
"""
# PROJECT LOCK
# Make sure our rendertree is in sync with current profile
locks=[]
for project in self.projects.list.values():
t=project.switchProfile(taskfile.project.getProfileDirName())
locks.append(t)
try:
params = taskfile.getParams(self.force_proxy)
suffix_list = [""]
if "extract_alpha" in params and is_true_string(params["extract_alpha"]):
suffix_list.append("-alpha")
for suffix in suffix_list:
output = os.path.splitext(taskfile.getRenderPath())[0] + suffix + "." + format
profile_output = os.path.splitext( taskfile.getProfileRenderPath() )[0] + suffix + "." + format
profile_output_list = os.path.splitext(profile_output)[0] + ".txt"
# We need to merge the rendered files into single one
print("Merging: %s" % profile_output)
# But first let's check if we really need to do that
uptodate = False
if os.path.exists(profile_output):
if os.path.exists(profile_output + ".done") and \
float_trunc(os.path.getmtime(profile_output + ".done"), 1) >= compare_time:
# Hurray! No need to merge that piece.
uptodate = True
else:
if os.path.isdir(profile_output):
shutil.rmtree(profile_output)
else:
os.remove(profile_output)
if os.path.exists(profile_output + ".done"):
os.remove(profile_output + ".done")
if not uptodate:
if taskfile.getPacketSize() > 0:
if os.path.exists(profile_output_list):
# Check if we really have all segments rendered correctly
with open(profile_output_list, 'r') as f:
segments = []
for line in f:
line = line.strip()[6:-1]
segments.append(line)
if not os.path.exists(line+".done") or not os.path.exists(line):
print("ERROR: Not all segments were rendered. Aborting.", file=sys.stderr)
exit(1)
if os.path.isfile(profile_output+".done"):
os.remove(profile_output+".done")
if format == "avi":
subprocess.check_call(
[self.ffmpeg_binary, "-y", "-safe", "0", "-f", "concat", "-i", profile_output_list, "-c", "copy", profile_output])
else:
# Merge all sequences into single directory
for line in segments:
print(line)
copytree(line, profile_output, hardlinks=True)
os.remove(profile_output_list)
for line in segments:
if os.path.isfile(line):
os.remove(line)
else:
shutil.rmtree(line, ignore_errors=True)
if os.path.isfile(line+".done"):
os.remove(line+".done")
touch(profile_output + ".done", float(compare_time))
else:
print(" This chunk is already merged. Skipping.")
#updateCompletion(0.5)
else:
segment = os.path.splitext( taskfile.getProfileRenderPath(0,0) )[0] + suffix + "." + format
if os.path.exists(segment+".done") and os.path.exists(segment):
os.rename(segment, profile_output)
touch(profile_output + ".done", float(compare_time))
else:
print("ERROR: Not all segments were rendered. Aborting.", file=sys.stderr)
exit(1)
# Add LST file
if format in RenderChanModule.imageExtensions and os.path.isdir(profile_output):
lst_profile_path = os.path.splitext(profile_output)[0] + ".lst"
lst_path = os.path.splitext(output)[0] + ".lst"
with open(lst_profile_path, 'w') as f:
f.write("FPS %s\n" % params["fps"])
for filename in sorted(os.listdir(profile_output)):
if filename.endswith(format):
f.write("%s/%s\n" % ( os.path.basename(profile_output), filename ))
sync(lst_profile_path, lst_path)
# Compatibility
if taskfile.project.version < 1:
lst_profile_path = os.path.join(profile_output, "file.lst")
lst_path = os.path.join(output, "file.lst")
with open(lst_profile_path, 'w') as f:
f.write("FPS %s\n" % params["fps"])
for filename in sorted(os.listdir(profile_output)):
if filename.endswith(format):
f.write("%s\n" % filename)
sync(lst_profile_path, lst_path)
sync(profile_output, output)
#touch(output+".done",arguments["maxTime"])
touch(output, float(compare_time))
except:
print("ERROR: Merge operation failed.", file=sys.stderr)
for lock in locks:
lock.unlock()
exit(1)
# Releasing PROJECT LOCK
for lock in locks:
lock.unlock()
#updateCompletion(1)
def job_merge_stereo(self, taskfile, mode, format="mp4"):
output = os.path.splitext(taskfile.getRenderPath())[0]+"-stereo-%s."+format
prev_mode = self.projects.stereo
self.setStereoMode("left")
input_left = taskfile.getProfileRenderPath()
self.setStereoMode("right")
input_right = taskfile.getProfileRenderPath()
self.setStereoMode(prev_mode)
if mode.endswith("c") or mode.endswith("-cross"):
output %= mode[0:1] + "c"
temp = input_left
input_left = input_right
input_right = temp
else:
output %= mode[0:1]
print("Merging: %s" % output)
# But first let's check if we really need to do that
uptodate = False
if os.path.exists(output):
if os.path.exists(output + ".done") and \
os.path.exists(input_left) and \
os.path.exists(input_right) and \
float_trunc(os.path.getmtime(output + ".done"), 1) >= float_trunc(os.path.getmtime(input_left), 1) and \
float_trunc(os.path.getmtime(output + ".done"), 1) >= float_trunc(os.path.getmtime(input_right), 1):
# Hurray! No need to merge that piece.
uptodate = True
else:
if os.path.isdir(output):
shutil.rmtree(output)
else:
os.remove(output)
if os.path.exists(output + ".done"):
os.remove(output + ".done")
if not uptodate:
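            # (Sketch of the layouts built below) 'v*' modes pad the first
            # input to twice its height and overlay the second input at y=h
            # (top/bottom stack); the other modes pad to twice the width and
            # overlay at x=w (side-by-side).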
if mode[0:1]=='v':
subprocess.check_call(
["ffmpeg", "-y", "-i", input_left, "-i", input_right,
"-filter_complex", "[0:v]setpts=PTS-STARTPTS, pad=iw:ih*2[bg]; [1:v]setpts=PTS-STARTPTS[fg]; [bg][fg]overlay=0:h",
"-c:v", "libx264", "-pix_fmt", "yuv420p", "-crf", "1",
"-c:a", "aac", "-qscale:a", "0",
"-f", "mp4",
output])
else:
subprocess.check_call(
["ffmpeg", "-y", "-i", input_left, "-i", input_right,
"-filter_complex", "[0:v]setpts=PTS-STARTPTS, pad=iw*2:ih[bg]; [1:v]setpts=PTS-STARTPTS[fg]; [bg][fg]overlay=w",
"-c:v", "libx264", "-pix_fmt", "yuv420p", "-crf", "1",
"-c:a", "aac", "-qscale:a", "0",
"-f", "mp4",
output])
touch(output + ".done", os.path.getmtime(output))
else:
print(" This chunk is already merged. Skipping.")
def job_snapshot(self, renderpath, snapshot_dir):
if not os.path.exists(snapshot_dir):
mkdirs(snapshot_dir)
time_string = "%s" % ( time.strftime("%Y%m%d-%H%M%S") )
filename = os.path.splitext(os.path.basename(renderpath))[0] + "-" + time_string + os.path.splitext(renderpath)[1]
snapshot_path = os.path.join(snapshot_dir, filename)
print()
print("Creating snapshot to %s ..." % (filename))
print()
        if os.path.isdir(renderpath):
try:
copytree(renderpath, snapshot_dir, hardlinks=True)
except:
copytree(renderpath, snapshot_dir, hardlinks=False)
else:
try:
os.link(renderpath, snapshot_path)
except:
shutil.copy2(renderpath, snapshot_path)
def decompose(self, start, end, packetSize, framesList=""):
packetSize = int(packetSize)
result=[]
if len(framesList) != 0:
frames = framesList.split(",")
for frame in frames:
if "-" in frame:
frameList = frame.split("-")
start = int(frameList[0])
end = int(frameList[1])
length = end - start + 1
fullPacketCount, lastPacketCount = divmod(length, packetSize)
if length < packetSize:
result.append((start, end))
else:
for i in range(fullPacketCount):
packetStart = start + i * packetSize
packetEnd = packetStart + packetSize - 1
result.append((packetStart, packetEnd))
if lastPacketCount:
packetStart = start + (i + 1) * packetSize
result.append((packetStart, end))
else:
result.append((int(frame), int(frame)))
else:
start = int(start)
end = int(end)
length = end - start + 1
fullPacketCount, lastPacketCount = divmod(length, packetSize)
if length < packetSize:
result.append((start, end))
else:
for i in range(fullPacketCount):
packetStart = start + i * packetSize
packetEnd = packetStart + packetSize - 1
result.append((packetStart, packetEnd))
if lastPacketCount:
packetStart = start + (i + 1) * packetSize
result.append((packetStart, end))
return result
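        # Illustrative example of the packeting above: decompose(start=0,
        # end=9, packetSize=4) with an empty framesList returns
        # [(0, 3), (4, 7), (8, 9)]; the last packet holds the leftover frames.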
def loadFile(self, filename):
return RenderChanFile(filename, self.modules, self.projects)
class Attribution():
def __init__(self, filename, moduleManager=None, projectManager=None):
self.modules = moduleManager
if self.modules==None:
self.modules = RenderChanModuleManager()
self.projects = projectManager
if self.projects==None:
self.projects = RenderChanProjectManager()
self.licenses = {}
self.freesound_items = {} # author:[title1,title2,...]
taskfile = RenderChanFile(filename, self.modules, self.projects)
self.parse(taskfile)
def parse(self, taskfile):
for dep in taskfile.getDependencies():
t = RenderChanFile(dep, self.modules, self.projects)
metadata = t.getMetadata()
if "freesound" in metadata.sources:
for author in metadata.authors:
if author not in self.freesound_items:
self.freesound_items[author]=[]
if metadata.title not in self.freesound_items[author]:
self.freesound_items[author].append(metadata.title)
if not metadata.license == None:
if metadata.license not in self.licenses:
self.licenses[metadata.license]=[]
self.licenses[metadata.license].append(t.getPath())
self.parse(t)
def output(self):
print()
print("== Sound FX ==")
print("This video uses these sounds from freesound:")
print()
for author in self.freesound_items.keys():
print('"'+'", "'.join(self.freesound_items[author])+'" by '+author)
print()
print("== Licenses ==")
print(", ".join(self.licenses.keys()))
print()
print("== Files sorted by license ==")
for license in self.licenses.keys():
print(license+":")
for file in self.licenses[license]:
print(" "+file)
print()
|
bsd-3-clause
| 7,134,858,892,546,014,000
| 41.9246
| 292
| 0.527715
| false
| 4.45745
| false
| false
| false
|
JoseBlanca/seq_crumbs
|
crumbs/seq/alignment_result.py
|
1
|
48822
|
'''This module holds the code that allows to analyze the alignment search
result analysis.
It can deal with blasts, iprscan or ssaha2 results.
This results can be parsed, filtered and analyzed.
This module revolves around a memory structure that represents a blast or
an iprscan result. The schema of this structure is:
result = {'query':the_query_sequence,
'matches': [a_list_of_matches(hits in the blast terminology)]
}
The sequence can have: name, description, annotations={'database':some db} and
len(sequence).
Every match is a dict.
match = {'subject':the subject sequence
'start' :match start position in bp in query
'end' :match end position in bp in query
'subject_start' : match start position in bp in subject
'subject_end' :match end position in bp in subject
'scores' :a dict with the scores
'match_parts': [a list of match_parts(hsps in the blast lingo)]
'evidences' : [a list of tuples for the iprscan]
}
All the scores are held in a dict
scores = {'key1': value1, 'key2':value2}
For instance the keys could be expect, similarity and identity for the blast
match_part is a dict:
match_part = {'query_start' : the query start in the alignment in bp
'query_end' : the query end in the alignment in bp
'query_strand' : 1 or -1
'subject_start' : the subject start in the alignment in bp
'subject_end' : the subject end in the alignment in bp
'subject_strand' : 1 or -1
'scores' :a dict with the scores
}
Iprscan has several evidences generated by different programs and databases
for every match. Every evidence is similar to a match.
'''
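# A minimal illustration of the structure described above (all names and
# numbers below are hypothetical, shown only to make the schema concrete):
#
# result = {'query': {'name': 'seq1', 'length': 450},
#           'matches': [{'subject': {'name': 'hit1', 'length': 500},
#                        'start': 0, 'end': 99,
#                        'subject_start': 10, 'subject_end': 109,
#                        'scores': {'expect': 1e-20},
#                        'match_parts': [{'query_start': 0, 'query_end': 99,
#                                         'query_strand': 1,
#                                         'subject_start': 10,
#                                         'subject_end': 109,
#                                         'subject_strand': 1,
#                                         'scores': {'expect': 1e-20,
#                                                    'identity': 98.0}}]}]}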
# Copyright 2009 Jose Blanca, Peio Ziarsolo, COMAV-Univ. Politecnica Valencia
# This file is part of franklin.
# franklin is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# franklin is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with franklin. If not, see <http://www.gnu.org/licenses/>.
from __future__ import division
import itertools
import copy
import os
from math import log10
from crumbs.utils.optional_modules import NCBIXML
from crumbs.utils.tags import SUBJECT, QUERY, ELONGATED
from crumbs.utils.segments_utils import merge_overlaping_segments
def _text_blasts_in_file(fhand):
    'It yields the text chunks running from one Query= line to the next'
cache = ''
first_time = True
for line in fhand:
if line.startswith('Query='):
if first_time:
cache = ''
first_time = False
else:
yield cache
cache = ''
cache += line
else:
if not first_time:
yield cache
def _split_description(string):
'It splits the description'
items = string.split(' ', 1)
name = items[0]
desc = items[1] if len(items) == 2 else None
return name, desc
def _text_blast_parser(fhand):
'It parses the blast results'
result = None
previous_query = None
for blast in _text_blasts_in_file(fhand):
in_query_def = False
in_subject_def = False
for line in blast.splitlines():
line = line.strip()
if not line:
continue
if line.startswith('Query='):
query_name = line.split('=')[-1].strip()
query_name, query_desc = _split_description(query_name)
in_query_def = True
subject_name = None
if line.startswith('Subject=') or line.startswith('>'):
if line.startswith('>'):
subject_name = line[1:].strip()
else:
subject_name = line.split('=')[-1].strip()
subject_name, subject_desc = _split_description(subject_name)
in_subject_def = True
query_start, query_end = None, None
subject_start, subject_end = None, None
query_strand, subject_strand = None, None
score, expect, identity = None, None, None
if line.startswith('Length='):
length = int(line.split('=')[-1].strip())
if in_query_def and query_name != previous_query:
if result is not None and result['matches']:
result = _fix_matches(result, score_keys=['expect',
'score'])
if result:
yield result
query_length = length
in_query_def = False
if query_desc:
query = {'name': query_name, 'description': query_desc,
'length': query_length}
else:
query = {'name': query_name, 'length': query_length}
matches = []
result = {'query': query,
'matches': matches}
previous_query = query_name
elif in_subject_def:
subject_length = length
if subject_desc:
subject = {'name': subject_name,
'description': subject_desc,
'length': subject_length}
else:
subject = {'name': subject_name,
'length': subject_length}
in_subject_def = False
matches.append({'subject': subject, 'match_parts': []})
if subject_name is None:
continue
if line.startswith('Score') or line.startswith('Effective'):
if score is not None:
match_part = {'subject_start': subject_start,
'subject_end': subject_end,
'subject_strand': subject_strand,
'query_start': query_start,
'query_end': query_end,
'query_strand': query_strand,
'scores': {'expect': expect,
'identity': identity,
'score': score}}
matches[-1]['match_parts'].append(match_part)
score, expect, identity = None, None, None
query_strand, subject_strand = None, None
query_start, query_end = None, None
subject_start, subject_end = None, None
if line.startswith('Score'):
items = line.split()
score = float(items[2])
expect = float(items[-1])
elif line.startswith('Identities'):
items = line.split()
identity = float(items[3].strip('(')[:-3])
elif line.startswith('Strand'):
strands = line.split('=')[-1]
strands = strands.split('/')
query_strand = 1 if strands[0] == 'Plus' else -1
subject_strand = 1 if strands[1] == 'Plus' else -1
if query_strand and line.startswith('Query'):
items = line.split()
if query_start is None:
query_start = int(items[1]) - 1
query_end = int(items[-1]) - 1
if query_strand and line.startswith('Sbjct'):
items = line.split()
if subject_start is None:
subject_start = int(items[1]) - 1
subject_end = int(items[-1]) - 1
else:
if result is not None and result['matches']:
result = _fix_matches(result, score_keys=['expect', 'score'])
if result:
yield result
class TextBlastParser(object):
    'It parses the text output of a blast result'
def __init__(self, fhand):
'The init requires a file to be parsed'
self._gen = _text_blast_parser(fhand)
def __iter__(self):
'Part of the iterator protocol'
return self
def next(self):
'It returns the next blast result'
return self._gen.next()
DEFAULT_TABBLAST_FORMAT = ('query', 'subject', 'identity', 'alignment_length',
'mismatches', 'gap_open', 'query_start',
'query_end', 'subject_start', 'subject_end',
'expect', 'score')
def _lines_for_every_tab_blast(fhand, line_format):
'It returns the lines for every query in the tabular blast'
ongoing_query = None
match_parts = []
for line in fhand:
items = line.strip().split()
if len(line_format) != len(items):
msg = 'Malformed line. The line has an unexpected number of items.'
msg += '\nExpected format was: ' + ' '.join(line_format) + '\n'
msg += 'Line was: ' + line + '\n'
raise RuntimeError(msg)
items = dict(zip(line_format, items))
query = items['query']
subject = items['subject']
if 'query_length' in items:
query_len = int(items['query_length'])
else:
query_len = None
if 'subject_length' in items:
subject_len = int(items['subject_length'])
else:
subject_len = None
locations = ('query_start', 'query_end', 'subject_start',
'subject_end')
match_part = {}
for field in locations:
if field in items:
match_part[field] = int(items[field]) - 1
score_fields = ('expect', 'score', 'identity')
scores = {}
for field in score_fields:
if field in items:
scores[field] = float(items[field])
if scores:
match_part['scores'] = scores
if ongoing_query is None:
ongoing_query = query
match_parts.append({'subject': subject, 'match_part': match_part,
'subject_length': subject_len})
elif query == ongoing_query:
match_parts.append({'subject': subject, 'match_part': match_part,
'subject_length': subject_len})
else:
yield ongoing_query, query_len, match_parts
match_parts = [{'subject':subject, 'match_part':match_part,
'subject_length': subject_len}]
ongoing_query = query
if ongoing_query:
yield ongoing_query, query_len, match_parts
def _group_match_parts_by_subject(match_parts):
'It yields lists of match parts that share the subject'
parts = []
ongoing_subject = None
for match_part in match_parts:
subject = match_part['subject']
subject_length = match_part['subject_length']
if ongoing_subject is None:
parts.append(match_part['match_part'])
ongoing_subject = subject
ongoing_subject_length = subject_length
elif ongoing_subject == subject:
parts.append(match_part['match_part'])
else:
yield ongoing_subject, ongoing_subject_length, parts
parts = [match_part['match_part']]
ongoing_subject = subject
ongoing_subject_length = subject_length
else:
yield ongoing_subject, ongoing_subject_length, parts
def _tabular_blast_parser(fhand, line_format):
'Parses the tabular output of a blast result and yields Alignment result'
if hasattr(fhand, 'seek'):
fhand.seek(0)
for qname, qlen, match_parts in _lines_for_every_tab_blast(fhand,
line_format):
matches = []
# pylint: disable=C0301
for sname, slen, match_parts in _group_match_parts_by_subject(match_parts):
# match start and end
match_start, match_end = None, None
match_subject_start, match_subject_end = None, None
for match_part in match_parts:
if (match_start is None or
match_part['query_start'] < match_start):
match_start = match_part['query_start']
if match_end is None or match_part['query_end'] > match_end:
match_end = match_part['query_end']
if (match_subject_start is None or
match_part['subject_start'] < match_subject_start):
match_subject_start = match_part['subject_start']
if (match_subject_end is None or
match_part['subject_end'] > match_subject_end):
match_subject_end = match_part['subject_end']
subject = {'name': sname}
if slen:
subject['length'] = slen
match = {'subject': subject,
'start': match_start,
'end': match_end,
'subject_start': match_subject_start,
'subject_end': match_subject_end,
'scores': {'expect': match_parts[0]['scores']['expect']},
'match_parts': match_parts}
matches.append(match)
if matches:
query = {'name': qname}
if qlen:
query['length'] = qlen
yield {'query': query, 'matches': matches}
class TabularBlastParser(object):
'It parses the tabular output of a blast result'
def __init__(self, fhand, line_format=DEFAULT_TABBLAST_FORMAT):
'The init requires a file to be parsed'
self._gen = _tabular_blast_parser(fhand, line_format)
def __iter__(self):
'Part of the iterator protocol'
return self
def next(self):
'It returns the next blast result'
return self._gen.next()
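# Usage sketch (the file name is hypothetical); each iteration yields one
# query's result dict in the schema documented at the top of this module:
#
#     with open('hits.blast.tab') as blast_fhand:
#         for result in TabularBlastParser(blast_fhand):
#             print result['query']['name'], len(result['matches'])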
class BlastParser(object):
'''An iterator blast parser that yields the blast results in a
multiblast file'''
def __init__(self, fhand, subj_def_as_accesion=None):
'The init requires a file to be parsed'
fhand.seek(0, 0)
sample = fhand.read(10)
if sample and 'xml' not in sample:
raise ValueError('Not a xml file')
fhand.seek(0, 0)
self._blast_file = fhand
metadata = self._get_blast_metadata()
blast_version = metadata['version']
plus = metadata['plus']
self.db_name = metadata['db_name']
self._blast_file.seek(0, 0)
if ((blast_version and plus) or
(blast_version and blast_version > '2.2.21')):
self.use_query_def_as_accession = True
self.use_subject_def_as_accession = True
else:
self.use_query_def_as_accession = True
self.use_subject_def_as_accession = False
if subj_def_as_accesion is not None:
self.use_subject_def_as_accession = subj_def_as_accesion
# we use the biopython parser
# if there are no results we put None in our blast_parse results
self._blast_parse = None
if fhand.read(1) == '<':
fhand.seek(0)
self._blast_parse = NCBIXML.parse(fhand)
def __iter__(self):
'Part of the iterator protocol'
return self
def _create_result_structure(self, bio_result):
'Given a BioPython blast result it returns our result structure'
# the query name and definition
definition = bio_result.query
if self.use_query_def_as_accession:
items = definition.split(' ', 1)
name = items[0]
if len(items) > 1:
definition = items[1]
else:
definition = None
else:
name = bio_result.query_id
definition = definition
if definition is None:
definition = "<unknown description>"
# length of query sequence
length = bio_result.query_letters
# now we can create the query sequence
query = {'name': name, 'description': definition, 'length': length}
# now we go for the hits (matches)
matches = []
for alignment in bio_result.alignments:
# the subject sequence
if self.use_subject_def_as_accession:
items = alignment.hit_def.split(' ', 1)
name = items[0]
if len(items) > 1:
definition = items[1]
else:
definition = None
else:
name = alignment.accession
definition = alignment.hit_def
if definition is None:
definition = "<unknown description>"
length = alignment.length
id_ = alignment.hit_id
subject = {'name': name, 'description': definition,
'length': length, 'id': id_}
# the hsps (match parts)
match_parts = []
match_start, match_end = None, None
match_subject_start, match_subject_end = None, None
for hsp in alignment.hsps:
expect = hsp.expect
subject_start = hsp.sbjct_start
subject_end = hsp.sbjct_end
query_start = hsp.query_start
query_end = hsp.query_end
hsp_length = len(hsp.query)
# We have to check the subject strand
if subject_start < subject_end:
subject_strand = 1
else:
subject_strand = -1
subject_start, subject_end = (subject_end,
subject_start)
# Also the query strand
if query_start < query_end:
query_strand = 1
else:
query_strand = -1
query_start, query_end = query_end, query_start
try:
similarity = hsp.positives * 100.0 / float(hsp_length)
except TypeError:
similarity = None
try:
identity = hsp.identities * 100.0 / float(hsp_length)
except TypeError:
identity = None
match_parts.append({'subject_start': subject_start,
'subject_end': subject_end,
'subject_strand': subject_strand,
'query_start': query_start,
'query_end': query_end,
'query_strand': query_strand,
'scores': {'similarity': similarity,
'expect': expect,
'identity': identity}
})
# It takes the first loc and the last loc of the hsp to
# determine hit start and end
if match_start is None or query_start < match_start:
match_start = query_start
if match_end is None or query_end > match_end:
match_end = query_end
if (match_subject_start is None or
subject_start < match_subject_start):
match_subject_start = subject_start
if (match_subject_end is None or
subject_end > match_subject_end):
match_subject_end = subject_end
matches.append({
'subject': subject,
'start': match_start,
'end': match_end,
'subject_start': match_subject_start,
'subject_end': match_subject_end,
'scores': {'expect': match_parts[0]['scores']['expect']},
'match_parts': match_parts})
result = {'query': query, 'matches': matches}
return result
def _get_blast_metadata(self):
'It gets blast parser version'
tell_ = self._blast_file.tell()
version = None
db_name = None
plus = False
for line in self._blast_file:
line = line.strip()
if line.startswith('<BlastOutput_version>'):
version = line.split('>')[1].split('<')[0].split()[1]
if line.startswith('<BlastOutput_db>'):
db_name = line.split('>')[1].split('<')[0]
db_name = os.path.basename(db_name)
if version is not None and db_name is not None:
break
if version and '+' in version:
plus = True
version = version[:-1]
self._blast_file.seek(tell_)
return {'version': version, 'plus': plus, 'db_name': db_name}
def next(self):
'It returns the next blast result'
if self._blast_parse is None:
raise StopIteration
else:
bio_result = self._blast_parse.next()
# now we have to change this biopython blast_result in our
# structure
our_result = self._create_result_structure(bio_result)
return our_result
class ExonerateParser(object):
    '''Exonerate parser; it is an iterator that yields the result for each
    query separately'''
def __init__(self, fhand):
        'The init requires a file to be parsed'
self._fhand = fhand
self._exonerate_results = self._results_query_from_exonerate()
def __iter__(self):
'Part of the iterator protocol'
return self
def _results_query_from_exonerate(self):
'''It takes the exonerate cigar output file and yields the result for
each query. The result is a list of match_parts '''
self._fhand.seek(0, 0)
cigar_dict = {}
for line in self._fhand:
if not line.startswith('cigar_like:'):
continue
items = line.split(':', 1)[1].strip().split()
query_id = items[0]
if query_id not in cigar_dict:
cigar_dict[query_id] = []
cigar_dict[query_id].append(items)
for query_id, values in cigar_dict.items():
yield values
@staticmethod
def _create_structure_result(query_result):
        '''It creates the result dictionary structure given a list of
        match_parts of a query_id'''
# TODO add to the match the match subject start and end
struct_dict = {}
query_name = query_result[0][0]
query_length = int(query_result[0][9])
query = {'name': query_name, 'length': query_length}
struct_dict['query'] = query
struct_dict['matches'] = []
for match_part_ in query_result:
(query_name, query_start, query_end, query_strand, subject_name,
subject_start, subject_end, subject_strand, score, query_length,
subject_length, similarity) = match_part_
query_start = int(query_start)
# they number the positions between symbols
# A C G T
# 0 1 2 3 4
# Hence the subsequence "CG" would have start=1, end=3, and length=2
# but we would say start=1 and end=2
query_end = int(query_end) - 1
subject_start = int(subject_start)
subject_end = int(subject_end) - 1
query_strand = _strand_transform(query_strand)
subject_strand = _strand_transform(subject_strand)
score = int(score)
similarity = float(similarity)
# For each line , It creates a match part dict
match_part = {}
match_part['query_start'] = query_start
match_part['query_end'] = query_end
match_part['query_strand'] = query_strand
match_part['subject_start'] = subject_start
match_part['subject_end'] = subject_end
match_part['subject_strand'] = subject_strand
match_part['scores'] = {'score': score, 'similarity': similarity}
# Check if the match is already added to the struct. A match is
# defined by a list of part matches between a query and a subject
match_num = _match_num_if_exists_in_struc(subject_name,
struct_dict)
if match_num is not None:
match = struct_dict['matches'][match_num]
if match['start'] > query_start:
match['start'] = query_start
if match['end'] < query_end:
match['end'] = query_end
if match['scores']['score'] < score:
match['scores']['score'] = score
match['match_parts'].append(match_part)
else:
match = {}
match['subject'] = {'name': subject_name,
'length': int(subject_length)}
match['start'] = query_start
match['end'] = query_end
match['scores'] = {'score': score}
match['match_parts'] = []
match['match_parts'].append(match_part)
struct_dict['matches'].append(match)
return struct_dict
def next(self):
        '''It returns the next exonerate hit'''
query_result = self._exonerate_results.next()
return self._create_structure_result(query_result)
def _strand_transform(strand):
    '''It transforms the +/- strand symbols into the 1/-1 integers we use'''
if strand == '-':
return -1
elif strand == '+':
return 1
def _match_num_if_exists_in_struc(subject_name, struct_dict):
    'It returns the index of the match about the given subject, or None'
for i, match in enumerate(struct_dict['matches']):
if subject_name == match['subject']['name']:
return i
return None
def get_alignment_parser(kind):
    '''It returns a parser depending on the aligner kind'''
if 'blast_tab' == kind:
parser = TabularBlastParser
elif 'blast_text' == kind:
parser = TextBlastParser
elif 'blast' in kind:
parser = BlastParser
else:
parsers = {'exonerate': ExonerateParser}
parser = parsers[kind]
return parser
def get_match_score(match, score_key, query=None, subject=None):
'''Given a match it returns its score.
It tries to get the score from the match, if it's not there it goes for
the first match_part.
It can also be a derived score like the incompatibility. All derived scores
begin with d_
'''
# the score can be in the match itself or in the first
# match_part
if score_key in match['scores']:
score = match['scores'][score_key]
else:
# the score is taken from the best hsp (the first one)
score = match['match_parts'][0]['scores'][score_key]
return score
def get_match_scores(match, score_keys, query, subject):
'''It returns the scores for one match.
scores should be a list and it will return a list of scores.
'''
scores_res = []
for score_key in score_keys:
score = get_match_score(match, score_key, query, subject)
scores_res.append(score)
return scores_res
def alignment_results_scores(results, scores, filter_same_query_subject=True):
'''It returns the list of scores for all results.
For instance, for a blast a generator with all e-values can be generated.
By default, the results with the same query and subject will be filtered
out.
The scores can be a single one or a list of them.
'''
# for each score we want a list to gather the results
score_res = []
for score in scores:
score_res.append([])
for result in results:
query = result['query']
for match in result['matches']:
subject = match['subject']
if (filter_same_query_subject and query is not None and subject is
not None and query['name'] == subject['name']):
continue
# all the scores for this match
score_values = get_match_scores(match, scores, query, subject)
# we append each score to the corresponding result list
for index, value in enumerate(score_values):
score_res[index].append(value)
if len(score_res) == 1:
return score_res[0]
else:
return score_res
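# Sketch: alignment_results_scores(BlastParser(blast_fhand), ['expect']) would
# return the flat list of e-values, one per match, for every query in the
# results (blast_fhand being a hypothetical open XML blast file).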
def build_relations_from_aligment(fhand, query_name, subject_name):
    '''It returns a relations dict given an alignment in markx10 format.
    The alignment must be only between two sequences, query against subject
'''
    # we parse the alignment
in_seq_section = 0
seq, seq_len, al_start = None, None, None
for line in fhand:
line = line.strip()
if not line:
continue
if line[0] == '>' and line[1] != '>':
if in_seq_section:
seq = {'seq': seq,
'length': seq_len,
'al_start': al_start - 1,
'name': query_name}
if in_seq_section == 1:
seq0 = seq
in_seq_section += 1
seq = ''
continue
if not in_seq_section:
continue
if '; sq_len:' in line:
seq_len = int(line.split(':')[-1])
if '; al_display_start:' in line:
al_start = int(line.split(':')[-1])
if line[0] not in (';', '#'):
seq += line
seq1 = {'seq': seq,
'length': seq_len,
'al_start': al_start - 1,
'name': subject_name}
# now we get the segments
gap = '-'
pos_seq0 = seq0['al_start']
pos_seq1 = seq1['al_start']
segment_start = None
segments = []
for ali_pos in range(len(seq1['seq'])):
try:
nucl0, nucl1 = seq0['seq'][ali_pos + 1], seq1['seq'][ali_pos + 1]
if (nucl0 == gap or nucl1 == gap) and segment_start:
do_segment = True
segment_end = pos_seq0 - 1, pos_seq1 - 1
else:
do_segment = False
except IndexError:
do_segment = True
segment_end = pos_seq0, pos_seq1
if do_segment:
segment = {seq0['name']: (segment_start[0], segment_end[0]),
seq1['name']: (segment_start[1], segment_end[1]), }
segments.append(segment)
segment_start = None
if nucl0 != gap and nucl1 != gap and segment_start is None:
segment_start = pos_seq0, pos_seq1
if nucl0 != gap:
pos_seq0 += 1
if nucl1 != gap:
pos_seq1 += 1
relations = {}
for seg in segments:
for seq_name, limits in seg.items():
if seq_name not in relations:
relations[seq_name] = []
relations[seq_name].append(limits)
return relations
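# Sketch of the returned value (names and coordinates are hypothetical): each
# key is a sequence name and each value lists the ungapped aligned segments as
# (start, end) tuples in that sequence, e.g.
# {'query1': [(0, 120), (126, 300)], 'subject1': [(10, 130), (136, 310)]}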
def _get_match_score(match, score_key, query=None, subject=None):
'''Given a match it returns its score.
It tries to get the score from the match, if it's not there it goes for
the first match_part.
'''
# the score can be in the match itself or in the first
# match_part
if score_key in match['scores']:
score = match['scores'][score_key]
else:
# the score is taken from the best hsp (the first one)
score = match['match_parts'][0]['scores'][score_key]
return score
def _score_above_threshold(score, min_score, max_score, log_tolerance,
log_best_score):
'It checks if the given score is a good one'
if log_tolerance is None:
if min_score is not None and score >= min_score:
match_ok = True
elif max_score is not None and score <= max_score:
match_ok = True
else:
match_ok = False
else:
if max_score is not None and score == 0.0:
match_ok = True
elif min_score is not None and score <= min_score:
match_ok = False
elif max_score is not None and score >= max_score:
match_ok = False
elif abs(log10(score) - log_best_score) < log_tolerance:
match_ok = True
else:
match_ok = False
return match_ok
def _create_scores_mapper_(score_key, score_tolerance=None,
max_score=None, min_score=None):
'It creates a mapper that keeps only the best matches'
if score_tolerance is not None:
log_tolerance = log10(score_tolerance)
else:
log_tolerance = None
def map_(alignment):
'''It returns an alignment with the best matches'''
if alignment is None:
return None
if log_tolerance is None:
log_best_score = None
else:
# score of the best match
try:
best_match = alignment['matches'][0]
best_score = _get_match_score(best_match, score_key)
if best_score == 0.0:
log_best_score = 0.0
else:
log_best_score = log10(best_score)
except IndexError:
log_best_score = None
filtered_matches = []
for match in alignment['matches']:
filtered_match_parts = []
for match_part in match['match_parts']:
score = match_part['scores'][score_key]
if _score_above_threshold(score, min_score, max_score,
log_tolerance, log_best_score):
filtered_match_parts.append(match_part)
match['match_parts'] = filtered_match_parts
if not len(match['match_parts']):
continue
# is this match ok?
match_score = get_match_score(match, score_key)
if _score_above_threshold(match_score, min_score, max_score,
log_tolerance, log_best_score):
filtered_matches.append(match)
alignment['matches'] = filtered_matches
return alignment
return map_
def _create_best_scores_mapper(score_key, score_tolerance=None,
max_score=None, min_score=None):
'It creates a mapper that keeps only the best matches'
return _create_scores_mapper_(score_key, score_tolerance=score_tolerance,
max_score=max_score, min_score=min_score)
def _create_scores_mapper(score_key, max_score=None, min_score=None):
'It creates a mapper that keeps only the best matches'
if max_score is None and min_score is None:
raise ValueError('Either max_score or min_score should be given')
return _create_scores_mapper_(score_key, max_score=max_score,
min_score=min_score)
def _create_deepcopy_mapper():
'It creates a mapper that does a deepcopy of the alignment'
def map_(alignment):
'It does the deepcopy'
return copy.deepcopy(alignment)
return map_
def _create_empty_filter():
'It creates a filter that removes the false items'
def filter_(alignment):
'It filters the empty alignments'
if alignment:
return True
else:
return False
return filter_
def _fix_match_scores(match, score_keys):
'Given a match it copies the given scores from the first match_part'
scores = {}
if not match['match_parts']:
return
match_part = match['match_parts'][0]
for key in score_keys:
scores[key] = match_part['scores'][key]
match['scores'] = scores
def _fix_match_start_end(match):
'Given a match it fixes the start and end based on the match_parts'
match_start, match_end = None, None
match_subject_start, match_subject_end = None, None
for match_part in match['match_parts']:
if ('query_start' in match_part and
(match_start is None or
match_part['query_start'] < match_start)):
match_start = match_part['query_start']
if ('query_end' in match_part and
(match_end is None or match_part['query_end'] > match_end)):
match_end = match_part['query_end']
if ('subject_start' in match_part and
(match_subject_start is None or
match_part['subject_start'] < match_subject_start)):
match_subject_start = match_part['subject_start']
if ('subject_end' in match_part and
(match_subject_end is None or
match_part['subject_end'] > match_subject_end)):
match_subject_end = match_part['subject_end']
if match_start is not None:
match['start'] = match_start
if match_end is not None:
match['end'] = match_end
if match_subject_start is not None:
match['subject_start'] = match_subject_start
if match_subject_end is not None:
match['subject_end'] = match_subject_end
def _fix_matches(alignment, score_keys=None):
'It removes the empty match_parts and the alignments with no matches'
if alignment is None:
return None
new_matches = []
for match in alignment['matches']:
if len(match['match_parts']):
if score_keys:
_fix_match_scores(match, score_keys)
_fix_match_start_end(match)
new_matches.append(match)
if not new_matches:
return None
else:
alignment['matches'] = new_matches
return alignment
def _create_fix_matches_mapper():
    '''It creates a function that removes alignments with no matches.
It also removes matches with no match_parts
'''
return _fix_matches
def covered_segments_from_match_parts(match_parts, in_query=True,
merge_segments_closer=1):
'''Given a list of match_parts it returns the covered segments.
match_part 1 ------- -----> -----------
match_part 2 ------
It returns the list of segments covered by the match parts either in the
query or in the subject.
merge_segments_closer is an integer. Segments closer than the given
number of residues will be merged.
'''
# we collect all start and ends
segments = []
for match_part in match_parts:
if in_query:
start = match_part['query_start']
end = match_part['query_end']
else:
start = match_part['subject_start']
end = match_part['subject_end']
        if start > end:  # a reversed item
start, end = end, start
segments.append((start, end))
return merge_overlaping_segments(segments,
merge_segments_closer=merge_segments_closer)
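# For instance (a sketch assuming merge_overlaping_segments joins overlapping
# ranges), two match_parts spanning 0-99 and 90-199 of the query would be
# reported as the single covered segment (0, 199).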
def elongate_match_part_till_global(match_part, query_length, subject_length,
align_completely):
    '''It stretches the match_part to convert it into a global alignment.
    We assume that the subject or the query should be completely aligned and we
    stretch the match part to do it.
    The elongated match_parts will be marked unless the segment added is
    shorter than the mark_strech_longer integer.
'''
assert align_completely in (SUBJECT, QUERY)
# start and ends
if match_part['subject_start'] <= match_part['subject_end']:
subject_start = match_part['subject_start']
subject_end = match_part['subject_end']
subject_rev = False
else:
subject_start = match_part['subject_end']
subject_end = match_part['subject_start']
subject_rev = True
if match_part['query_start'] <= match_part['query_end']:
query_start = match_part['query_start']
query_end = match_part['query_end']
query_rev = False
else:
query_start = match_part['query_end']
query_end = match_part['query_start']
query_rev = True
# how much do we elongate?
if align_completely == SUBJECT:
stretch_left = subject_start
max_left_strecth = query_start
stretch_right = subject_length - subject_end - 1
max_right_stretch = query_length - query_end - 1
else:
stretch_left = query_start
max_left_strecth = subject_start
stretch_right = query_length - query_end - 1
max_right_stretch = subject_length - subject_end - 1
if stretch_left > max_left_strecth:
stretch_left = max_left_strecth
if stretch_right > max_right_stretch:
stretch_right = max_right_stretch
# The elongation
if subject_rev:
match_part['subject_end'] -= stretch_left
else:
match_part['subject_start'] -= stretch_left
if query_rev:
match_part['query_end'] -= stretch_left
else:
match_part['query_start'] -= stretch_left
if subject_rev:
match_part['subject_start'] += stretch_right
else:
match_part['subject_end'] += stretch_right
if query_rev:
match_part['query_start'] += stretch_right
else:
match_part['query_end'] += stretch_right
    # The tagging
streched_length = stretch_left + stretch_right
if streched_length:
match_part[ELONGATED] = streched_length
# reverse
def elongate_match_parts_till_global(match_parts, query_length,
subject_length, align_completely):
    '''It stretches the match_part to convert it into a global alignment.
We assume that the subject should be completely aligned and we stretch the
match part to do it.
The elongated match_parts will be marked unless the segment added is
shorter than the mark_strech_longer integer.
'''
return [elongate_match_part_till_global(mp, query_length, subject_length,
align_completely=align_completely)
for mp in match_parts]
def _match_length(match, length_from_query):
'''It returns the match length.
It does take into account only the length covered by match_parts.
'''
segments = covered_segments_from_match_parts(match['match_parts'],
length_from_query)
length = 0
for segment in segments:
match_part_len = segment[1] - segment[0] + 1
length += match_part_len
return length
def _match_part_length(match_part, length_in_query):
'It calculates the length of the match part'
if length_in_query:
return abs(match_part['query_end'] - match_part['query_start'])
else:
return abs(match_part['subject_end'] - match_part['subject_start'])
def _match_long_enough(match_length, total_length, min_num_residues,
min_percentage, length_in_query):
'It returns a boolean if the criteria is met'
if min_num_residues is not None:
if match_length >= min_num_residues:
match_ok = True
else:
match_ok = False
else:
percentage = (match_length / total_length) * 100.0
if percentage >= min_percentage:
match_ok = True
else:
match_ok = False
return match_ok
def _create_min_length_mapper(length_in_query, min_num_residues=None,
min_percentage=None, filter_match_parts=False):
'''It creates a mapper that removes short matches.
The length can be given in percentage or in number of residues.
The length can be from the query or the subject
filter_match_parts determines if every individual match_part is to be
filtered against the length requirement
'''
if not isinstance(length_in_query, bool):
raise ValueError('length_in_query should be a boolean')
if min_num_residues is None and min_percentage is None:
raise ValueError('min_num_residues or min_percentage should be given')
elif min_num_residues is not None and min_percentage is not None:
msg = 'Both min_num_residues or min_percentage can not be given at the'
msg += ' same time'
raise ValueError(msg)
def map_(alignment):
'''It returns an alignment with the matches that span long enough'''
if alignment is None:
return None
filtered_matches = []
query = alignment.get('query', None)
for match in alignment['matches']:
if match is None:
continue
if min_num_residues is None:
if length_in_query:
mol_length = query['length']
else:
mol_length = match['subject']['length']
else:
mol_length = None # it doesn't matter because we're after an
# absolute value
if filter_match_parts:
filtered_match_parts = []
for match_part in match['match_parts']:
match_part_length = _match_part_length(match_part,
length_in_query)
match_part_ok = _match_long_enough(match_part_length,
mol_length,
min_num_residues,
min_percentage,
length_in_query)
if match_part_ok:
filtered_match_parts.append(match_part)
match['match_parts'] = filtered_match_parts
if not len(match['match_parts']):
continue
filtered_matches.append(match)
else:
match_length = _match_length(match, length_in_query)
match_ok = _match_long_enough(match_length, mol_length,
min_num_residues,
min_percentage,
length_in_query)
if match_ok:
filtered_matches.append(match)
alignment['matches'] = filtered_matches
return alignment
return map_
MAPPER = 1
FILTER = 2
FILTER_COLLECTION = {'best_scores':
{'funct_factory': _create_best_scores_mapper,
'kind': MAPPER},
'score_threshold':
{'funct_factory': _create_scores_mapper,
'kind': MAPPER},
'min_length': {'funct_factory': _create_min_length_mapper,
'kind': MAPPER},
'deepcopy': {'funct_factory': _create_deepcopy_mapper,
'kind': MAPPER},
'fix_matches':
{'funct_factory': _create_fix_matches_mapper,
'kind': MAPPER},
'filter_empty':
{'funct_factory': _create_empty_filter,
'kind': FILTER},
}
def filter_alignments(alignments, config):
'''It filters and maps the given alignments.
The filters and maps to use will be decided based on the configuration.
'''
config = copy.deepcopy(config)
config.insert(0, {'kind': 'deepcopy'})
config.append({'kind': 'fix_matches'})
config.append({'kind': 'filter_empty'})
# create the pipeline
for conf in config:
funct_fact = FILTER_COLLECTION[conf['kind']]['funct_factory']
kind = FILTER_COLLECTION[conf['kind']]['kind']
del conf['kind']
function = funct_fact(**conf)
if kind == MAPPER:
alignments = itertools.imap(function, alignments)
else:
alignments = itertools.ifilter(function, alignments)
return alignments
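# Usage sketch with hypothetical thresholds: keep, for every alignment, only
# the matches with an expect value of at most 1e-10 that also cover at least
# 90% of the query:
#
#     config = [{'kind': 'score_threshold', 'score_key': 'expect',
#                'max_score': 1e-10},
#               {'kind': 'min_length', 'min_percentage': 90,
#                'length_in_query': True}]
#     for alignment in filter_alignments(BlastParser(blast_fhand), config):
#         ...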
|
gpl-3.0
| 7,515,614,803,057,729,000
| 37.778396
| 83
| 0.540637
| false
| 4.268404
| false
| false
| false
|
bjmain/host_choice_GWAS_arabiensis
|
PCA-based_Fst/fst_by_chr_plot.2.py
|
1
|
9451
|
#!/usr/bin/python
import matplotlib as MPL
MPL.use('agg') # no X (so show won't work)
from matplotlib.figure import Figure
from matplotlib.patches import Rectangle
#from matplotlib import rc #for adding italics. Via latex style
#rc('text', usetex=True)
import pylab as P
import math
import numpy
import commands
import sys
from scipy import stats
DATA_DIR='/mnt/lanzarobas/home/bradmain/arabiensis/VCFs/'
FST_LIM = [-0.05, 0.25]
DSTAT_LIM = [-40, 60]
#FST_COLOR = 'b'
FST_SIG_COLOR = 'b'
DSTAT_COLOR = 'r'
INV_HEIGHT=0.05
#TITLE="Sequence Differentiation Between Homozygous 2Rb Inversion States (PCA3 Split)"
TITLE="2) Genome-wide FST (sliding windows)\nbetween PCA Clusters"
LEG_LINES = []
LEG_LABELS = []
#input windowed FST from vcftools
fig, axes = P.subplots(ncols=2,nrows=3)
fig.subplots_adjust(left=None, bottom=None, right=None, top=None,
wspace=.01, hspace=None)
((N,chrX), (chr2R, chr2L), (chr3R, chr3L)) = axes
#((chrX,N), (chr2R, chr2L), (chr3R, chr3L)) = axes
#N.axis('off')
"""
#Second Y axis
chrXd = chrX.twinx()
chr2Rd = chr2R.twinx()
chr2Ld = chr2L.twinx()
chr3Rd = chr3R.twinx()
chr3Ld = chr3L.twinx()
"""
def smoothListGaussian(list,strippedXs=False,degree=5):
window=degree*2-1
weight=numpy.array([1.0]*window)
weightGauss=[]
for i in range(window):
i=i-degree+1
frac=i/float(window)
gauss=1/(numpy.exp((4*(frac))**2))
weightGauss.append(gauss)
weight=numpy.array(weightGauss)*weight
smoothed=[0.0]*(len(list)-window)
for i in range(len(smoothed)):
smoothed[i]=sum(numpy.array(list[i:i+window])*weight)/sum(weight)
return smoothed
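# (Note) smoothListGaussian convolves the list with a Gaussian window of width
# 2*degree-1, so the returned list is shorter than the input by that window
# length; the helper is defined here but not used further down this script.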
inversions=["2Rc","2Rb","2La"]
## plot inversions
inv={}
for line in open("/mnt/lanzarobas/home/bradmain/gambiae/gene_flow/pest_M/An_gambiae_karyotype.gtf"):
i=line.strip().split()
chr=i[0]
l=int(i[3])
r=int(i[4])
name=i[9].strip(";").strip('"')
if name not in inversions:
continue
num=int(i[-1].strip(";").strip('"'))
if chr not in inv:
inv[chr]={}
if name not in inv[chr]:
inv[chr][name]={}
inv[chr][name][num]=[l/1.0e6,r/1.0e6]
outer=[inv["2R"]["2Rb"][1][0],inv["2R"]["2Rb"][2][1]]
inner=[inv["2R"]["2Rb"][1][1],inv["2R"]["2Rb"][2][0]]
Couter=[inv["2R"]["2Rc"][1][0],inv["2R"]["2Rc"][2][1]]
Cinner=[inv["2R"]["2Rc"][1][1],inv["2R"]["2Rc"][2][0]]
outer2La=[inv["2L"]["2La"][1][0],inv["2L"]["2La"][2][1]]
inner2La=[inv["2L"]["2La"][1][1],inv["2L"]["2La"][2][0]]
#for N in inv["2R"]["2Rb"]:
# outer.append(inv["2R"]["2Rb"][N][1])
# inner.append(inv["2R"]["2Rb"][N][0])
print 'outer',outer
print 'inner',inner
#chr2R.plot(outer,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=5,alpha=0.5)
#chr2R.plot(inner,[INV_HEIGHT,INV_HEIGHT],'y-',linewidth=5)
#chr2R.plot(Couter,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=5,alpha=0.5)
#chr2R.plot(Cinner,[INV_HEIGHT,INV_HEIGHT],'g-',linewidth=5)
#chr2L.plot(outer2La,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=5,alpha=0.5)
#chr2L.plot(inner2La,[INV_HEIGHT,INV_HEIGHT],'y-',linewidth=5)
#chr3R.plot([12.5,38],[INV_HEIGHT,INV_HEIGHT],'y-',linewidth=5,alpha=0.5)
chr2R.plot(outer,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=15,alpha=0.5)
chr2R.plot(inner,[INV_HEIGHT,INV_HEIGHT],'y-',linewidth=15,label='2Rb inversion')
chrX.plot(inner,[INV_HEIGHT+1000,INV_HEIGHT+1000],'y-',linewidth=15,label='2Rb inversion') #just plotting out of range on X for legend purposes
chr2R.text(numpy.mean(inner)-.5,INV_HEIGHT-0.01,'b',fontweight='bold',fontsize=14)
chr2R.plot(Couter,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=15,alpha=0.5)
chr2R.plot(Cinner,[INV_HEIGHT,INV_HEIGHT],'g-',linewidth=15,label='2Rc inversion')
chrX.plot(Cinner,[INV_HEIGHT+1000,INV_HEIGHT+1000],'g-',linewidth=15,label='2Rc inversion') #just plotting out of range on X for legend purposes
chr2R.text(numpy.mean(Cinner)-.5,INV_HEIGHT-0.01,'c',fontweight='bold',fontsize=14)
#chr2L.plot(outer2La,[INV_HEIGHT,INV_HEIGHT],'k-',linewidth=15,alpha=0.5)
#chr2L.plot(inner2La,[INV_HEIGHT,INV_HEIGHT],'y-',linewidth=15)
chr3R.plot([12.5,38],[INV_HEIGHT,INV_HEIGHT],'r-',linewidth=15,alpha=0.5,label='3Ra inversion')
chrX.plot([12.5+1000,38+1000],[INV_HEIGHT,INV_HEIGHT],'r-',linewidth=15,alpha=0.5,label='3Ra inversion') #just plotting out of range on X for legend purposes
chr3R.text(numpy.mean([12.5,38]),INV_HEIGHT-0.01,'a',fontsize=14,fontweight='bold')
#chr3R.legend()
or7=[22.849252,22.858650]
or40=[22.823983,22.825656]
gr53=[24.694665,24.698605]
gr13=[24.811173,24.812613]
or39=[24.850239,24.851846]
or38=[24.857474,24.859095]
def fst_plotter(fst_files,FST_COLOR,style,newLEGEND):
fstD={}
fstmean={}
leg_done = False
for file in fst_files:
for line in open(file):
i=line.strip().split()
chr=i[0]
#skip unknown and Y chromosomes
if chr=="CHROM" or chr=="UNKN" or chr=="Y_unplaced":
continue
if chr not in fstD:
fstD[chr]={}
fstmean[chr]={}
pos=int(i[1])+24999 #moves x position to middle of 50kb bin
if i[2]=="-nan":
continue
fst=float(i[4]) #i[4] is the weighted fst
fstM=float(i[5]) #i[5] is the mean fst
if pos not in fstD[chr]:
fstD[chr][pos]=fst
fstmean[chr][pos]=fstM
F=[]
Fs=[]
for CHROM in fstD:
x=numpy.array(sorted(fstD[CHROM]))
xmean=sorted(fstmean[CHROM])
y=[]
ymean=[]
for i in x:
F.append(fstD[CHROM][i])
y.append(fstD[CHROM][i])
ymean.append(fstmean[CHROM][i])
ax = globals()['chr'+CHROM]
#tmp, = ax.plot(x/1.0e6, y, '-', color=FST_COLOR, linewidth=1.5)
#tmp, = ax.plot(x/1.0e6, y, style, color=FST_COLOR, linewidth=1.5,label=newLEGEND)
tmp, = ax.plot(x/1.0e6, y, style, color=FST_COLOR, linewidth=2,label=newLEGEND)
#if( not leg_done ):
# LEG_LINES.append(tmp)
# LEG_LABELS.append(r"$F_{\mathrm{ST}}$ pre- vs post-2006 $A. coluzzii$")
# leg_done = True
chrX.legend(fontsize=12)
#LEG_LINES.append(leg_fst_sig)
#LEG_LABELS.append(r"$F_{\mathrm{ST}}$ 99.9 percentile level")
# actually plot fst (on top)
#fst_plotter([DATA_DIR+"pca1_pca2.windowed.weir.fst"],'b','--', "PCA1 vs PCA2")
#fst_plotter([DATA_DIR+"pca3_pca2.windowed.weir.fst"],'k','-', "PCA3 vs PCA2")
#fst_plotter(["pca1_pca2.windowed.weir.fst"],'b','--', "PCA1 vs PCA2")
#fst_plotter(["pca3_pca2.windowed.weir.fst"],'k','-', "PCA3 vs PCA2")
fst_plotter(["pca3_pca1.windowed.weir.fst"],'orange','--', "Right PCA cluster vs left")
fst_plotter(["pca1_pca2.windowed.weir.fst"],'green','--', "Left PCA cluster vs middle")
fst_plotter(["pca3_pca2.windowed.weir.fst"],'k','--', "Right PCA cluster vs middle")
# chromosome names
for C in ['X', '2R', '2L', '3R', '3L']:
ax = globals()['chr'+C]
if( C[-1] == 'L' ):
x = 0.975
ha = 'right'
else:
x = 0.025
ha = 'left'
#ax.text(x, 0.95, r'\textbf{'+C+'}', size='xx-large', ha=ha, va='top', transform=ax.transAxes)
ax.text(x, 0.95, C, size='xx-large', ha=ha, va='top', transform=ax.transAxes)
chrX.set_ylabel("$F_{\mathrm{ST}}$",color='k',fontsize=24)
chr2R.set_ylabel("$F_{\mathrm{ST}}$",color='k',fontsize=24)
chr3R.set_ylabel("$F_{\mathrm{ST}}$",color='k',fontsize=24)
chr3R.set_xlabel(r"position [Mb]",fontsize=24)
chr3L.set_xlabel(r"position [Mb]",fontsize=24)
chr2L.get_yaxis().set_visible(False)
chr3L.get_yaxis().set_visible(False)
chrX.set_ylim(FST_LIM)
chrX.set_xlim(0,22)
chr2L.set_ylim(FST_LIM)
chr2R.set_ylim(FST_LIM)
chr3L.set_ylim(FST_LIM)
chr3R.set_ylim(FST_LIM)
#P.show()
chrX.set_title(TITLE, y=1.04, fontsize=24)
##################### PCA PLOT
human=[line.strip() for line in open("../pca/allhumanfed.txt")]
cattle=[line.strip() for line in open("../pca/allcattlefed.txt")]
cattlex=[]
cattley=[]
humanx=[]
humany=[]
# neither human- nor cattle-fed samples (needed by the else branch below)
gamx=[]
gamy=[]
for line in open("../pca/LUPI_maf_pca.eigenvec"):
i=line.strip().split()
pc1=i[2]
pc2=i[4]
genome_id=i[0]
if i[1] in human:
humanx.append(pc1)
humany.append(pc2)
#ax.text(pc1,pc2,genome_id)
elif i[1] in cattle:
cattlex.append(pc1)
cattley.append(pc2)
#ax.text(pc1,pc2,genome_id)
else:
print "not human or cattle-fed:", line.strip()
gamx.append(pc1)
gamy.append(pc2)
###P.text(pc1,pc2,i[1],color='g',fontsize=14)
ax = N
ax.set_xlim(-.4,.3)
ax.set_ylim(-.35,.45)
pos = ax.get_position()
pts = pos.get_points()
w = pts[1,0]-pts[0,0]
h = pts[1,1]-pts[0,1]
nw = w*0.6
nh = h*0.8
#x0 = pts[0,0]+(w-nw)/2.0
x0 = pts[0,0]+(w-nw)/3.4
y0 = pts[0,1]+0.01 #+(h-nh)
print pts, w, h
ax.set_position([x0, y0, nw, nh])
ax.plot(cattlex,cattley,'bo',label="cattlefed")
ax.plot(humanx,humany,'ro',label="humanfed")
#P.text(-.38,-.3,"P<0.01; humanfed vs cattlefed 2x3 Fisher Exact")
ax.set_xlabel("PCA1",fontsize=14)
ax.set_ylabel("PCA2",fontsize=14)
ax.set_xlim(-.4,.3)
ax.set_ylim(-.35,.45)
leg = ax.legend(numpoints=1, ncol=2, loc=8, bbox_to_anchor=(0.5, 1.01))
leg.get_frame().set_alpha(0.5)
#P.title(r"PCA on all \textit{An. arabiensis} SNPs",fontsize=20)
ax.set_title("1) PCA on Genome-wide SNPs",fontsize=24, y=1.34)
################ Final adjustments and save
fig.set_size_inches(14.4, 9.6)
#P.show()
#P.savefig('pca_based_fst.1.svg', dpi=300)
P.savefig('pca_based_fst.2.png', dpi=300)
#P.savefig('pca_based_fst.1.pdf')
|
mit
| 2,981,110,976,975,777,000
| 32.753571
| 157
| 0.623214
| false
| 2.429563
| false
| false
| false
|
Phoenyx/TruemaxScriptPackage
|
Truemax/moduleScene.py
|
1
|
8187
|
__author__ = 'sofiaelm'
import os
from Truemax.checkNaming import get_top_node
from Truemax.hfFixShading import hfFixBadShading
import Truemax.makeReference as makeReference
import Truemax.exportFBX as exportFBX
import Truemax.deleteDPLayers as deleteDPLayers
import Truemax.fixAllBelow as fixAllBelow
from Truemax import checkList
import manager
import maya.cmds as cmds
from pymel.all import mel
import pymel.core as pm
from pymel.all import *
# Reloads scripts when update is run
reload(fixAllBelow)
reload(exportFBX)
reload(checkList)
reload(deleteDPLayers)
reload(makeReference)
SCENE_FOLDER = "scenes"
TURNTABLE_FOLDER = "turnTable"
EXPORT_FOLDER = "export"
SOURCEIMAGES_FOLDER = "sourceimages"
# Gets first and last letter of username
def get_author_initials():
user = os.getenv('user', "na")
return str(user[0] + user[-1]).lower()
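# e.g. a (hypothetical) username "sofiaelm" gives the initials "sm"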
class ModuleScene(manager.Module):
cleanScene = "cleanScene"
def __init__(self, mngr):
manager.Module.__init__(self, mngr)
self.statusDir = None
if "assetslocation" in mngr.config:
self.statusDir = mngr.config["assetslocation"]
        # Reset check status when a new DAG object is created
cmds.scriptJob(event=["DagObjectCreated", lambda *args: self.reset_check_list()], protected=True)
def new_scene(self):
cmds.file(newFile=True, force=True)
location = "{0}{1}{2}".format(os.path.dirname(os.path.realpath(__file__)), os.path.sep, self.cleanScene)
self.set_project(location)
cmds.file("cleanScene.ma", open=True)
select_dir = pm.fileDialog2(fileMode=2, dialogStyle=3, startingDirectory=self.statusDir)
if select_dir != None:
print select_dir[0]
sDir = str(select_dir[0])
result = cmds.promptDialog(
title='Asset Name',
message='Enter Name:',
button=['OK', 'Cancel'],
defaultButton='OK',
cancelButton='Cancel',
dismissString='Cancel')
if result == 'OK':
assetName = cmds.promptDialog(query=True, text=True)
print assetName
# makes project folder
projectFolder = os.path.join(sDir, assetName)
if not os.path.exists(projectFolder):
print "Creating {0}".format(projectFolder)
os.makedirs(projectFolder)
# makes scenes folder
scenesFolder = os.path.join(projectFolder, SCENE_FOLDER)
if not os.path.exists(scenesFolder):
print "Creating {0}".format(scenesFolder)
os.makedirs(scenesFolder)
# makes turntable folder
turntableFolder = os.path.join(projectFolder, TURNTABLE_FOLDER)
if not os.path.exists(turntableFolder):
print "Creating {0}".format(turntableFolder)
os.makedirs(turntableFolder)
# makes export folder
exportFolder = os.path.join(projectFolder, EXPORT_FOLDER)
if not os.path.exists(exportFolder):
print "Creating {0}".format(exportFolder)
os.makedirs(exportFolder)
# makes sourceimages folder
sourceimagesFolder = os.path.join(projectFolder, SOURCEIMAGES_FOLDER)
if not os.path.exists(sourceimagesFolder):
print "Creating {0}".format(sourceimagesFolder)
os.makedirs(sourceimagesFolder)
fileName = assetName + "_v001_" + get_author_initials() + ".ma"
fileSavePath = os.path.join(scenesFolder, fileName)
print fileSavePath
cmds.file(rename=fileSavePath)
cmds.file(save=True)
def set_project(self, location):
mel.setProject(location)
def setProjectAsCurrDirectory(self):
filePath = cmds.file(query=True, expandName=True)
directory = os.path.dirname(filePath)
project = os.path.dirname(directory)
self.set_project(project)
def importRefCube(self):
location = "{0}{1}{2}".format(os.path.dirname(os.path.realpath(__file__)), os.path.sep, self.cleanScene)
self.set_project(location)
cmds.file("refCube.ma", i=True)
self.setProjectAsCurrDirectory()
def update_check_list(self):
check_output = checkList.check_list()
output_errors = "\n".join(check_output[1])
if check_output[0]:
cmds.text(self.statusText, label=output_errors, edit=True, backgroundColor=[0, 1, 0])
else:
cmds.text(self.statusText, label=output_errors, edit=True, backgroundColor=[1, 0, 0])
def reset_check_list(self):
cmds.text(self.statusText, edit=True, backgroundColor=[1, 1, 0])
def select_hierachy(self):
cmds.select(hi=1)
def select_top_node(self):
cmds.select(get_top_node())
def pivot_at_origin(self):
self.select_top_node()
xform(zeroTransformPivots=1)
def create_ui(self):
if get_author_initials() == 'mj':
bg_colour = [0.9, 0.4, 1]
else:
bg_colour = [0.4, 0.4, 0.4]
tab = str(cmds.columnLayout())
cmds.separator(style="none")
cmds.frameLayout(collapsable=True, label="Common")
cmds.columnLayout()
cmds.button(command=lambda *args: self.new_scene(), label="New Work Scene", backgroundColor=bg_colour)
cmds.button(command=lambda *args: self.setProjectAsCurrDirectory(), label="Set Project",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: self.importRefCube(), label="Import Reference Cube",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: mel.Reset(), label="Create Playblast Turntable",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: exportFBX.export_asset(), label="Export as FBX", backgroundColor=bg_colour)
cmds.button(command=lambda *args: makeReference.make_reference(), label="Make Reference File",
backgroundColor=bg_colour)
cmds.setParent('..')
cmds.setParent('..')
cmds.frameLayout(collapsable=True, label="Status")
cmds.columnLayout(rowSpacing=2)
cmds.button(command=lambda *args: self.update_check_list(), label="Update Status", backgroundColor=bg_colour)
cmds.text(label="Status errors:", align="left", backgroundColor=[0.2, 0.2, 0.2], height=15)
self.statusText = cmds.text("Status", backgroundColor=[1, 1, 0])
self.statusText = cmds.text(self.statusText, query=True, fullPathName=True)
cmds.setParent('..')
cmds.setParent('..')
cmds.frameLayout(collapsable=True, label="Check List")
cmds.columnLayout(rowSpacing=2)
cmds.button(command=lambda *args: fixAllBelow.fixAllBelow(), label="Run All Fix Scripts Below",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: hfFixBadShading(), label="Fix Face Assignments on Scene Objects",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: mel.deleteUnusedNodes(), label="Delete Unused Nodes",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: self.select_top_node(), label="Select Top Node", backgroundColor=bg_colour)
cmds.button(command=lambda *args: self.select_hierachy(), label="Select Hierarchy", backgroundColor=bg_colour)
cmds.button(command=lambda *args: mel.FreezeTransformations(), label="Freeze Transformations",
backgroundColor=bg_colour)
cmds.button(command=lambda *args: mel.DeleteHistory(), label="Delete History", backgroundColor=bg_colour)
cmds.button(command=lambda *args: self.pivot_at_origin(), label="Pivot at Origin", backgroundColor=bg_colour)
cmds.button(command=lambda *args: deleteDPLayers.deleteDPLayers(), label="Delete Display Layers",
backgroundColor=bg_colour)
cmds.setParent('..')
cmds.setParent('..')
cmds.setParent('..')
return tab, "Scene"
def initModule(manager):
return ModuleScene(manager)
|
gpl-2.0
| 5,604,648,493,260,993,000
| 39.334975
| 118
| 0.636619
| false
| 3.786772
| false
| false
| false
|
webu/pybbm
|
pybb/models.py
|
1
|
20374
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.core.urlresolvers import reverse
from django.db import models, transaction, DatabaseError
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.html import strip_tags
from django.utils.translation import ugettext_lazy as _
from django.utils.timezone import now as tznow
from pybb.compat import get_user_model_path, get_username_field, get_atomic_func, slugify
from pybb import defaults
from pybb.profiles import PybbProfile
from pybb.util import unescape, FilePathGenerator, _get_markup_formatter
from annoying.fields import AutoOneToOneField
@python_2_unicode_compatible
class Category(models.Model):
name = models.CharField(_('Name'), max_length=80)
position = models.IntegerField(_('Position'), blank=True, default=0)
hidden = models.BooleanField(_('Hidden'), blank=False, null=False, default=False,
help_text=_('If checked, this category will be visible only for staff'))
slug = models.SlugField(_("Slug"), max_length=255, unique=True)
class Meta(object):
ordering = ['position']
verbose_name = _('Category')
verbose_name_plural = _('Categories')
def __str__(self):
return self.name
def forum_count(self):
return self.forums.all().count()
def get_absolute_url(self):
if defaults.PYBB_NICE_URL:
return reverse('pybb:category', kwargs={'slug': self.slug, })
return reverse('pybb:category', kwargs={'pk': self.id})
@property
def topics(self):
return Topic.objects.filter(forum__category=self).select_related()
@property
def posts(self):
return Post.objects.filter(topic__forum__category=self).select_related()
@python_2_unicode_compatible
class Forum(models.Model):
category = models.ForeignKey(Category, related_name='forums', verbose_name=_('Category'))
parent = models.ForeignKey('self', related_name='child_forums', verbose_name=_('Parent forum'),
blank=True, null=True)
name = models.CharField(_('Name'), max_length=80)
position = models.IntegerField(_('Position'), blank=True, default=0)
description = models.TextField(_('Description'), blank=True)
moderators = models.ManyToManyField(get_user_model_path(), blank=True, verbose_name=_('Moderators'))
updated = models.DateTimeField(_('Updated'), blank=True, null=True)
post_count = models.IntegerField(_('Post count'), blank=True, default=0)
topic_count = models.IntegerField(_('Topic count'), blank=True, default=0)
hidden = models.BooleanField(_('Hidden'), blank=False, null=False, default=False)
readed_by = models.ManyToManyField(get_user_model_path(), through='ForumReadTracker', related_name='readed_forums')
headline = models.TextField(_('Headline'), blank=True, null=True)
slug = models.SlugField(verbose_name=_("Slug"), max_length=255)
class Meta(object):
ordering = ['position']
verbose_name = _('Forum')
verbose_name_plural = _('Forums')
unique_together = ('category', 'slug')
def __str__(self):
return self.name
def update_counters(self):
self.topic_count = Topic.objects.filter(forum=self).count()
if self.topic_count:
posts = Post.objects.filter(topic__forum_id=self.id)
self.post_count = posts.count()
if self.post_count:
try:
last_post = posts.order_by('-created', '-id')[0]
self.updated = last_post.updated or last_post.created
except IndexError:
pass
else:
self.post_count = 0
self.save()
def get_absolute_url(self):
if defaults.PYBB_NICE_URL:
return reverse('pybb:forum', kwargs={'slug': self.slug, 'category_slug': self.category.slug})
return reverse('pybb:forum', kwargs={'pk': self.id})
@property
def posts(self):
return Post.objects.filter(topic__forum=self).select_related()
@cached_property
def last_post(self):
try:
return self.posts.order_by('-created', '-id')[0]
except IndexError:
return None
def get_parents(self):
"""
Used in templates for breadcrumb building
"""
parents = [self.category]
parent = self.parent
while parent is not None:
parents.insert(1, parent)
parent = parent.parent
return parents
@python_2_unicode_compatible
class ForumSubscription(models.Model):
TYPE_NOTIFY = 1
TYPE_SUBSCRIBE = 2
TYPE_CHOICES = (
(TYPE_NOTIFY, _('be notified only when a new topic is added')),
(TYPE_SUBSCRIBE, _('be auto-subscribed to topics')),
)
user = models.ForeignKey(get_user_model_path(), on_delete=models.CASCADE,
related_name='forum_subscriptions+', verbose_name=_('Subscriber'))
forum = models.ForeignKey(Forum,
related_name='subscriptions+', verbose_name=_('Forum'))
type = models.PositiveSmallIntegerField(
_('Subscription type'), choices=TYPE_CHOICES,
help_text=_((
            'The auto-subscription works as if you had manually subscribed to watch each topic:\n'
            'you will be notified whenever a topic receives an answer.\n'
            'If you choose to be notified only when a new topic is added, '
            'you will be notified only once, when the topic is created: '
'you won\'t be notified for the answers.'
)), )
class Meta(object):
verbose_name = _('Subscription to forum')
verbose_name_plural = _('Subscriptions to forums')
unique_together = ('user', 'forum',)
def __str__(self):
return '%(user)s\'s subscription to "%(forum)s"' % {'user': self.user,
'forum': self.forum}
def save(self, all_topics=False, **kwargs):
if all_topics and self.type == self.TYPE_SUBSCRIBE:
old = None if not self.pk else ForumSubscription.objects.get(pk=self.pk)
            if not old or old.type != self.type:
topics = Topic.objects.filter(forum=self.forum).exclude(subscribers=self.user)
self.user.subscriptions.add(*topics)
super(ForumSubscription, self).save(**kwargs)
def delete(self, all_topics=False, **kwargs):
if all_topics:
topics = Topic.objects.filter(forum=self.forum, subscribers=self.user)
self.user.subscriptions.remove(*topics)
super(ForumSubscription, self).delete(**kwargs)
@python_2_unicode_compatible
class Topic(models.Model):
POLL_TYPE_NONE = 0
POLL_TYPE_SINGLE = 1
POLL_TYPE_MULTIPLE = 2
POLL_TYPE_CHOICES = (
(POLL_TYPE_NONE, _('None')),
(POLL_TYPE_SINGLE, _('Single answer')),
(POLL_TYPE_MULTIPLE, _('Multiple answers')),
)
forum = models.ForeignKey(Forum, related_name='topics', verbose_name=_('Forum'))
name = models.CharField(_('Subject'), max_length=255)
created = models.DateTimeField(_('Created'), null=True)
updated = models.DateTimeField(_('Updated'), null=True)
user = models.ForeignKey(get_user_model_path(), verbose_name=_('User'))
views = models.IntegerField(_('Views count'), blank=True, default=0)
sticky = models.BooleanField(_('Sticky'), blank=True, default=False)
closed = models.BooleanField(_('Closed'), blank=True, default=False)
subscribers = models.ManyToManyField(get_user_model_path(), related_name='subscriptions',
verbose_name=_('Subscribers'), blank=True)
post_count = models.IntegerField(_('Post count'), blank=True, default=0)
readed_by = models.ManyToManyField(get_user_model_path(), through='TopicReadTracker', related_name='readed_topics')
on_moderation = models.BooleanField(_('On moderation'), default=False)
poll_type = models.IntegerField(_('Poll type'), choices=POLL_TYPE_CHOICES, default=POLL_TYPE_NONE)
poll_question = models.TextField(_('Poll question'), blank=True, null=True)
slug = models.SlugField(verbose_name=_("Slug"), max_length=255)
class Meta(object):
ordering = ['-created']
verbose_name = _('Topic')
verbose_name_plural = _('Topics')
unique_together = ('forum', 'slug')
def __str__(self):
return self.name
@cached_property
def head(self):
try:
return self.posts.all().order_by('created', 'id')[0]
except IndexError:
return None
@cached_property
def last_post(self):
try:
return self.posts.order_by('-created', '-id').select_related('user')[0]
except IndexError:
return None
def get_absolute_url(self):
if defaults.PYBB_NICE_URL:
return reverse('pybb:topic', kwargs={'slug': self.slug, 'forum_slug': self.forum.slug, 'category_slug': self.forum.category.slug})
return reverse('pybb:topic', kwargs={'pk': self.id})
def save(self, *args, **kwargs):
if self.id is None:
self.created = self.updated = tznow()
forum_changed = False
old_topic = None
if self.id is not None:
old_topic = Topic.objects.get(id=self.id)
if self.forum != old_topic.forum:
forum_changed = True
super(Topic, self).save(*args, **kwargs)
if forum_changed:
old_topic.forum.update_counters()
self.forum.update_counters()
def delete(self, using=None):
super(Topic, self).delete(using)
self.forum.update_counters()
def update_counters(self):
self.post_count = self.posts.count()
# force cache overwrite to get the real latest updated post
if hasattr(self, 'last_post'):
del self.last_post
if self.last_post:
self.updated = self.last_post.updated or self.last_post.created
self.save()
def get_parents(self):
"""
Used in templates for breadcrumb building
"""
parents = self.forum.get_parents()
parents.append(self.forum)
return parents
def poll_votes(self):
if self.poll_type != self.POLL_TYPE_NONE:
return PollAnswerUser.objects.filter(poll_answer__topic=self).count()
else:
return None
class RenderableItem(models.Model):
"""
    Base class for models that have markup, body, body_text and body_html fields.
"""
class Meta(object):
abstract = True
body = models.TextField(_('Message'))
body_html = models.TextField(_('HTML version'))
body_text = models.TextField(_('Text version'))
def render(self):
self.body_html = _get_markup_formatter()(self.body, instance=self)
# Remove tags which was generated with the markup processor
text = strip_tags(self.body_html)
# Unescape entities which was generated with the markup processor
self.body_text = unescape(text)
@python_2_unicode_compatible
class Post(RenderableItem):
topic = models.ForeignKey(Topic, related_name='posts', verbose_name=_('Topic'))
user = models.ForeignKey(get_user_model_path(), related_name='posts', verbose_name=_('User'))
created = models.DateTimeField(_('Created'), blank=True, db_index=True)
updated = models.DateTimeField(_('Updated'), blank=True, null=True)
user_ip = models.GenericIPAddressField(_('User IP'), blank=True, null=True, default='0.0.0.0')
on_moderation = models.BooleanField(_('On moderation'), default=False)
class Meta(object):
ordering = ['created']
verbose_name = _('Post')
verbose_name_plural = _('Posts')
def summary(self):
limit = 50
        tail = '...' if len(self.body) > limit else ''
return self.body[:limit] + tail
def __str__(self):
return self.summary()
def save(self, *args, **kwargs):
created_at = tznow()
if self.created is None:
self.created = created_at
self.render()
new = self.pk is None
topic_changed = False
old_post = None
if not new:
old_post = Post.objects.get(pk=self.pk)
if old_post.topic != self.topic:
topic_changed = True
super(Post, self).save(*args, **kwargs)
# If post is topic head and moderated, moderate topic too
if self.topic.head == self and not self.on_moderation and self.topic.on_moderation:
self.topic.on_moderation = False
self.topic.update_counters()
self.topic.forum.update_counters()
if topic_changed:
old_post.topic.update_counters()
old_post.topic.forum.update_counters()
def get_absolute_url(self):
return reverse('pybb:post', kwargs={'pk': self.id})
def delete(self, *args, **kwargs):
self_id = self.id
head_post_id = self.topic.posts.order_by('created', 'id')[0].id
if self_id == head_post_id:
self.topic.delete()
else:
super(Post, self).delete(*args, **kwargs)
self.topic.update_counters()
self.topic.forum.update_counters()
def get_parents(self):
"""
Used in templates for breadcrumb building
"""
return self.topic.forum.category, self.topic.forum, self.topic,
class Profile(PybbProfile):
"""
    Profile class that can be used if you don't have
    your own site profile.
"""
user = AutoOneToOneField(get_user_model_path(), related_name='pybb_profile', verbose_name=_('User'))
class Meta(object):
verbose_name = _('Profile')
verbose_name_plural = _('Profiles')
def get_absolute_url(self):
return reverse('pybb:user', kwargs={'username': getattr(self.user, get_username_field())})
def get_display_name(self):
return self.user.get_username()
class Attachment(models.Model):
class Meta(object):
verbose_name = _('Attachment')
verbose_name_plural = _('Attachments')
post = models.ForeignKey(Post, verbose_name=_('Post'), related_name='attachments')
size = models.IntegerField(_('Size'))
file = models.FileField(_('File'),
upload_to=FilePathGenerator(to=defaults.PYBB_ATTACHMENT_UPLOAD_TO))
def save(self, *args, **kwargs):
self.size = self.file.size
super(Attachment, self).save(*args, **kwargs)
def size_display(self):
size = self.size
if size < 1024:
return '%db' % size
elif size < 1024 * 1024:
return '%dKb' % int(size / 1024)
else:
return '%.2fMb' % (size / float(1024 * 1024))
class TopicReadTrackerManager(models.Manager):
def get_or_create_tracker(self, user, topic):
"""
        Correctly create a tracker in a MySQL database running under the default
        REPEATABLE READ transaction isolation level. It is a known problem that the
        standard get_or_create method can raise an exception even though the correct
        data is already in the MySQL database.
See http://stackoverflow.com/questions/2235318/how-do-i-deal-with-this-race-condition-in-django/2235624
"""
is_new = True
sid = transaction.savepoint(using=self.db)
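        # Take a savepoint so that, if the INSERT below races with another
        # transaction and violates the unique (user, topic) constraint, we can
        # roll back to it and fall through to a plain get() of the existing row.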
try:
with get_atomic_func()():
obj = TopicReadTracker.objects.create(user=user, topic=topic)
transaction.savepoint_commit(sid)
except DatabaseError:
transaction.savepoint_rollback(sid)
obj = TopicReadTracker.objects.get(user=user, topic=topic)
is_new = False
return obj, is_new
class TopicReadTracker(models.Model):
"""
Save per user topic read tracking
"""
user = models.ForeignKey(get_user_model_path(), blank=False, null=False)
topic = models.ForeignKey(Topic, blank=True, null=True)
time_stamp = models.DateTimeField(auto_now=True)
objects = TopicReadTrackerManager()
class Meta(object):
verbose_name = _('Topic read tracker')
verbose_name_plural = _('Topic read trackers')
unique_together = ('user', 'topic')
class ForumReadTrackerManager(models.Manager):
def get_or_create_tracker(self, user, forum):
"""
        Correctly create a tracker in a MySQL database running under the default
        REPEATABLE READ transaction isolation level. It is a known problem that the
        standard get_or_create method can raise an exception even though the correct
        data is already in the MySQL database.
See http://stackoverflow.com/questions/2235318/how-do-i-deal-with-this-race-condition-in-django/2235624
"""
is_new = True
sid = transaction.savepoint(using=self.db)
try:
with get_atomic_func()():
obj = ForumReadTracker.objects.create(user=user, forum=forum)
transaction.savepoint_commit(sid)
except DatabaseError:
transaction.savepoint_rollback(sid)
is_new = False
obj = ForumReadTracker.objects.get(user=user, forum=forum)
return obj, is_new
class ForumReadTracker(models.Model):
"""
Save per user forum read tracking
"""
user = models.ForeignKey(get_user_model_path(), blank=False, null=False)
forum = models.ForeignKey(Forum, blank=True, null=True)
time_stamp = models.DateTimeField(auto_now=True)
objects = ForumReadTrackerManager()
class Meta(object):
verbose_name = _('Forum read tracker')
verbose_name_plural = _('Forum read trackers')
unique_together = ('user', 'forum')
@python_2_unicode_compatible
class PollAnswer(models.Model):
topic = models.ForeignKey(Topic, related_name='poll_answers', verbose_name=_('Topic'))
text = models.CharField(max_length=255, verbose_name=_('Text'))
class Meta:
verbose_name = _('Poll answer')
verbose_name_plural = _('Polls answers')
def __str__(self):
return self.text
def votes(self):
return self.users.count()
def votes_percent(self):
topic_votes = self.topic.poll_votes()
if topic_votes > 0:
return 1.0 * self.votes() / topic_votes * 100
else:
return 0
@python_2_unicode_compatible
class PollAnswerUser(models.Model):
poll_answer = models.ForeignKey(PollAnswer, related_name='users', verbose_name=_('Poll answer'))
user = models.ForeignKey(get_user_model_path(), related_name='poll_answers', verbose_name=_('User'))
timestamp = models.DateTimeField(auto_now_add=True)
class Meta:
verbose_name = _('Poll answer user')
verbose_name_plural = _('Polls answers users')
unique_together = (('poll_answer', 'user', ), )
def __str__(self):
return '%s - %s' % (self.poll_answer.topic, self.user)
def create_or_check_slug(instance, model, **extra_filters):
"""
    Returns a unique slug.
    :param instance: target instance
    :param model: needed because instance._meta.model is only available since Django 1.6
:param extra_filters: filters needed for Forum and Topic for their unique_together field
"""
initial_slug = instance.slug or slugify(instance.name)
count = -1
last_count_len = 0
slug_is_not_unique = True
while slug_is_not_unique:
count += 1
if count >= defaults.PYBB_NICE_URL_SLUG_DUPLICATE_LIMIT:
            msg = _('After %(limit)s attempts, there is no unique slug value for "%(slug)s"')
raise ValidationError(msg % {'limit': defaults.PYBB_NICE_URL_SLUG_DUPLICATE_LIMIT,
'slug': initial_slug})
count_len = len(str(count))
if last_count_len != count_len:
last_count_len = count_len
filters = {'slug__startswith': initial_slug[:(254-count_len)], }
if extra_filters:
filters.update(extra_filters)
objs = model.objects.filter(**filters).exclude(pk=instance.pk)
slug_list = [obj.slug for obj in objs]
if count == 0:
slug = initial_slug
else:
slug = '%s-%d' % (initial_slug[:(254-count_len)], count)
slug_is_not_unique = slug in slug_list
return slug
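# A minimal usage sketch (an editorial addition, not part of pybb itself): the
# usual caller is whatever builds a Forum or Topic before saving it. The helper
# below is illustrative only; the category filter mirrors Forum's
# ('category', 'slug') unique_together constraint documented above.
def _example_assign_forum_slug(forum):
    # Assign a slug that is unique within the forum's category, then persist it.
    forum.slug = create_or_check_slug(forum, Forum, category=forum.category)
    forum.save()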
|
bsd-2-clause
| 5,318,720,958,938,460,000
| 35.70991
| 142
| 0.620742
| false
| 3.986304
| false
| false
| false
|
HewlettPackard/python-proliant-sdk
|
examples/Redfish/ex23_dump_ilo_event_log.py
|
1
|
2839
|
# Copyright 2016 Hewlett Packard Enterprise Development, LP.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from _redfishobject import RedfishObject
from redfish.rest.v1 import ServerDownOrUnreachableError
def ex23_dump_ilo_event_log(redfishobj):
sys.stdout.write("\nEXAMPLE 23: Dump iLO Event Log\n")
instances = redfishobj.search_for_type("LogService.")
for instance in instances:
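        # "IEL" is the iLO Event Log; other log services (such as the
        # Integrated Management Log, "IML") are skipped by this example.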
if instance["@odata.id"].endswith("IEL/"):
tmp = redfishobj.redfish_get(instance["@odata.id"])
rsp = redfishobj.redfish_get(tmp.dict["Entries"]["@odata.id"])
for entry in rsp.dict["Members"]:
response = redfishobj.redfish_get(entry["@odata.id"])
sys.stdout.write(response.dict["Message"] + "\n")
while 'NextPage' in rsp.dict["Members"]:
response = redfishobj.redfish_get(entry["@odata.id"] + \
'?page=' + \
str(response.dict["Entries"] \
['NextPage']['page']))
sys.stdout.write(response.dict["Message"] + "\n")
redfishobj.error_handler(response)
if __name__ == "__main__":
# When running on the server locally use the following commented values
# iLO_https_url = "blobstore://."
# iLO_account = "None"
# iLO_password = "None"
# When running remotely connect using the iLO secured (https://) address,
# iLO account name, and password to send https requests
# iLO_https_url acceptable examples:
# "https://10.0.0.100"
# "https://f250asha.americas.hpqcorp.net"
iLO_https_url = "https://10.0.0.100"
iLO_account = "admin"
iLO_password = "password"
# Create a REDFISH object
try:
REDFISH_OBJ = RedfishObject(iLO_https_url, iLO_account, iLO_password)
except ServerDownOrUnreachableError, excp:
sys.stderr.write("ERROR: server not reachable or doesn't support " \
"RedFish.\n")
sys.exit()
except Exception, excp:
raise excp
ex23_dump_ilo_event_log(REDFISH_OBJ)
|
apache-2.0
| 5,327,622,165,731,874,000
| 40.402985
| 78
| 0.586122
| false
| 3.954039
| false
| false
| false
|
eugene-eeo/mailthon
|
mailthon/envelope.py
|
1
|
1654
|
"""
mailthon.envelope
~~~~~~~~~~~~~~~~~
Implements the Envelope object.
:copyright: (c) 2015 by Eeo Jun
:license: MIT, see LICENSE for details.
"""
class Envelope(object):
"""
Enclosure adapter for encapsulating the concept of
    an Envelope: a wrapper around some content in the
    form of an *enclosure* that deals with SMTP-specific
    idiosyncrasies.
:param enclosure: An enclosure object to wrap around.
:param mail_from: The "real" sender. May be omitted.
:param rcpt_to: A list of "real" email addresses.
May be omitted.
"""
def __init__(self, enclosure, mail_from=None, rcpt_to=None):
self.enclosure = enclosure
self.mail_from = mail_from
self.rcpt_to = rcpt_to
@property
def sender(self):
"""
Returns the real sender if set in the *mail_from*
parameter/attribute, else returns the sender
attribute from the wrapped enclosure.
"""
return self.mail_from or self.enclosure.sender
@property
def receivers(self):
"""
Returns the "real" receivers which will be passed
to the ``RCPT TO`` command (in SMTP) if specified
in the *rcpt_to* attribute/parameter. Else, return
the receivers attribute from the wrapped enclosure.
"""
return self.rcpt_to or self.enclosure.receivers
def mime(self):
"""
Returns the mime object from the enclosure.
"""
return self.enclosure.mime()
def string(self):
"""
Returns the stringified mime object.
"""
return self.enclosure.string()
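# A small usage sketch (an editorial addition, not shipped with mailthon): any
# object exposing ``sender``, ``receivers``, ``mime()`` and ``string()`` can act
# as the enclosure, so a stub is enough to show how mail_from/rcpt_to override
# the enclosure's own addresses.
class _StubEnclosure(object):
    sender = 'alice@example.com'
    receivers = ['bob@example.com']
    def mime(self):
        return None  # a real enclosure would return an email.mime object here
    def string(self):
        return 'stubbed message body'
_envelope = Envelope(_StubEnclosure(), rcpt_to=['carol@example.com'])
assert _envelope.sender == 'alice@example.com'       # falls back to the enclosure
assert _envelope.receivers == ['carol@example.com']  # rcpt_to takes precedence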
|
mit
| 1,613,581,448,409,228,000
| 27.033898
| 64
| 0.612455
| false
| 4.145363
| false
| false
| false
|
datagrok/python-misc
|
datagrok/math/vector.py
|
1
|
1548
|
"""Snippets from linear algebra class"""
from numpy import dot, array, sqrt, matrix
# TODO: many of these may be part of numpy now. Check and cull
def proj(M,x):
"""
>>> A = array([[1, 2], [2, 1]])
>>> x = array([[1], [2]])
>>> proj(A, x)
matrix([[ 1.],
[ 2.]])
"""
# proj_w(x) = M(M^TM)^-1M^Tx
M = matrix(M)
return M * (M.T * M).I * M.T * x
def mat_array(s):
"""Returns an array created from a spaces-and-lines blob of data.
>>> mat_array('''
... 1 2 3
... 4 5 6
... 7 8 9
... ''')
array([[1, 2, 3],
[4, 5, 6],
[7, 8, 9]])
"""
return array([[int(v) for v in row.strip().split()] for row in [l for l in s.splitlines() if l]])
def col_array(s):
"""Returns transpose of mat_array.
>>> col_array('''
... 1 2 3
... 4 5 6
... 7 8 9
... ''')
array([[1, 4, 7],
[2, 5, 8],
[3, 6, 9]])
"""
return (mat_array(s)).T
def norm(x):
"""Returns the norm (length) of vector x
>>> norm(array([3, 4]))
5.0
"""
return sqrt(sum(x*x))
def unit(x):
"""Returns a unit vector in the direction of vector x.
>>> unit(array([9, 0]))
array([ 1., 0.])
>>> unit(array([0, 9]))
array([ 0., 1.])
>>> unit(array([9, 9]))
array([ 0.70710678, 0.70710678])
"""
return x/norm(x)
def lss(A, b):
"""Finds the least squares solution for Ax=b"""
A = matrix(A)
return (A.T * A).I * A.T * b
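# A quick editorial check of lss() on an overdetermined system (not part of the
# original module); the data is illustrative only.
if __name__ == "__main__":
    A = array([[1, 0], [0, 1], [1, 1]])
    b = matrix([[1.0], [2.0], [2.5]])
    # Minimises ||Ax - b||; for this data the solution is approximately
    # matrix([[0.8333], [1.8333]]).
    print(lss(A, b))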
|
agpl-3.0
| -8,830,901,386,614,149,000
| 18.35
| 101
| 0.443152
| false
| 2.988417
| false
| false
| false
|
wasim21k/pihome
|
cron/login.py
|
1
|
2609
|
#!/usr/bin/python
# add following line to show up when some one ssh to pi /etc/profile
# sudo python /var/www/cron/login.py
# clear everything from /etc/motd to remove generic message.
import socket, os, re, time, sys, subprocess, fcntl, struct
from threading import Thread
class bc:
HEADER = '\033[0;36;40m'
ENDC = '\033[0m'
SUB = '\033[3;30;45m'
WARN = '\033[0;31;40m'
GREEN = '\033[0;32;40m'
org = '\033[91m'
print bc.HEADER + " "
print " _____ _ _ _ "
print " | __ \ (_) | | | | "
print " | |__) | _ | |__| | ___ _ __ ___ ___ "
print " | ___/ | | | __ | / _ \ | |_ \_ \ / _ \ "
print " | | | | | | | | | (_) | | | | | | | | __/"
print " |_| |_| |_| |_| \___/ |_| |_| |_| \___|"
print " "
print " "+bc.SUB + "S M A R T H E A T I N G C O N T R O L "+ bc.ENDC
print bc.WARN +" "
print "*************************************************************************"
print "* PiHome is Raspberry Pi based Central Heating Control systems. It runs *"
print "* from web interface and it comes with ABSOLUTELY NO WARRANTY, to the *"
print "* extent permitted by applicable law. I take no responsibility for any *"
print "* loss or damage to you or your property. *"
print "* DO NOT MAKE ANY CHANGES TO YOUR HEATING SYSTEM UNTILL UNLESS YOU KNOW *"
print "* WHAT YOU ARE DOING *"
print "*************************************************************************"
print bc.GREEN +" Have Fun - PiHome" + bc.ENDC
df = subprocess.Popen(["df", "-h"], stdout=subprocess.PIPE)
output = df.communicate()[0]
device, size, used, available, percent, mountpoint = \
output.split("\n")[1].split()
print bc.org +"Disk/SD Card Usage" + bc.ENDC
print "Filesystem Size Used Avail Used%"
print device+" "+size+" "+used+" "+available+" "+percent
def get_interface_ip(ifname):
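    # 0x8915 is SIOCGIFADDR: ask the kernel for the IPv4 address bound to ifname.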
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', ifname[:15]))[20:24])
def get_ip():
ip = socket.gethostbyname(socket.gethostname())
if ip.startswith("127."):
interfaces = ["eth0","eth1","eth2","wlan0","wlan1","wifi0","ath0","ath1","ppp0"]
for ifname in interfaces:
try:
ip = get_interface_ip(ifname)
break
except IOError:
pass
return ip
print "WebServer: "+bc.GREEN +"http://"+str(get_ip())+"/"+ bc.ENDC
print "PhpMyAdmin: "+bc.GREEN +"http://"+str(get_ip())+"/phpmyadmin"+ bc.ENDC
|
gpl-3.0
| -8,385,152,448,797,073,000
| 44
| 100
| 0.51744
| false
| 3.047897
| false
| false
| false
|
melodous/designate
|
designate/cmd/central.py
|
1
|
1243
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
#
# Author: Kiall Mac Innes <kiall@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from oslo.config import cfg
from designate.openstack.common import log as logging
from designate import service
from designate import utils
from designate.central import service as central
CONF = cfg.CONF
CONF.import_opt('workers', 'designate.central', group='service:central')
def main():
utils.read_config('designate', sys.argv)
logging.setup('designate')
server = central.Service.create(binary='designate-central',
service_name='central')
service.serve(server, workers=CONF['service:central'].workers)
service.wait()
|
apache-2.0
| -8,144,857,311,752,288,000
| 32.594595
| 75
| 0.735318
| false
| 3.996785
| false
| false
| false
|
jds2001/sos
|
sos/plugins/filesys.py
|
1
|
2190
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
from sos.plugins import Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin
class Filesys(Plugin, RedHatPlugin, DebianPlugin, UbuntuPlugin):
"""Local file systems
"""
plugin_name = 'filesys'
profiles = ('storage',)
option_list = [("lsof", 'gathers information on all open files', 'slow',
False),
("dumpe2fs", 'dump filesystem information', 'slow', False)]
def setup(self):
self.add_copy_spec([
"/proc/filesystems",
"/etc/fstab",
"/proc/self/mounts",
"/proc/self/mountinfo",
"/proc/self/mountstats",
"/proc/mounts"
])
self.add_cmd_output("mount -l", root_symlink="mount")
self.add_cmd_output("df -al", root_symlink="df")
self.add_cmd_output([
"df -ali",
"findmnt"
])
if self.get_option('lsof'):
self.add_cmd_output("lsof -b +M -n -l -P", root_symlink="lsof")
dumpe2fs_opts = '-h'
if self.get_option('dumpe2fs'):
dumpe2fs_opts = ''
mounts = '/proc/mounts'
ext_fs_regex = r"^(/dev/.+).+ext[234]\s+"
for dev in self.do_regex_find_all(ext_fs_regex, mounts):
self.add_cmd_output("dumpe2fs %s %s" % (dumpe2fs_opts, dev))
def postproc(self):
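        # Scrub password=... mount options (e.g. CIFS/NFS credentials) from the
        # collected copy of /etc/fstab before it lands in the report.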
self.do_file_sub(
"/etc/fstab",
r"(password=)[^\s]*",
r"\1********"
)
# vim: set et ts=4 sw=4 :
|
gpl-2.0
| -7,307,108,655,078,007,000
| 33.761905
| 78
| 0.594064
| false
| 3.680672
| false
| false
| false
|
yephper/django
|
django/contrib/gis/db/backends/mysql/operations.py
|
1
|
3451
|
from django.contrib.gis.db.backends.base.adapter import WKTAdapter
from django.contrib.gis.db.backends.base.operations import \
BaseSpatialOperations
from django.contrib.gis.db.backends.utils import SpatialOperator
from django.contrib.gis.db.models import aggregates
from django.db.backends.mysql.operations import DatabaseOperations
from django.utils.functional import cached_property
class MySQLOperations(BaseSpatialOperations, DatabaseOperations):
mysql = True
name = 'mysql'
Adapter = WKTAdapter
@cached_property
def select(self):
if self.connection.mysql_version < (5, 6, 0):
return 'AsText(%s)'
return 'ST_AsText(%s)'
@cached_property
def from_wkb(self):
if self.connection.mysql_version < (5, 6, 0):
return 'GeomFromWKB'
return 'ST_GeomFromWKB'
@cached_property
def from_text(self):
if self.connection.mysql_version < (5, 6, 0):
return 'GeomFromText'
return 'ST_GeomFromText'
gis_operators = {
'bbcontains': SpatialOperator(func='MBRContains'), # For consistency w/PostGIS API
'bboverlaps': SpatialOperator(func='MBROverlaps'), # .. ..
'contained': SpatialOperator(func='MBRWithin'), # .. ..
'contains': SpatialOperator(func='MBRContains'),
'disjoint': SpatialOperator(func='MBRDisjoint'),
'equals': SpatialOperator(func='MBREqual'),
'exact': SpatialOperator(func='MBREqual'),
'intersects': SpatialOperator(func='MBRIntersects'),
'overlaps': SpatialOperator(func='MBROverlaps'),
'same_as': SpatialOperator(func='MBREqual'),
'touches': SpatialOperator(func='MBRTouches'),
'within': SpatialOperator(func='MBRWithin'),
}
@cached_property
def function_names(self):
return {
'Difference': 'ST_Difference',
'Distance': 'ST_Distance',
'Intersection': 'ST_Intersection',
'Length': 'GLength' if self.connection.mysql_version < (5, 6, 0) else 'ST_Length',
'SymDifference': 'ST_SymDifference',
'Union': 'ST_Union',
}
disallowed_aggregates = (
aggregates.Collect, aggregates.Extent, aggregates.Extent3D,
aggregates.MakeLine, aggregates.Union,
)
@cached_property
def unsupported_functions(self):
unsupported = {
'AsGeoJSON', 'AsGML', 'AsKML', 'AsSVG', 'BoundingCircle',
'ForceRHR', 'GeoHash', 'MemSize',
'Perimeter', 'PointOnSurface', 'Reverse', 'Scale', 'SnapToGrid',
'Transform', 'Translate',
}
if self.connection.mysql_version < (5, 6, 1):
unsupported.update({'Difference', 'Distance', 'Intersection', 'SymDifference', 'Union'})
return unsupported
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, f, value, compiler):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'as_sql'):
placeholder, _ = compiler.compile(value)
else:
placeholder = '%s(%%s)' % self.from_text
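            # e.g. 'ST_GeomFromText(%s)' on MySQL >= 5.6, 'GeomFromText(%s)' before that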
return placeholder
|
bsd-3-clause
| 4,677,460,586,627,847,000
| 35.923077
| 100
| 0.611417
| false
| 4.098575
| false
| false
| false
|