| repo_name (string, 5 to 92 chars) | path (string, 4 to 221 chars) | copies (string, 19 distinct values) | size (string, 4 to 6 chars) | content (string, 766 to 896k chars) | license (string, 15 distinct values) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51 to 99.9) | line_max (int64, 32 to 997) | alpha_frac (float64, 0.25 to 0.96) | autogenerated (bool, 1 class) | ratio (float64, 1.5 to 13.6) | config_test (bool, 2 classes) | has_no_keywords (bool, 2 classes) | few_assignments (bool, 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
lvapeab/nmt-keras
|
tests/NMT_architectures/attention_ConditionalLSTM.py
|
1
|
8620
|
import argparse
import os
import pytest
from tests.test_config import load_tests_params, clean_dirs
from data_engine.prepare_data import build_dataset
from nmt_keras.training import train_model
from nmt_keras.apply_model import sample_ensemble, score_corpus
def test_ConditionalLSTM_add():
params = load_tests_params()
# Current test params: single-layered bidirectional LSTM encoder - ConditionalLSTM decoder with 'add' attention
params['BIDIRECTIONAL_ENCODER'] = True
params['N_LAYERS_ENCODER'] = 1
params['BIDIRECTIONAL_DEEP_ENCODER'] = True
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'ConditionalLSTM'
params['N_LAYERS_DECODER'] = 1
params['ATTENTION_MODE'] = 'add'
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + params['ATTENTION_MODE'] + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print("Training model")
train_model(params)
params['RELOAD'] = 1
print("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
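# Instead of parsing real command-line arguments, the test assigns the expected
# attributes directly onto the ArgumentParser instance so it can stand in for the
# parsed-arguments namespace consumed by sample_ensemble() and score_corpus().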
parser.dataset = os.path.join(
params['DATASET_STORE_PATH'],
'Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl')
parser.text = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.target = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['TRG_LAN'])
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print("Done")
print("Scoring corpus")
score_corpus(parser, params)
print("Done")
clean_dirs(params)
def test_ConditionalLSTM_dot():
params = load_tests_params()
# Current test params: single-layered bidirectional LSTM encoder - ConditionalLSTM decoder with 'dot' attention
params['BIDIRECTIONAL_ENCODER'] = True
params['N_LAYERS_ENCODER'] = 1
params['BIDIRECTIONAL_DEEP_ENCODER'] = True
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'ConditionalLSTM'
params['N_LAYERS_DECODER'] = 1
params['ATTENTION_MODE'] = 'dot'
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + params['ATTENTION_MODE'] + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print("Training model")
train_model(params)
params['RELOAD'] = 1
print("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
parser.dataset = os.path.join(
params['DATASET_STORE_PATH'],
'Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl')
parser.text = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.target = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['TRG_LAN'])
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print("Done")
print("Scoring corpus")
score_corpus(parser, params)
print("Done")
clean_dirs(params)
def test_ConditionalLSTM_scaled():
params = load_tests_params()
# Current test params: single-layered bidirectional LSTM encoder - ConditionalLSTM decoder with 'scaled-dot' attention
params['BIDIRECTIONAL_ENCODER'] = True
params['N_LAYERS_ENCODER'] = 1
params['BIDIRECTIONAL_DEEP_ENCODER'] = True
params['ENCODER_RNN_TYPE'] = 'LSTM'
params['DECODER_RNN_TYPE'] = 'ConditionalLSTM'
params['N_LAYERS_DECODER'] = 1
params['ATTENTION_MODE'] = 'scaled-dot'
params['REBUILD_DATASET'] = True
dataset = build_dataset(params)
params['INPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['INPUTS_IDS_DATASET'][0]]
params['OUTPUT_VOCABULARY_SIZE'] = dataset.vocabulary_len[params['OUTPUTS_IDS_DATASET'][0]]
params['MODEL_NAME'] = \
params['TASK_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '_' + params['MODEL_TYPE'] + \
'_src_emb_' + str(params['SOURCE_TEXT_EMBEDDING_SIZE']) + \
'_bidir_' + str(params['BIDIRECTIONAL_ENCODER']) + \
'_enc_' + params['ENCODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_ENCODER']) + '_' + str(
params['ENCODER_HIDDEN_SIZE']) + \
'_dec_' + params['DECODER_RNN_TYPE'] + '_*' + str(params['N_LAYERS_DECODER']) + '_' + str(
params['DECODER_HIDDEN_SIZE']) + params['ATTENTION_MODE'] + \
'_deepout_' + '_'.join([layer[0] for layer in params['DEEP_OUTPUT_LAYERS']]) + \
'_trg_emb_' + str(params['TARGET_TEXT_EMBEDDING_SIZE']) + \
'_' + params['OPTIMIZER'] + '_' + str(params['LR'])
# Test several NMT-Keras utilities: train, sample, sample_ensemble, score_corpus...
print("Training model")
train_model(params)
params['RELOAD'] = 1
print("Done")
parser = argparse.ArgumentParser('Parser for unit testing')
parser.dataset = os.path.join(
params['DATASET_STORE_PATH'],
'Dataset_' + params['DATASET_NAME'] + '_' + params['SRC_LAN'] + params['TRG_LAN'] + '.pkl')
parser.text = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.splits = ['val']
parser.config = params['STORE_PATH'] + '/config.pkl'
parser.models = [params['STORE_PATH'] + '/epoch_' + str(1)]
parser.verbose = 0
parser.dest = None
parser.source = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['SRC_LAN'])
parser.target = os.path.join(params['DATA_ROOT_PATH'], params['TEXT_FILES']['val'] + params['TRG_LAN'])
parser.weights = []
parser.glossary = None
for n_best in [True, False]:
parser.n_best = n_best
print("Sampling with n_best = %s " % str(n_best))
sample_ensemble(parser, params)
print("Done")
print("Scoring corpus")
score_corpus(parser, params)
print("Done")
clean_dirs(params)
if __name__ == '__main__':
pytest.main([__file__])
|
mit
| 6,003,570,170,736,867,000
| 42.756345
| 107
| 0.609281
| false
| 3.206845
| true
| false
| false
|
mathandy/svgpathtools
|
svgpathtools/paths2svg.py
|
1
|
19028
|
"""This submodule: basic tools for creating svg files from path data.
See also the document.py submodule.
"""
# External dependencies:
from __future__ import division, absolute_import, print_function
from math import ceil
from os import path as os_path, makedirs
from tempfile import gettempdir
from xml.dom.minidom import parse as md_xml_parse
from svgwrite import Drawing, text as txt
from time import time
from warnings import warn
import re
# Internal dependencies
from .path import Path, Line, is_path_segment
from .misctools import open_in_browser
# color shorthand for inputting color list as string of chars.
color_dict = {'a': 'aqua',
'b': 'blue',
'c': 'cyan',
'd': 'darkblue',
'e': '',
'f': '',
'g': 'green',
'h': '',
'i': '',
'j': '',
'k': 'black',
'l': 'lime',
'm': 'magenta',
'n': 'brown',
'o': 'orange',
'p': 'pink',
'q': 'turquoise',
'r': 'red',
's': 'salmon',
't': 'tan',
'u': 'purple',
'v': 'violet',
'w': 'white',
'x': '',
'y': 'yellow',
'z': 'azure'}
def str2colorlist(s, default_color=None):
color_list = [color_dict[ch] for ch in s]
if default_color:
for idx, c in enumerate(color_list):
if not c:
color_list[idx] = default_color
return color_list
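# For example, given the color_dict above, str2colorlist('rgk') returns
# ['red', 'green', 'black'], while characters that map to '' (e.g. 'e') are
# replaced by default_color when one is provided:
# str2colorlist('re', default_color='blue') -> ['red', 'blue'].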
def is3tuple(c):
return isinstance(c, tuple) and len(c) == 3
def big_bounding_box(paths_n_stuff):
"""returns minimal upright bounding box.
Args:
paths_n_stuff: iterable of Paths, Bezier path segments, and
points (given as complex numbers).
Returns:
extrema of bounding box, (xmin, xmax, ymin, ymax)
"""
bbs = []
for thing in paths_n_stuff:
if is_path_segment(thing) or isinstance(thing, Path):
bbs.append(thing.bbox())
elif isinstance(thing, complex):
bbs.append((thing.real, thing.real, thing.imag, thing.imag))
else:
try:
complexthing = complex(thing)
bbs.append((complexthing.real, complexthing.real,
complexthing.imag, complexthing.imag))
except ValueError:
raise TypeError("paths_n_stuff can only contains Path, "
"CubicBezier, QuadraticBezier, Line, "
"and complex objects.")
xmins, xmaxs, ymins, ymaxs = list(zip(*bbs))
xmin = min(xmins)
xmax = max(xmaxs)
ymin = min(ymins)
ymax = max(ymaxs)
return xmin, xmax, ymin, ymax
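# For instance, big_bounding_box([0j, 1 + 2j]) yields (0.0, 1.0, 0.0, 2.0):
# each complex point contributes a degenerate (x, x, y, y) box and the
# per-axis minima/maxima are then taken across all boxes.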
def disvg(paths=None, colors=None, filename=None, stroke_widths=None,
nodes=None, node_colors=None, node_radii=None,
openinbrowser=True, timestamp=None, margin_size=0.1,
mindim=600, dimensions=None, viewbox=None, text=None,
text_path=None, font_size=None, attributes=None,
svg_attributes=None, svgwrite_debug=False,
paths2Drawing=False, baseunit='px'):
"""Creates (and optionally displays) an SVG file.
REQUIRED INPUTS:
:param paths - a list of paths
OPTIONAL INPUT:
:param colors - specifies the path stroke color. By default all paths
will be black (#000000). This parameter can be input in a few ways
1) a list of strings that will be input into the path elements stroke
attribute (so anything that is understood by the svg viewer).
2) a string of single character colors -- e.g. setting colors='rrr' is
equivalent to setting colors=['red', 'red', 'red'] (see the
'color_dict' dictionary above for a list of possibilities).
3) a list of rgb 3-tuples -- e.g. colors = [(255, 0, 0), ...].
:param filename - the desired location/filename of the SVG file
created (by default the SVG will be named 'disvg_output.svg' or
'disvg_output_<timestamp>.svg' and stored in the temporary
directory returned by `tempfile.gettempdir()`). See `timestamp`
for information on the timestamp.
:param stroke_widths - a list of stroke_widths to use for paths
(default is 0.1% of the SVG's width or length)
:param nodes - a list of points to draw as filled-in circles
:param node_colors - a list of colors to use for the nodes (by default
nodes will be red)
:param node_radii - a list of radii to use for the nodes (by default
the node radius will be 0.5 percent of the svg's width/length)
:param text - string or list of strings to be displayed
:param text_path - if text is a list, then this should be a list of
paths (or path segments) of the same length. Note: the path must be
long enough to display the text or the text will be cropped by the svg
viewer.
:param font_size - a single float or a list of floats.
:param openinbrowser - Set to True to automatically open the created
SVG in the user's default web browser.
:param timestamp - if true, then a timestamp will be
appended to the output SVG's filename. This is meant as a
workaround for issues related to rapidly opening multiple
SVGs in your browser using `disvg`. This defaults to true if
`filename is None` and false otherwise.
:param margin_size - The min margin (empty area framing the collection
of paths) size used for creating the canvas and background of the SVG.
:param mindim - The minimum dimension (height or width) of the output
SVG (default is 600).
:param dimensions - The (x,y) display dimensions of the output SVG.
I.e. this specifies the `width` and `height` SVG attributes. Note that
these also can be used to specify units other than pixels. Using this
will override the `mindim` parameter.
:param viewbox - This specifies the coordinate system used in the svg.
The SVG `viewBox` attribute works together with the `height` and
`width` attributes. Using these three attributes allows for shifting
and scaling of the SVG canvas without changing any values other
than those in `viewBox`, `height`, and `width`. `viewbox` should be
input as a 4-tuple, (min_x, min_y, width, height), or a string
"min_x min_y width height". Using this will override the `mindim`
parameter.
:param attributes - a list of dictionaries of attributes for the input
paths. Note: This will override any other conflicting settings.
:param svg_attributes - a dictionary of attributes for output svg.
:param svgwrite_debug - This parameter turns on/off `svgwrite`'s
debugging mode. By default svgwrite_debug=False. This increases
speed and also prevents `svgwrite` from raising an error when not
all `svg_attributes` key-value pairs are understood.
:param paths2Drawing - If true, an `svgwrite.Drawing` object is
returned and no file is written. This `Drawing` can later be saved
using the `svgwrite.Drawing.save()` method.
NOTES:
* The `svg_attributes` parameter will override any other conflicting
settings.
* Any `extra` parameters that `svgwrite.Drawing()` accepts can be
controlled by passing them in through `svg_attributes`.
* The unit of length here is assumed to be pixels in all variables.
* If this function is used multiple times in quick succession to
display multiple SVGs (all using the default filename), the
svgviewer/browser will likely fail to load some of the SVGs in time.
To fix this, use the timestamp attribute, or give the files unique
names, or use a pause command (e.g. time.sleep(1)) between uses.
SEE ALSO:
* document.py
"""
_default_relative_node_radius = 5e-3
_default_relative_stroke_width = 1e-3
_default_path_color = '#000000' # black
_default_node_color = '#ff0000' # red
_default_font_size = 12
if filename is None:
timestamp = True if timestamp is None else timestamp
filename = os_path.join(gettempdir(), 'disvg_output.svg')
# append time stamp to filename
if timestamp:
fbname, fext = os_path.splitext(filename)
dirname = os_path.dirname(filename)
tstamp = str(time()).replace('.', '')
stfilename = os_path.split(fbname)[1] + '_' + tstamp + fext
filename = os_path.join(dirname, stfilename)
# check paths and colors are set
if isinstance(paths, Path) or is_path_segment(paths):
paths = [paths]
if paths:
if not colors:
colors = [_default_path_color] * len(paths)
else:
assert len(colors) == len(paths)
if isinstance(colors, str):
colors = str2colorlist(colors,
default_color=_default_path_color)
elif isinstance(colors, list):
for idx, c in enumerate(colors):
if is3tuple(c):
colors[idx] = "rgb" + str(c)
# check nodes and nodes_colors are set (node_radii are set later)
if nodes:
if not node_colors:
node_colors = [_default_node_color] * len(nodes)
else:
assert len(node_colors) == len(nodes)
if isinstance(node_colors, str):
node_colors = str2colorlist(node_colors,
default_color=_default_node_color)
elif isinstance(node_colors, list):
for idx, c in enumerate(node_colors):
if is3tuple(c):
node_colors[idx] = "rgb" + str(c)
# set up the viewBox and display dimensions of the output SVG
# along the way, set stroke_widths and node_radii if not provided
assert paths or nodes
stuff2bound = []
if viewbox:
if not isinstance(viewbox, str):
viewbox = '%s %s %s %s' % viewbox
if dimensions is None:
dimensions = viewbox.split(' ')[2:4]
elif dimensions:
dimensions = tuple(map(str, dimensions))
def strip_units(s):
return re.search(r'\d*\.?\d*', s.strip()).group()
viewbox = '0 0 %s %s' % tuple(map(strip_units, dimensions))
else:
if paths:
stuff2bound += paths
if nodes:
stuff2bound += nodes
if text_path:
stuff2bound += text_path
xmin, xmax, ymin, ymax = big_bounding_box(stuff2bound)
dx = xmax - xmin
dy = ymax - ymin
if dx == 0:
dx = 1
if dy == 0:
dy = 1
# determine stroke_widths to use (if not provided) and max_stroke_width
if paths:
if not stroke_widths:
sw = max(dx, dy) * _default_relative_stroke_width
stroke_widths = [sw]*len(paths)
max_stroke_width = sw
else:
assert len(paths) == len(stroke_widths)
max_stroke_width = max(stroke_widths)
else:
max_stroke_width = 0
# determine node_radii to use (if not provided) and max_node_diameter
if nodes:
if not node_radii:
r = max(dx, dy) * _default_relative_node_radius
node_radii = [r]*len(nodes)
max_node_diameter = 2*r
else:
assert len(nodes) == len(node_radii)
max_node_diameter = 2*max(node_radii)
else:
max_node_diameter = 0
extra_space_for_style = max(max_stroke_width, max_node_diameter)
xmin -= margin_size*dx + extra_space_for_style/2
ymin -= margin_size*dy + extra_space_for_style/2
dx += 2*margin_size*dx + extra_space_for_style
dy += 2*margin_size*dy + extra_space_for_style
viewbox = "%s %s %s %s" % (xmin, ymin, dx, dy)
if mindim is None:
szx = "{}{}".format(dx, baseunit)
szy = "{}{}".format(dy, baseunit)
else:
if dx > dy:
szx = str(mindim) + baseunit
szy = str(int(ceil(mindim * dy / dx))) + baseunit
else:
szx = str(int(ceil(mindim * dx / dy))) + baseunit
szy = str(mindim) + baseunit
dimensions = szx, szy
# Create an SVG file
if svg_attributes is not None:
dimensions = (svg_attributes.get("width", dimensions[0]),
svg_attributes.get("height", dimensions[1]))
debug = svg_attributes.get("debug", svgwrite_debug)
dwg = Drawing(filename=filename, size=dimensions, debug=debug,
**svg_attributes)
else:
dwg = Drawing(filename=filename, size=dimensions, debug=svgwrite_debug,
viewBox=viewbox)
# add paths
if paths:
for i, p in enumerate(paths):
if isinstance(p, Path):
ps = p.d()
elif is_path_segment(p):
ps = Path(p).d()
else: # assume this path, p, was input as a Path d-string
ps = p
if attributes:
good_attribs = {'d': ps}
for key in attributes[i]:
val = attributes[i][key]
if key != 'd':
try:
dwg.path(ps, **{key: val})
good_attribs.update({key: val})
except Exception as e:
warn(str(e))
dwg.add(dwg.path(**good_attribs))
else:
dwg.add(dwg.path(ps, stroke=colors[i],
stroke_width=str(stroke_widths[i]),
fill='none'))
# add nodes (filled in circles)
if nodes:
for i_pt, pt in enumerate([(z.real, z.imag) for z in nodes]):
dwg.add(dwg.circle(pt, node_radii[i_pt], fill=node_colors[i_pt]))
# add texts
if text:
assert isinstance(text, str) or (isinstance(text, list) and
isinstance(text_path, list) and
len(text_path) == len(text))
if isinstance(text, str):
text = [text]
if not font_size:
font_size = [_default_font_size]
if not text_path:
pos = complex(xmin + margin_size*dx, ymin + margin_size*dy)
text_path = [Line(pos, pos + 1).d()]
else:
if font_size:
if isinstance(font_size, list):
assert len(font_size) == len(text)
else:
font_size = [font_size] * len(text)
else:
font_size = [_default_font_size] * len(text)
for idx, s in enumerate(text):
p = text_path[idx]
if isinstance(p, Path):
ps = p.d()
elif is_path_segment(p):
ps = Path(p).d()
else: # assume this path, p, was input as a Path d-string
ps = p
# paragraph = dwg.add(dwg.g(font_size=font_size[idx]))
# paragraph.add(dwg.textPath(ps, s))
pathid = 'tp' + str(idx)
dwg.defs.add(dwg.path(d=ps, id=pathid))
txter = dwg.add(dwg.text('', font_size=font_size[idx]))
txter.add(txt.TextPath('#'+pathid, s))
if paths2Drawing:
return dwg
# save svg
if not os_path.exists(os_path.dirname(filename)):
makedirs(os_path.dirname(filename))
dwg.save()
# re-open the svg, make the xml pretty, and save it again
xmlstring = md_xml_parse(filename).toprettyxml()
with open(filename, 'w') as f:
f.write(xmlstring)
# try to open in web browser
if openinbrowser:
try:
open_in_browser(filename)
except:
print("Failed to open output SVG in browser. SVG saved to:")
print(filename)
def wsvg(paths=None, colors=None, filename=None, stroke_widths=None,
nodes=None, node_colors=None, node_radii=None,
openinbrowser=False, timestamp=False, margin_size=0.1,
mindim=600, dimensions=None, viewbox=None, text=None,
text_path=None, font_size=None, attributes=None,
svg_attributes=None, svgwrite_debug=False,
paths2Drawing=False, baseunit='px'):
"""Create SVG and write to disk.
Note: This is identical to `disvg()` except that `openinbrowser`
is false by default and an assertion error is raised if `filename
is None`.
See `disvg()` docstring for more info.
"""
assert filename is not None
return disvg(paths, colors=colors, filename=filename,
stroke_widths=stroke_widths, nodes=nodes,
node_colors=node_colors, node_radii=node_radii,
openinbrowser=openinbrowser, timestamp=timestamp,
margin_size=margin_size, mindim=mindim,
dimensions=dimensions, viewbox=viewbox, text=text,
text_path=text_path, font_size=font_size,
attributes=attributes, svg_attributes=svg_attributes,
svgwrite_debug=svgwrite_debug,
paths2Drawing=paths2Drawing, baseunit=baseunit)
def paths2Drawing(paths=None, colors=None, filename=None,
stroke_widths=None, nodes=None, node_colors=None,
node_radii=None, openinbrowser=False, timestamp=False,
margin_size=0.1, mindim=600, dimensions=None,
viewbox=None, text=None, text_path=None,
font_size=None, attributes=None, svg_attributes=None,
svgwrite_debug=False, paths2Drawing=True, baseunit='px'):
"""Create and return `svg.Drawing` object.
Note: This is identical to `disvg()` except that `paths2Drawing`
is true by default and an assertion error is raised if `filename
is None`.
See `disvg()` docstring for more info.
"""
return disvg(paths, colors=colors, filename=filename,
stroke_widths=stroke_widths, nodes=nodes,
node_colors=node_colors, node_radii=node_radii,
openinbrowser=openinbrowser, timestamp=timestamp,
margin_size=margin_size, mindim=mindim,
dimensions=dimensions, viewbox=viewbox, text=text,
text_path=text_path, font_size=font_size,
attributes=attributes, svg_attributes=svg_attributes,
svgwrite_debug=svgwrite_debug,
paths2Drawing=paths2Drawing, baseunit=baseunit)
|
mit
| -2,271,244,267,651,352,000
| 38.559252
| 79
| 0.567952
| false
| 4.000841
| false
| false
| false
|
regebro/doctrine.urwid
|
docs/conf.py
|
1
|
7899
|
# -*- coding: utf-8 -*-
#
# doctrine.urwid documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 11 19:35:12 2015.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.todo',
'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'doctrine.urwid'
copyright = u'2015, Lennart Regebro'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'doctrineurwiddoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'doctrineurwid.tex', u'doctrine.urwid Documentation',
u'Lennart Regebro', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'doctrineurwid', u'doctrine.urwid Documentation',
[u'Lennart Regebro'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'doctrineurwid', u'doctrine.urwid Documentation',
u'Lennart Regebro', 'doctrineurwid', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
|
mit
| 7,486,623,617,247,903,000
| 31.506173
| 80
| 0.704773
| false
| 3.725943
| true
| false
| false
|
nburn42/tensorflow
|
tensorflow/contrib/eager/python/datasets.py
|
1
|
6160
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Iteration over tf.data.Datasets when eager execution is enabled."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.contrib.data.python.ops import prefetching_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.util import nest
from tensorflow.python.data.util import sparse
from tensorflow.python.eager import context
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.training.checkpointable import base as checkpointable
from tensorflow.python.training.saver import BaseSaverBuilder
_uid_counter = 0
_uid_lock = threading.Lock()
def _generate_shared_name(prefix):
with _uid_lock:
global _uid_counter
uid = _uid_counter
_uid_counter += 1
return "{}{}".format(prefix, uid)
class Iterator(iterator_ops.EagerIterator, checkpointable.CheckpointableBase):
"""An iterator producing tf.Tensor objects from a tf.data.Dataset.
NOTE: Unlike the iterator created by the
@{tf.data.Dataset.make_one_shot_iterator} method, this class enables
additional experimental functionality, such as prefetching to the GPU.
"""
def __init__(self, dataset):
"""Creates a new iterator over the given dataset.
For example:
```python
dataset = tf.data.Dataset.range(4)
for x in Iterator(dataset):
print(x)
```
Tensors produced will be placed on the device on which this iterator object
was created.
Args:
dataset: A `tf.data.Dataset` object.
Raises:
TypeError: If `dataset` is an unsupported type.
RuntimeError: When invoked without eager execution enabled.
"""
if isinstance(dataset, prefetching_ops._PrefetchToDeviceDataset): # pylint: disable=protected-access
raise TypeError(
"`tf.contrib.data.prefetch_to_device()` is not compatible with "
"`tf.contrib.eager.Iterator`. Use `for ... in dataset:` to iterate "
"over the dataset instead.")
super(Iterator, self).__init__(dataset)
if not context.context().device_spec.device_type:
is_remote_device = False
else:
is_remote_device = context.context().device_spec.device_type != "CPU"
self._buffer_resource_handle = None
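# When this iterator was created on a non-CPU device (e.g. a GPU), the block
# below exposes the underlying CPU iterator through a string handle and wraps
# it in a function_buffering_resource placed on self._device, so that up to 10
# elements are prefetched from the CPU-side iterator onto this device.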
if is_remote_device:
with ops.device("/device:CPU:0"):
iter_string_handle = gen_dataset_ops.iterator_to_string_handle(
self._resource)
@function.Defun(dtypes.string)
def remote_fn(h):
remote_iterator = iterator_ops.Iterator.from_string_handle(
h, self.output_types, self.output_shapes, self.output_classes)
return remote_iterator.get_next()
remote_fn.add_to_graph(None)
target = constant_op.constant("/device:CPU:0")
with ops.device(self._device):
self._buffer_resource_handle = prefetching_ops.function_buffering_resource( # pylint: disable=line-too-long
string_arg=iter_string_handle,
f=remote_fn,
target_device=target,
buffer_size=10,
container="",
shared_name=_generate_shared_name("function_buffer_resource"))
self._buffer_resource_deleter = resource_variable_ops.EagerResourceDeleter( # pylint: disable=line-too-long
handle=self._buffer_resource_handle,
handle_device=self._device)
def _next_internal(self):
"""Returns a nested structure of `tf.Tensor`s containing the next element.
"""
# This runs in sync mode as iterators use an error status to communicate
# that there is no more data to iterate over.
# TODO(b/77291417): Fix
with context.execution_mode(context.SYNC):
if self._buffer_resource_handle is not None:
with ops.device(self._device):
ret = prefetching_ops.function_buffering_resource_get_next(
function_buffer_resource=self._buffer_resource_handle,
output_types=self._flat_output_types)
return sparse.deserialize_sparse_tensors(
nest.pack_sequence_as(self._output_types, ret), self._output_types,
self._output_shapes, self._output_classes)
else:
return super(Iterator, self)._next_internal()
# TODO(shivaniagrawal): Expose checkpointable stateful objects from dataset
# attributes(potential).
class _Saveable(BaseSaverBuilder.SaveableObject):
"""SaveableObject for saving/restoring iterator state."""
def __init__(self, iterator_resource, name):
serialized_iterator = gen_dataset_ops.serialize_iterator(
iterator_resource)
specs = [
BaseSaverBuilder.SaveSpec(serialized_iterator, "", name + "_STATE")
]
# pylint: disable=protected-access
super(Iterator._Saveable, self).__init__(iterator_resource, specs, name)
def restore(self, restored_tensors, restored_shapes):
with ops.colocate_with(self.op):
return gen_dataset_ops.deserialize_iterator(self.op,
restored_tensors[0])
def _gather_saveables_for_checkpoint(self):
def _saveable_factory(name):
return self._Saveable(self._resource, name)
return {"ITERATOR": _saveable_factory}
|
apache-2.0
| -1,686,156,188,781,747,200
| 38.235669
| 116
| 0.68539
| false
| 4.128686
| false
| false
| false
|
matandobr/Mobile-Security-Framework-MobSF
|
StaticAnalyzer/views/android/generate_downloads.py
|
1
|
1944
|
# -*- coding: utf_8 -*-
"""Generate Zipped downloads."""
import logging
import os
import re
import shutil
from django.conf import settings
from django.shortcuts import redirect
from MobSF.utils import print_n_send_error_response
logger = logging.getLogger(__name__)
def run(request):
"""Generate downloads for apk, java and smali."""
try:
logger.info('Generating Downloads')
md5 = request.GET['hash']
file_type = request.GET['file_type']
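# Only proceed for a well-formed 32-character hex hash and a known file_type
# ('apk', 'smali' or 'java'); anything else is rejected as invalid options.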
match = re.match('^[0-9a-f]{32}$', md5)
if not match or file_type not in ['apk', 'smali', 'java']:
logger.exception('Invalid options')
return print_n_send_error_response(request,
'Invalid options')
app_dir = os.path.join(settings.UPLD_DIR, md5)
file_name = ''
if file_type == 'java':
# For Java
file_name = md5 + '-java'
directory = os.path.join(app_dir, 'java_source/')
dwd_dir = os.path.join(settings.DWD_DIR, file_name)
shutil.make_archive(dwd_dir, 'zip', directory)
file_name = file_name + '.zip'
elif file_type == 'smali':
# For Smali
file_name = md5 + '-smali'
directory = os.path.join(app_dir, 'smali_source/')
dwd_dir = os.path.join(settings.DWD_DIR, file_name)
shutil.make_archive(dwd_dir, 'zip', directory)
file_name = file_name + '.zip'
elif file_type == 'apk':
file_name = md5 + '.apk'
src = os.path.join(app_dir, file_name)
dst = os.path.join(settings.DWD_DIR, file_name)
shutil.copy2(src, dst)
return redirect('/download/' + file_name)
except Exception:
logger.exception('Generating Downloads')
return print_n_send_error_response(request,
'Generating Downloads')
|
gpl-3.0
| -6,746,440,024,267,676,000
| 35.679245
| 67
| 0.55607
| false
| 3.717017
| false
| false
| false
|
isislovecruft/arm
|
arm/controller.py
|
1
|
18726
|
"""
Main interface loop for arm, periodically redrawing the screen and issuing
user input to the proper panels.
"""
import os
import time
import curses
import threading
import arm.arguments
import arm.menu.menu
import arm.popups
import arm.headerPanel
import arm.logPanel
import arm.configPanel
import arm.torrcPanel
import arm.graphing.graphPanel
import arm.graphing.bandwidthStats
import arm.graphing.connStats
import arm.graphing.resourceStats
import arm.connections.connPanel
import arm.util.tracker
from stem.control import State
from arm.util import panel, torConfig, torTools
from stem.util import conf, enum, log, system
ARM_CONTROLLER = None
def conf_handler(key, value):
if key == "features.redrawRate":
return max(1, value)
elif key == "features.refreshRate":
return max(0, value)
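# conf_handler sanitizes the rate settings as they are loaded: the redraw rate
# is clamped to at least 1 and the refresh rate to at least 0.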
CONFIG = conf.config_dict("arm", {
"startup.events": "N3",
"startup.data_directory": "~/.arm",
"features.panels.show.graph": True,
"features.panels.show.log": True,
"features.panels.show.connection": True,
"features.panels.show.config": True,
"features.panels.show.torrc": True,
"features.redrawRate": 5,
"features.refreshRate": 5,
"features.confirmQuit": True,
"features.graph.type": 1,
"features.graph.bw.prepopulate": True,
"start_time": 0,
}, conf_handler)
GraphStat = enum.Enum("BANDWIDTH", "CONNECTIONS", "SYSTEM_RESOURCES")
# maps 'features.graph.type' config values to the initial types
GRAPH_INIT_STATS = {1: GraphStat.BANDWIDTH, 2: GraphStat.CONNECTIONS, 3: GraphStat.SYSTEM_RESOURCES}
def get_controller():
"""
Provides the arm controller instance.
"""
return ARM_CONTROLLER
def stop_controller():
"""
Halts our Controller, providing back the thread doing so.
"""
def halt_controller():
control = get_controller()
if control:
for panel_impl in control.get_daemon_panels():
panel_impl.stop()
for panel_impl in control.get_daemon_panels():
panel_impl.join()
halt_thread = threading.Thread(target = halt_controller)
halt_thread.start()
return halt_thread
def init_controller(stdscr, start_time):
"""
Spawns the controller, and related panels for it.
Arguments:
stdscr - curses window
"""
global ARM_CONTROLLER
# initializes the panels
sticky_panels = [
arm.headerPanel.HeaderPanel(stdscr, start_time),
LabelPanel(stdscr),
]
page_panels, first_page_panels = [], []
# first page: graph and log
if CONFIG["features.panels.show.graph"]:
first_page_panels.append(arm.graphing.graphPanel.GraphPanel(stdscr))
if CONFIG["features.panels.show.log"]:
expanded_events = arm.arguments.expand_events(CONFIG["startup.events"])
first_page_panels.append(arm.logPanel.LogPanel(stdscr, expanded_events))
if first_page_panels:
page_panels.append(first_page_panels)
# second page: connections
if CONFIG["features.panels.show.connection"]:
page_panels.append([arm.connections.connPanel.ConnectionPanel(stdscr)])
# The DisableDebuggerAttachment will prevent our connection panel from really
# functioning. It'll have circuits, but little else. If this is the case then
# notify the user and tell them what they can do to fix it.
controller = torTools.get_conn().controller
if controller.get_conf("DisableDebuggerAttachment", None) == "1":
log.notice("Tor is preventing system utilities like netstat and lsof from working. This means that arm can't provide you with connection information. You can change this by adding 'DisableDebuggerAttachment 0' to your torrc and restarting tor. For more information see...\nhttps://trac.torproject.org/3313")
arm.util.tracker.get_connection_tracker().set_paused(True)
else:
# Configures connection resolutions. This is paused/unpaused according to
# if Tor's connected or not.
controller.add_status_listener(conn_reset_listener)
tor_pid = controller.get_pid(None)
if tor_pid:
# use the tor pid to help narrow connection results
tor_cmd = system.get_name_by_pid(tor_pid)
if tor_cmd is None:
tor_cmd = "tor"
resolver = arm.util.tracker.get_connection_tracker()
log.info("Operating System: %s, Connection Resolvers: %s" % (os.uname()[0], ", ".join(resolver._resolvers)))
resolver.start()
else:
# constructs singleton resolver and, if tor isn't connected, initializes
# it to be paused
arm.util.tracker.get_connection_tracker().set_paused(not controller.is_alive())
# third page: config
if CONFIG["features.panels.show.config"]:
page_panels.append([arm.configPanel.ConfigPanel(stdscr, arm.configPanel.State.TOR)])
# fourth page: torrc
if CONFIG["features.panels.show.torrc"]:
page_panels.append([arm.torrcPanel.TorrcPanel(stdscr, arm.torrcPanel.Config.TORRC)])
# initializes the controller
ARM_CONTROLLER = Controller(stdscr, sticky_panels, page_panels)
# additional configuration for the graph panel
graph_panel = ARM_CONTROLLER.get_panel("graph")
if graph_panel:
# statistical monitors for graph
bw_stats = arm.graphing.bandwidthStats.BandwidthStats()
graph_panel.add_stats(GraphStat.BANDWIDTH, bw_stats)
graph_panel.add_stats(GraphStat.SYSTEM_RESOURCES, arm.graphing.resourceStats.ResourceStats())
if CONFIG["features.panels.show.connection"]:
graph_panel.add_stats(GraphStat.CONNECTIONS, arm.graphing.connStats.ConnStats())
# sets graph based on config parameter
try:
initial_stats = GRAPH_INIT_STATS.get(CONFIG["features.graph.type"])
graph_panel.set_stats(initial_stats)
except ValueError:
pass # invalid stats, maybe connections when lookups are disabled
# prepopulates bandwidth values from state file
if CONFIG["features.graph.bw.prepopulate"] and torTools.get_conn().is_alive():
is_successful = bw_stats.prepopulate_from_state()
if is_successful:
graph_panel.update_interval = 4
class LabelPanel(panel.Panel):
"""
Panel that just displays a single line of text.
"""
def __init__(self, stdscr):
panel.Panel.__init__(self, stdscr, "msg", 0, height=1)
self.msg_text = ""
self.msg_attr = curses.A_NORMAL
def set_message(self, msg, attr = None):
"""
Sets the message being displayed by the panel.
Arguments:
msg - string to be displayed
attr - attribute for the label, normal text if undefined
"""
if attr is None:
attr = curses.A_NORMAL
self.msg_text = msg
self.msg_attr = attr
def draw(self, width, height):
self.addstr(0, 0, self.msg_text, self.msg_attr)
class Controller:
"""
Tracks the global state of the interface
"""
def __init__(self, stdscr, sticky_panels, page_panels):
"""
Creates a new controller instance. Panel lists are ordered as they appear,
top to bottom on the page.
Arguments:
stdscr - curses window
sticky_panels - panels shown at the top of each page
page_panels - list of pages, each being a list of the panels on it
"""
self._screen = stdscr
self._sticky_panels = sticky_panels
self._page_panels = page_panels
self._page = 0
self._is_paused = False
self._force_redraw = False
self._is_done = False
self._last_drawn = 0
self.set_msg() # initializes our control message
def get_screen(self):
"""
Provides our curses window.
"""
return self._screen
def get_page_count(self):
"""
Provides the number of pages the interface has. This may be zero if all
page panels have been disabled.
"""
return len(self._page_panels)
def get_page(self):
"""
Provides the current page number. Page numbers start at zero.
"""
return self._page
def set_page(self, page_number):
"""
Sets the selected page, raising a ValueError if the page number is invalid.
Arguments:
page_number - page number to be selected
"""
if page_number < 0 or page_number >= self.get_page_count():
raise ValueError("Invalid page number: %i" % page_number)
if page_number != self._page:
self._page = page_number
self._force_redraw = True
self.set_msg()
def next_page(self):
"""
Increments the page number.
"""
self.set_page((self._page + 1) % len(self._page_panels))
def prev_page(self):
"""
Decrements the page number.
"""
self.set_page((self._page - 1) % len(self._page_panels))
def is_paused(self):
"""
True if the interface is paused, false otherwise.
"""
return self._is_paused
def set_paused(self, is_pause):
"""
Sets the interface to be paused or unpaused.
"""
if is_pause != self._is_paused:
self._is_paused = is_pause
self._force_redraw = True
self.set_msg()
for panel_impl in self.get_all_panels():
panel_impl.set_paused(is_pause)
def get_panel(self, name):
"""
Provides the panel with the given identifier. This returns None if no such
panel exists.
Arguments:
name - name of the panel to be fetched
"""
for panel_impl in self.get_all_panels():
if panel_impl.get_name() == name:
return panel_impl
return None
def get_sticky_panels(self):
"""
Provides the panels visible at the top of every page.
"""
return list(self._sticky_panels)
def get_display_panels(self, page_number = None, include_sticky = True):
"""
Provides all panels belonging to a page and sticky content above it. This
is ordered the way they are presented (top to bottom) on the page.
Arguments:
page_number - page number of the panels to be returned, the current
page if None
include_sticky - includes sticky panels in the results if true
"""
return_page = self._page if page_number is None else page_number
if self._page_panels:
if include_sticky:
return self._sticky_panels + self._page_panels[return_page]
else:
return list(self._page_panels[return_page])
else:
return self._sticky_panels if include_sticky else []
def get_daemon_panels(self):
"""
Provides thread panels.
"""
thread_panels = []
for panel_impl in self.get_all_panels():
if isinstance(panel_impl, threading.Thread):
thread_panels.append(panel_impl)
return thread_panels
def get_all_panels(self):
"""
Provides all panels in the interface.
"""
all_panels = list(self._sticky_panels)
for page in self._page_panels:
all_panels += list(page)
return all_panels
def redraw(self, force = True):
"""
Redraws the displayed panel content.
Arguments:
force - redraws regardless of whether it's needed if true, otherwise ignores
the request when there aren't changes to be displayed
"""
force |= self._force_redraw
self._force_redraw = False
current_time = time.time()
if CONFIG["features.refreshRate"] != 0:
if self._last_drawn + CONFIG["features.refreshRate"] <= current_time:
force = True
display_panels = self.get_display_panels()
occupied_content = 0
for panel_impl in display_panels:
panel_impl.set_top(occupied_content)
occupied_content += panel_impl.get_height()
# apparently curses may cache display contents unless we explicitly
# request a redraw here...
# https://trac.torproject.org/projects/tor/ticket/2830#comment:9
if force:
self._screen.clear()
for panel_impl in display_panels:
panel_impl.redraw(force)
if force:
self._last_drawn = current_time
def request_redraw(self):
"""
Requests that all content is redrawn when the interface is next rendered.
"""
self._force_redraw = True
def get_last_redraw_time(self):
"""
Provides the time when the content was last redrawn, zero if the content
has never been drawn.
"""
return self._last_drawn
def set_msg(self, msg = None, attr = None, redraw = False):
"""
Sets the message displayed in the interface's control panel. This uses our
default prompt if no arguments are provided.
Arguments:
msg - string to be displayed
attr - attribute for the label, normal text if undefined
redraw - redraws right away if true, otherwise redraws when display
content is next normally drawn
"""
if msg is None:
msg = ""
if attr is None:
if not self._is_paused:
msg = "page %i / %i - m: menu, p: pause, h: page help, q: quit" % (self._page + 1, len(self._page_panels))
attr = curses.A_NORMAL
else:
msg = "Paused"
attr = curses.A_STANDOUT
control_panel = self.get_panel("msg")
control_panel.set_message(msg, attr)
if redraw:
control_panel.redraw(True)
else:
self._force_redraw = True
def get_data_directory(self):
"""
Provides the path where arm's resources are being placed. The path ends
with a slash and is created if it doesn't already exist.
"""
data_dir = os.path.expanduser(CONFIG["startup.data_directory"])
if not data_dir.endswith("/"):
data_dir += "/"
if not os.path.exists(data_dir):
os.makedirs(data_dir)
return data_dir
def is_done(self):
"""
True if arm should be terminated, false otherwise.
"""
return self._is_done
def quit(self):
"""
Terminates arm after the input is processed. Optionally if we're connected
to an arm-generated tor instance then this may check if that should be shut
down too.
"""
self._is_done = True
# check if the torrc has a "ARM_SHUTDOWN" comment flag, if so then shut
# down the instance
is_shutdown_flag_present = False
torrc_contents = torConfig.get_torrc().get_contents()
if torrc_contents:
for line in torrc_contents:
if "# ARM_SHUTDOWN" in line:
is_shutdown_flag_present = True
break
if is_shutdown_flag_present:
try:
torTools.get_conn().shutdown()
except IOError as exc:
arm.popups.show_msg(str(exc), 3, curses.A_BOLD)
def heartbeat_check(is_unresponsive):
"""
Logs if it's been ten seconds since the last BW event.
Arguments:
is_unresponsive - flag for whether we've already indicated that the relay is unresponsive
"""
conn = torTools.get_conn()
last_heartbeat = conn.controller.get_latest_heartbeat()
if conn.is_alive():
if not is_unresponsive and (time.time() - last_heartbeat) >= 10:
is_unresponsive = True
log.notice("Relay unresponsive (last heartbeat: %s)" % time.ctime(last_heartbeat))
elif is_unresponsive and (time.time() - last_heartbeat) < 10:
# really shouldn't happen (would mean Tor froze for a bit)
is_unresponsive = False
log.notice("Relay resumed")
return is_unresponsive
def conn_reset_listener(controller, event_type, _):
"""
Pauses connection resolution when tor's shut down, and resumes with the new
pid if started again.
"""
resolver = arm.util.tracker.get_connection_tracker()
if resolver.is_alive():
resolver.set_paused(event_type == State.CLOSED)
if event_type in (State.INIT, State.RESET):
# Reload the torrc contents. If the torrc panel is present then it will
# do this instead since it wants to do validation and redraw _after_ the
# new contents are loaded.
if get_controller().get_panel("torrc") is None:
torConfig.get_torrc().load(True)
def start_arm(stdscr):
"""
Main draw loop context.
Arguments:
stdscr - curses window
"""
start_time = CONFIG['start_time']
init_controller(stdscr, start_time)
control = get_controller()
# provides notice about any unused config keys
for key in conf.get_config("arm").unused_keys():
log.notice("Unused configuration entry: %s" % key)
# tells daemon panels to start
for panel_impl in control.get_daemon_panels():
panel_impl.start()
# allows for background transparency
try:
curses.use_default_colors()
except curses.error:
pass
# makes the cursor invisible
try:
curses.curs_set(0)
except curses.error:
pass
# logs the initialization time
log.info("arm started (initialization took %0.3f seconds)" % (time.time() - start_time))
# main draw loop
override_key = None # uses this rather than waiting on user input
is_unresponsive = False # flag for heartbeat responsiveness check
while not control.is_done():
display_panels = control.get_display_panels()
is_unresponsive = heartbeat_check(is_unresponsive)
# sets panel visibility
for panel_impl in control.get_all_panels():
panel_impl.set_visible(panel_impl in display_panels)
# redraws the interface if it's needed
control.redraw(False)
stdscr.refresh()
# wait for user keyboard input until timeout, unless an override was set
if override_key:
key, override_key = override_key, None
else:
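# curses.halfdelay() takes tenths of a second, so this waits up to
# features.redrawRate seconds for a keypress before getch() times out.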
curses.halfdelay(CONFIG["features.redrawRate"] * 10)
key = stdscr.getch()
if key == curses.KEY_RIGHT:
control.next_page()
elif key == curses.KEY_LEFT:
control.prev_page()
elif key == ord('p') or key == ord('P'):
control.set_paused(not control.is_paused())
elif key == ord('m') or key == ord('M'):
arm.menu.menu.show_menu()
elif key == ord('q') or key == ord('Q'):
# provides prompt to confirm that arm should exit
if CONFIG["features.confirmQuit"]:
msg = "Are you sure (q again to confirm)?"
confirmation_key = arm.popups.show_msg(msg, attr = curses.A_BOLD)
quit_confirmed = confirmation_key in (ord('q'), ord('Q'))
else:
quit_confirmed = True
if quit_confirmed:
control.quit()
elif key == ord('x') or key == ord('X'):
# provides prompt to confirm that arm should issue a sighup
msg = "This will reset Tor's internal state. Are you sure (x again to confirm)?"
confirmation_key = arm.popups.show_msg(msg, attr = curses.A_BOLD)
if confirmation_key in (ord('x'), ord('X')):
try:
torTools.get_conn().reload()
except IOError as exc:
log.error("Error detected when reloading tor: %s" % exc.strerror)
elif key == ord('h') or key == ord('H'):
override_key = arm.popups.show_help_popup()
elif key == ord('l') - 96:
# force redraw when ctrl+l is pressed
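# (curses reports Ctrl+<letter> as the letter's ordinal minus 96, so
# ord('l') - 96 == 12 is the Ctrl+L control code)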
control.redraw(True)
else:
for panel_impl in display_panels:
is_keystroke_consumed = panel_impl.handle_key(key)
if is_keystroke_consumed:
break
|
gpl-3.0
| -8,813,717,470,918,531,000
| 26.297376
| 313
| 0.659885
| false
| 3.714002
| true
| false
| false
|
ty707/airflow
|
airflow/jobs.py
|
1
|
43857
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from past.builtins import basestring
from collections import defaultdict, Counter
from datetime import datetime, timedelta
import getpass
import logging
import socket
import subprocess
import multiprocessing
import math
from time import sleep
from sqlalchemy import Column, Integer, String, DateTime, func, Index, or_
from sqlalchemy.orm.session import make_transient
from airflow import executors, models, settings
from airflow import configuration as conf
from airflow.exceptions import AirflowException
from airflow.utils.state import State
from airflow.utils.db import provide_session, pessimistic_connection_handling
from airflow.utils.email import send_email
from airflow.utils.logging import LoggingMixin
from airflow.utils import asciiart
from airflow.settings import Stats
DagRun = models.DagRun
Base = models.Base
ID_LEN = models.ID_LEN
Stats = settings.Stats
class BaseJob(Base, LoggingMixin):
"""
Abstract class to be derived for jobs. Jobs are processing items with state
and duration that aren't task instances. For instance a BackfillJob is
    a collection of task instance runs, but should have its own state, start
and end time.
"""
__tablename__ = "job"
id = Column(Integer, primary_key=True)
dag_id = Column(String(ID_LEN),)
state = Column(String(20))
job_type = Column(String(30))
start_date = Column(DateTime())
end_date = Column(DateTime())
latest_heartbeat = Column(DateTime())
executor_class = Column(String(500))
hostname = Column(String(500))
unixname = Column(String(1000))
__mapper_args__ = {
'polymorphic_on': job_type,
'polymorphic_identity': 'BaseJob'
}
__table_args__ = (
Index('job_type_heart', job_type, latest_heartbeat),
)
def __init__(
self,
executor=executors.DEFAULT_EXECUTOR,
heartrate=conf.getfloat('scheduler', 'JOB_HEARTBEAT_SEC'),
*args, **kwargs):
self.hostname = socket.getfqdn()
self.executor = executor
self.executor_class = executor.__class__.__name__
self.start_date = datetime.now()
self.latest_heartbeat = datetime.now()
self.heartrate = heartrate
self.unixname = getpass.getuser()
super(BaseJob, self).__init__(*args, **kwargs)
def is_alive(self):
return (
(datetime.now() - self.latest_heartbeat).seconds <
(conf.getint('scheduler', 'JOB_HEARTBEAT_SEC') * 2.1)
)
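    # Numeric sketch (illustrative, not from the original source): with
    # JOB_HEARTBEAT_SEC = 5, is_alive() keeps returning True until more than
    # 5 * 2.1 = 10.5 seconds elapse without a heartbeat, so one slightly late
    # beat does not immediately mark the job as dead.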
def kill(self):
session = settings.Session()
job = session.query(BaseJob).filter(BaseJob.id == self.id).first()
job.end_date = datetime.now()
try:
self.on_kill()
except:
self.logger.error('on_kill() method failed')
session.merge(job)
session.commit()
session.close()
raise AirflowException("Job shut down externally.")
def on_kill(self):
'''
Will be called when an external kill command is received
'''
pass
def heartbeat_callback(self, session=None):
pass
def heartbeat(self):
'''
Heartbeats update the job's entry in the database with a timestamp
        for the latest_heartbeat and allow the job to be killed
        externally. This makes it possible to monitor, at the system level,
        what is actually active.
For instance, an old heartbeat for SchedulerJob would mean something
is wrong.
This also allows for any job to be killed externally, regardless
of who is running it or on which machine it is running.
Note that if your heartbeat is set to 60 seconds and you call this
method after 10 seconds of processing since the last heartbeat, it
will sleep 50 seconds to complete the 60 seconds and keep a steady
heart rate. If you go over 60 seconds before calling it, it won't
sleep at all.
'''
session = settings.Session()
job = session.query(BaseJob).filter_by(id=self.id).one()
if job.state == State.SHUTDOWN:
self.kill()
if job.latest_heartbeat:
sleep_for = self.heartrate - (
datetime.now() - job.latest_heartbeat).total_seconds()
if sleep_for > 0:
sleep(sleep_for)
job.latest_heartbeat = datetime.now()
session.merge(job)
session.commit()
self.heartbeat_callback(session=session)
session.close()
self.logger.debug('[heart] Boom.')
def run(self):
Stats.incr(self.__class__.__name__.lower()+'_start', 1, 1)
# Adding an entry in the DB
session = settings.Session()
self.state = State.RUNNING
session.add(self)
session.commit()
id_ = self.id
make_transient(self)
self.id = id_
# Run
self._execute()
# Marking the success in the DB
self.end_date = datetime.now()
self.state = State.SUCCESS
session.merge(self)
session.commit()
session.close()
Stats.incr(self.__class__.__name__.lower()+'_end', 1, 1)
def _execute(self):
raise NotImplementedError("This method needs to be overridden")
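# Illustrative sketch (not part of the original module): the minimal contract for
# a BaseJob subclass is a polymorphic identity plus an _execute() override;
# run(), heartbeat() and kill() are inherited. The class name and log message are
# hypothetical.
class ExampleNoopJob(BaseJob):
    __mapper_args__ = {
        'polymorphic_identity': 'ExampleNoopJob'
    }
    def _execute(self):
        self.logger.info("Example job body: nothing to do.")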
class SchedulerJob(BaseJob):
"""
This SchedulerJob runs indefinitely and constantly schedules the jobs
that are ready to run. It figures out the latest runs for each
    task and sees if the dependencies for the next schedules are met.
If so it triggers the task instance. It does this for each task
in each DAG and repeats.
:param dag_id: to run the scheduler for a single specific DAG
:type dag_id: string
:param subdir: to search for DAG under a certain folder only
:type subdir: string
:param test_mode: used for unit testing this class only, runs a single
schedule run
:type test_mode: bool
:param refresh_dags_every: force refresh the DAG definition every N
runs, as specified here
:type refresh_dags_every: int
:param do_pickle: to pickle the DAG object and send over to workers
for non-local executors
:type do_pickle: bool
"""
__mapper_args__ = {
'polymorphic_identity': 'SchedulerJob'
}
def __init__(
self,
dag_id=None,
dag_ids=None,
subdir=None,
test_mode=False,
refresh_dags_every=10,
num_runs=None,
do_pickle=False,
*args, **kwargs):
# for BaseJob compatibility
self.dag_id = dag_id
self.dag_ids = [dag_id] if dag_id else []
if dag_ids:
self.dag_ids.extend(dag_ids)
self.subdir = subdir
if test_mode:
self.num_runs = 1
else:
self.num_runs = num_runs
self.refresh_dags_every = refresh_dags_every
self.do_pickle = do_pickle
super(SchedulerJob, self).__init__(*args, **kwargs)
self.heartrate = conf.getint('scheduler', 'SCHEDULER_HEARTBEAT_SEC')
self.max_threads = min(conf.getint('scheduler', 'max_threads'), multiprocessing.cpu_count())
if 'sqlite' in conf.get('core', 'sql_alchemy_conn'):
if self.max_threads > 1:
self.logger.error("Cannot use more than 1 thread when using sqlite. Setting max_threads to 1")
self.max_threads = 1
@provide_session
def manage_slas(self, dag, session=None):
"""
        Finds all tasks that have SLAs defined and sends alert emails
        where needed. New SLA misses are also recorded in the database.
        We assume that the scheduler runs often, so we only check for
        tasks that should have succeeded in the past hour.
"""
TI = models.TaskInstance
sq = (
session
.query(
TI.task_id,
func.max(TI.execution_date).label('max_ti'))
.filter(TI.dag_id == dag.dag_id)
.filter(TI.state == State.SUCCESS)
.filter(TI.task_id.in_(dag.task_ids))
.group_by(TI.task_id).subquery('sq')
)
max_tis = session.query(TI).filter(
TI.dag_id == dag.dag_id,
TI.task_id == sq.c.task_id,
TI.execution_date == sq.c.max_ti,
).all()
ts = datetime.now()
SlaMiss = models.SlaMiss
for ti in max_tis:
task = dag.get_task(ti.task_id)
dttm = ti.execution_date
if task.sla:
dttm = dag.following_schedule(dttm)
while dttm < datetime.now():
following_schedule = dag.following_schedule(dttm)
if following_schedule + task.sla < datetime.now():
session.merge(models.SlaMiss(
task_id=ti.task_id,
dag_id=ti.dag_id,
execution_date=dttm,
timestamp=ts))
dttm = dag.following_schedule(dttm)
session.commit()
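        # Worked example (a sketch): for a @daily DAG whose task has
        # sla=timedelta(hours=1), the run with execution_date 2016-01-01 is
        # checked against its period end 2016-01-02, so an SlaMiss row is
        # recorded once 2016-01-02 01:00 has passed without a recorded success.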
slas = (
session
.query(SlaMiss)
            .filter(or_(SlaMiss.email_sent.is_(False), SlaMiss.notification_sent.is_(False)))
.filter(SlaMiss.dag_id == dag.dag_id)
.all()
)
if slas:
sla_dates = [sla.execution_date for sla in slas]
qry = (
session
.query(TI)
.filter(TI.state != State.SUCCESS)
.filter(TI.execution_date.in_(sla_dates))
.filter(TI.dag_id == dag.dag_id)
.all()
)
blocking_tis = []
for ti in qry:
if ti.task_id in dag.task_ids:
ti.task = dag.get_task(ti.task_id)
blocking_tis.append(ti)
else:
session.delete(ti)
session.commit()
blocking_tis = ([ti for ti in blocking_tis
if ti.are_dependencies_met(session=session)])
task_list = "\n".join([
sla.task_id + ' on ' + sla.execution_date.isoformat()
for sla in slas])
blocking_task_list = "\n".join([
ti.task_id + ' on ' + ti.execution_date.isoformat()
for ti in blocking_tis])
# Track whether email or any alert notification sent
# We consider email or the alert callback as notifications
email_sent = False
notification_sent = False
if dag.sla_miss_callback:
# Execute the alert callback
self.logger.info(' --------------> ABOUT TO CALL SLA MISS CALL BACK ')
dag.sla_miss_callback(dag, task_list, blocking_task_list, slas, blocking_tis)
notification_sent = True
email_content = """\
            Here's a list of tasks that missed their SLAs:
            <pre><code>{task_list}\n</code></pre>
            Blocking tasks:
            <pre><code>{blocking_task_list}\n{bug}</code></pre>
""".format(bug=asciiart.bug, **locals())
emails = []
for t in dag.tasks:
if t.email:
if isinstance(t.email, basestring):
l = [t.email]
elif isinstance(t.email, (list, tuple)):
l = t.email
for email in l:
if email not in emails:
emails.append(email)
if emails and len(slas):
send_email(
emails,
"[airflow] SLA miss on DAG=" + dag.dag_id,
email_content)
email_sent = True
notification_sent = True
# If we sent any notification, update the sla_miss table
if notification_sent:
for sla in slas:
if email_sent:
sla.email_sent = True
sla.notification_sent = True
session.merge(sla)
session.commit()
session.close()
def import_errors(self, dagbag):
session = settings.Session()
session.query(models.ImportError).delete()
for filename, stacktrace in list(dagbag.import_errors.items()):
session.add(models.ImportError(
filename=filename, stacktrace=stacktrace))
session.commit()
@provide_session
def schedule_dag(self, dag, session=None):
"""
This method checks whether a new DagRun needs to be created
for a DAG based on scheduling interval
Returns DagRun if one is scheduled. Otherwise returns None.
"""
if dag.schedule_interval:
DagRun = models.DagRun
active_runs = DagRun.find(
dag_id=dag.dag_id,
state=State.RUNNING,
external_trigger=False,
session=session
)
# return if already reached maximum active runs and no timeout setting
if len(active_runs) >= dag.max_active_runs and not dag.dagrun_timeout:
return
timedout_runs = 0
for dr in active_runs:
if (
dr.start_date and dag.dagrun_timeout and
dr.start_date < datetime.now() - dag.dagrun_timeout):
dr.state = State.FAILED
dr.end_date = datetime.now()
timedout_runs += 1
session.commit()
if len(active_runs) - timedout_runs >= dag.max_active_runs:
return
# this query should be replaced by find dagrun
qry = (
session.query(func.max(DagRun.execution_date))
.filter_by(dag_id=dag.dag_id)
.filter(or_(
DagRun.external_trigger == False,
# add % as a wildcard for the like query
DagRun.run_id.like(DagRun.ID_PREFIX+'%')
))
)
last_scheduled_run = qry.scalar()
# don't schedule @once again
if dag.schedule_interval == '@once' and last_scheduled_run:
return None
next_run_date = None
if not last_scheduled_run:
# First run
task_start_dates = [t.start_date for t in dag.tasks]
if task_start_dates:
next_run_date = dag.normalize_schedule(min(task_start_dates))
self.logger.debug("Next run date based on tasks {}"
.format(next_run_date))
else:
next_run_date = dag.following_schedule(last_scheduled_run)
# don't ever schedule prior to the dag's start_date
if dag.start_date:
next_run_date = (dag.start_date if not next_run_date
else max(next_run_date, dag.start_date))
if next_run_date == dag.start_date:
next_run_date = dag.normalize_schedule(dag.start_date)
self.logger.debug("Dag start date: {}. Next run date: {}"
.format(dag.start_date, next_run_date))
# this structure is necessary to avoid a TypeError from concatenating
# NoneType
if dag.schedule_interval == '@once':
period_end = next_run_date
elif next_run_date:
period_end = dag.following_schedule(next_run_date)
# Don't schedule a dag beyond its end_date (as specified by the dag param)
if next_run_date and dag.end_date and next_run_date > dag.end_date:
return
# Don't schedule a dag beyond its end_date (as specified by the task params)
# Get the min task end date, which may come from the dag.default_args
min_task_end_date = []
task_end_dates = [t.end_date for t in dag.tasks if t.end_date]
if task_end_dates:
min_task_end_date = min(task_end_dates)
if next_run_date and min_task_end_date and next_run_date > min_task_end_date:
return
if next_run_date and period_end and period_end <= datetime.now():
next_run = dag.create_dagrun(
run_id='scheduled__' + next_run_date.isoformat(),
execution_date=next_run_date,
start_date=datetime.now(),
state=State.RUNNING,
external_trigger=False
)
return next_run
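    # Worked example (a sketch): for a @daily DAG whose last scheduled run had
    # execution_date 2016-01-01, next_run_date becomes 2016-01-02 and period_end
    # becomes 2016-01-03, so a DagRun with run_id
    # 'scheduled__2016-01-02T00:00:00' is created once the clock reaches 2016-01-03.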
def process_dag(self, dag, queue):
"""
This method schedules a single DAG by looking at the latest
run for each task and attempting to schedule the following run.
As multiple schedulers may be running for redundancy, this
function takes a lock on the DAG and timestamps the last run
in ``last_scheduler_run``.
"""
DagModel = models.DagModel
session = settings.Session()
# picklin'
pickle_id = None
if self.do_pickle and self.executor.__class__ not in (
executors.LocalExecutor, executors.SequentialExecutor):
pickle_id = dag.pickle(session).id
# obtain db lock
db_dag = session.query(DagModel).filter_by(
dag_id=dag.dag_id
).with_for_update().one()
last_scheduler_run = db_dag.last_scheduler_run or datetime(2000, 1, 1)
secs_since_last = (datetime.now() - last_scheduler_run).total_seconds()
if secs_since_last < self.heartrate:
# release db lock
session.commit()
session.close()
return None
# Release the db lock
        # the assumption here is that process_dag will take less
        # time than self.heartrate; otherwise we might unlock too
        # quickly and this should be moved below, but that would increase
        # the time the record is locked and blocks other calls.
db_dag.last_scheduler_run = datetime.now()
session.commit()
# update the state of the previously active dag runs
dag_runs = DagRun.find(dag_id=dag.dag_id, state=State.RUNNING, session=session)
active_dag_runs = []
for run in dag_runs:
# do not consider runs that are executed in the future
if run.execution_date > datetime.now():
continue
# todo: run.dag is transient but needs to be set
run.dag = dag
# todo: preferably the integrity check happens at dag collection time
run.verify_integrity(session=session)
run.update_state(session=session)
if run.state == State.RUNNING:
make_transient(run)
active_dag_runs.append(run)
for run in active_dag_runs:
            # this needs a fresh session; sometimes TIs get detached
tis = run.get_task_instances(state=(State.NONE,
State.UP_FOR_RETRY))
# this loop is quite slow as it uses are_dependencies_met for
# every task (in ti.is_runnable). This is also called in
# update_state above which has already checked these tasks
for ti in tis:
task = dag.get_task(ti.task_id)
# fixme: ti.task is transient but needs to be set
ti.task = task
# future: remove adhoc
if task.adhoc:
continue
if ti.is_runnable(flag_upstream_failed=True):
self.logger.debug('Queuing task: {}'.format(ti))
queue.put((ti.key, pickle_id))
session.close()
@provide_session
def prioritize_queued(self, session, executor, dagbag):
# Prioritizing queued task instances
pools = {p.pool: p for p in session.query(models.Pool).all()}
TI = models.TaskInstance
queued_tis = (
session.query(TI)
.filter(TI.state == State.QUEUED)
.all()
)
self.logger.info(
"Prioritizing {} queued jobs".format(len(queued_tis)))
session.expunge_all()
d = defaultdict(list)
for ti in queued_tis:
if ti.dag_id not in dagbag.dags:
self.logger.info(
"DAG no longer in dagbag, deleting {}".format(ti))
session.delete(ti)
session.commit()
elif not dagbag.dags[ti.dag_id].has_task(ti.task_id):
self.logger.info(
"Task no longer exists, deleting {}".format(ti))
session.delete(ti)
session.commit()
else:
d[ti.pool].append(ti)
dag_blacklist = set(dagbag.paused_dags())
for pool, tis in list(d.items()):
if not pool:
# Arbitrary:
# If queued outside of a pool, trigger no more than
# non_pooled_task_slot_count per run
open_slots = conf.getint('core', 'non_pooled_task_slot_count')
else:
open_slots = pools[pool].open_slots(session=session)
queue_size = len(tis)
self.logger.info("Pool {pool} has {open_slots} slots, {queue_size} "
"task instances in queue".format(**locals()))
if open_slots <= 0:
continue
tis = sorted(
tis, key=lambda ti: (-ti.priority_weight, ti.start_date))
for ti in tis:
if open_slots <= 0:
continue
task = None
try:
task = dagbag.dags[ti.dag_id].get_task(ti.task_id)
except:
self.logger.error("Queued task {} seems gone".format(ti))
session.delete(ti)
session.commit()
continue
if not task:
continue
ti.task = task
# picklin'
dag = dagbag.dags[ti.dag_id]
pickle_id = None
if self.do_pickle and self.executor.__class__ not in (
executors.LocalExecutor,
executors.SequentialExecutor):
self.logger.info("Pickling DAG {}".format(dag))
pickle_id = dag.pickle(session).id
if dag.dag_id in dag_blacklist:
continue
if dag.concurrency_reached:
dag_blacklist.add(dag.dag_id)
continue
if ti.are_dependencies_met():
executor.queue_task_instance(ti, pickle_id=pickle_id)
open_slots -= 1
else:
session.delete(ti)
session.commit()
continue
ti.task = task
session.commit()
def _split(self, items, size):
"""
This function splits a list of items into chunks of int size.
_split([1,2,3,4,5,6], 3) becomes [[1,2,3],[4,5,6]]
"""
size = max(1, size)
return [items[i:i + size] for i in range(0, len(items), size)]
def _do_dags(self, dagbag, dags, tis_out):
"""
Iterates over the dags and schedules and processes them
"""
for dag in dags:
self.logger.debug("Scheduling {}".format(dag.dag_id))
dag = dagbag.get_dag(dag.dag_id)
if not dag:
continue
try:
self.schedule_dag(dag)
self.process_dag(dag, tis_out)
self.manage_slas(dag)
except Exception as e:
self.logger.exception(e)
@provide_session
def _reset_state_for_orphaned_tasks(self, dag_run, session=None):
"""
This function checks for a DagRun if there are any tasks
that have a scheduled state but are not known by the
executor. If it finds those it will reset the state to None
so they will get picked up again.
"""
queued_tis = self.executor.queued_tasks
# also consider running as the state might not have changed in the db yet
running = self.executor.running
tis = dag_run.get_task_instances(state=State.SCHEDULED, session=session)
for ti in tis:
if ti.key not in queued_tis and ti.key not in running:
ti.state = State.NONE
self.logger.debug("Rescheduling orphaned task {}".format(ti))
session.commit()
def _execute(self):
session = settings.Session()
TI = models.TaskInstance
pessimistic_connection_handling()
logging.basicConfig(level=logging.DEBUG)
self.logger.info("Starting the scheduler")
dagbag = models.DagBag(self.subdir, sync_to_db=True)
executor = self.executor = dagbag.executor
executor.start()
# grab orphaned tasks and make sure to reset their state
active_runs = DagRun.find(
state=State.RUNNING,
external_trigger=False,
session=session
)
for dr in active_runs:
self._reset_state_for_orphaned_tasks(dr, session=session)
self.runs = 0
while not self.num_runs or self.num_runs > self.runs:
try:
loop_start_dttm = datetime.now()
try:
self.prioritize_queued(executor=executor, dagbag=dagbag)
except Exception as e:
self.logger.exception(e)
self.runs += 1
try:
if self.runs % self.refresh_dags_every == 0:
dagbag = models.DagBag(self.subdir, sync_to_db=True)
else:
dagbag.collect_dags(only_if_updated=True)
except Exception as e:
self.logger.error("Failed at reloading the dagbag. {}".format(e))
Stats.incr('dag_refresh_error', 1, 1)
sleep(5)
if len(self.dag_ids) > 0:
dags = [dag for dag in dagbag.dags.values() if dag.dag_id in self.dag_ids]
else:
dags = [
dag for dag in dagbag.dags.values()
if not dag.parent_dag]
paused_dag_ids = dagbag.paused_dags()
dags = [x for x in dags if x.dag_id not in paused_dag_ids]
# dags = filter(lambda x: x.dag_id not in paused_dag_ids, dags)
self.logger.debug("Total Cores: {} Max Threads: {} DAGs:{}".
format(multiprocessing.cpu_count(),
self.max_threads,
len(dags)))
dags = self._split(dags, math.ceil(len(dags) / self.max_threads))
tis_q = multiprocessing.Queue()
jobs = [multiprocessing.Process(target=self._do_dags,
args=(dagbag, dags[i], tis_q))
for i in range(len(dags))]
self.logger.info("Starting {} scheduler jobs".format(len(jobs)))
for j in jobs:
j.start()
while any(j.is_alive() for j in jobs):
while not tis_q.empty():
ti_key, pickle_id = tis_q.get()
dag = dagbag.dags[ti_key[0]]
task = dag.get_task(ti_key[1])
ti = TI(task, ti_key[2])
ti.refresh_from_db(session=session, lock_for_update=True)
if ti.state == State.SCHEDULED:
session.commit()
self.logger.debug("Task {} was picked up by another scheduler"
.format(ti))
continue
elif ti.state is State.NONE:
ti.state = State.SCHEDULED
self.executor.queue_task_instance(ti, pickle_id=pickle_id)
session.merge(ti)
session.commit()
for j in jobs:
j.join()
self.logger.info("Done queuing tasks, calling the executor's "
"heartbeat")
duration_sec = (datetime.now() - loop_start_dttm).total_seconds()
self.logger.info("Loop took: {} seconds".format(duration_sec))
Stats.timing("scheduler_loop", duration_sec * 1000)
try:
self.import_errors(dagbag)
except Exception as e:
self.logger.exception(e)
try:
dagbag.kill_zombies()
except Exception as e:
self.logger.exception(e)
try:
# We really just want the scheduler to never ever stop.
executor.heartbeat()
self.heartbeat()
except Exception as e:
self.logger.exception(e)
self.logger.error("Tachycardia!")
except Exception as deep_e:
self.logger.exception(deep_e)
raise
finally:
settings.Session.remove()
executor.end()
session.close()
@provide_session
def heartbeat_callback(self, session=None):
Stats.gauge('scheduler_heartbeat', 1, 1)
class BackfillJob(BaseJob):
"""
A backfill job consists of a dag or subdag for a specific time range. It
    triggers a set of task instance runs, in the right order, and lasts for
    as long as it takes for the set of task instances to be completed.
"""
__mapper_args__ = {
'polymorphic_identity': 'BackfillJob'
}
def __init__(
self,
dag, start_date=None, end_date=None, mark_success=False,
include_adhoc=False,
donot_pickle=False,
ignore_dependencies=False,
ignore_first_depends_on_past=False,
pool=None,
*args, **kwargs):
self.dag = dag
self.dag_id = dag.dag_id
self.bf_start_date = start_date
self.bf_end_date = end_date
self.mark_success = mark_success
self.include_adhoc = include_adhoc
self.donot_pickle = donot_pickle
self.ignore_dependencies = ignore_dependencies
self.ignore_first_depends_on_past = ignore_first_depends_on_past
self.pool = pool
super(BackfillJob, self).__init__(*args, **kwargs)
def _execute(self):
"""
Runs a dag for a specified date range.
"""
session = settings.Session()
start_date = self.bf_start_date
end_date = self.bf_end_date
# picklin'
pickle_id = None
if not self.donot_pickle and self.executor.__class__ not in (
executors.LocalExecutor, executors.SequentialExecutor):
pickle = models.DagPickle(self.dag)
session.add(pickle)
session.commit()
pickle_id = pickle.id
executor = self.executor
executor.start()
executor_fails = Counter()
# Build a list of all instances to run
tasks_to_run = {}
failed = set()
succeeded = set()
started = set()
skipped = set()
not_ready = set()
deadlocked = set()
for task in self.dag.tasks:
if (not self.include_adhoc) and task.adhoc:
continue
start_date = start_date or task.start_date
end_date = end_date or task.end_date or datetime.now()
for dttm in self.dag.date_range(start_date, end_date=end_date):
ti = models.TaskInstance(task, dttm)
tasks_to_run[ti.key] = ti
session.merge(ti)
session.commit()
# Triggering what is ready to get triggered
while tasks_to_run and not deadlocked:
not_ready.clear()
for key, ti in list(tasks_to_run.items()):
ti.refresh_from_db(session=session, lock_for_update=True)
ignore_depends_on_past = (
self.ignore_first_depends_on_past and
ti.execution_date == (start_date or ti.start_date))
# The task was already marked successful or skipped by a
# different Job. Don't rerun it.
if ti.state == State.SUCCESS:
succeeded.add(key)
tasks_to_run.pop(key)
session.commit()
continue
elif ti.state == State.SKIPPED:
skipped.add(key)
tasks_to_run.pop(key)
session.commit()
continue
# Is the task runnable? -- then run it
if ti.is_queueable(
include_queued=True,
ignore_depends_on_past=ignore_depends_on_past,
flag_upstream_failed=True):
self.logger.debug('Sending {} to executor'.format(ti))
if ti.state == State.NONE:
ti.state = State.SCHEDULED
session.merge(ti)
session.commit()
executor.queue_task_instance(
ti,
mark_success=self.mark_success,
pickle_id=pickle_id,
ignore_dependencies=self.ignore_dependencies,
ignore_depends_on_past=ignore_depends_on_past,
pool=self.pool)
started.add(key)
# Mark the task as not ready to run
elif ti.state in (State.NONE, State.UPSTREAM_FAILED):
not_ready.add(key)
session.commit()
self.heartbeat()
executor.heartbeat()
# If the set of tasks that aren't ready ever equals the set of
# tasks to run, then the backfill is deadlocked
if not_ready and not_ready == set(tasks_to_run):
deadlocked.update(tasks_to_run.values())
tasks_to_run.clear()
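            # Example (a sketch): if every remaining task depends only on other
            # remaining tasks (for instance a depends_on_past chain whose first
            # run never succeeded), not_ready eventually equals tasks_to_run and
            # the whole remainder is declared deadlocked.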
# Reacting to events
for key, state in list(executor.get_event_buffer().items()):
dag_id, task_id, execution_date = key
if key not in tasks_to_run:
continue
ti = tasks_to_run[key]
ti.refresh_from_db()
# executor reports failure
if state == State.FAILED:
# task reports running
if ti.state == State.RUNNING:
msg = (
'Executor reports that task instance {} failed '
'although the task says it is running.'.format(key))
self.logger.error(msg)
ti.handle_failure(msg)
tasks_to_run.pop(key)
# task reports skipped
elif ti.state == State.SKIPPED:
self.logger.error("Skipping {} ".format(key))
skipped.add(key)
tasks_to_run.pop(key)
# anything else is a failure
else:
self.logger.error("Task instance {} failed".format(key))
failed.add(key)
tasks_to_run.pop(key)
# executor reports success
elif state == State.SUCCESS:
# task reports success
if ti.state == State.SUCCESS:
self.logger.info(
'Task instance {} succeeded'.format(key))
succeeded.add(key)
tasks_to_run.pop(key)
# task reports failure
elif ti.state == State.FAILED:
self.logger.error("Task instance {} failed".format(key))
failed.add(key)
tasks_to_run.pop(key)
# task reports skipped
elif ti.state == State.SKIPPED:
self.logger.info("Task instance {} skipped".format(key))
skipped.add(key)
tasks_to_run.pop(key)
# this probably won't ever be triggered
elif ti in not_ready:
self.logger.info(
"{} wasn't expected to run, but it did".format(ti))
# executor reports success but task does not - this is weird
elif ti.state not in (
State.SCHEDULED,
State.QUEUED,
State.UP_FOR_RETRY):
self.logger.error(
"The airflow run command failed "
"at reporting an error. This should not occur "
"in normal circumstances. Task state is '{}',"
"reported state is '{}'. TI is {}"
"".format(ti.state, state, ti))
# if the executor fails 3 or more times, stop trying to
# run the task
executor_fails[key] += 1
if executor_fails[key] >= 3:
msg = (
'The airflow run command failed to report an '
'error for task {} three or more times. The '
'task is being marked as failed. This is very '
'unusual and probably means that an error is '
'taking place before the task even '
'starts.'.format(key))
self.logger.error(msg)
ti.handle_failure(msg)
tasks_to_run.pop(key)
msg = ' | '.join([
"[backfill progress]",
"waiting: {0}",
"succeeded: {1}",
"kicked_off: {2}",
"failed: {3}",
"skipped: {4}",
"deadlocked: {5}"
]).format(
len(tasks_to_run),
len(succeeded),
len(started),
len(failed),
len(skipped),
len(deadlocked))
self.logger.info(msg)
executor.end()
session.close()
err = ''
if failed:
err += (
"---------------------------------------------------\n"
"Some task instances failed:\n{}\n".format(failed))
if deadlocked:
err += (
'---------------------------------------------------\n'
'BackfillJob is deadlocked.')
deadlocked_depends_on_past = any(
t.are_dependencies_met() != t.are_dependencies_met(
ignore_depends_on_past=True)
for t in deadlocked)
if deadlocked_depends_on_past:
err += (
'Some of the deadlocked tasks were unable to run because '
'of "depends_on_past" relationships. Try running the '
'backfill with the option '
'"ignore_first_depends_on_past=True" or passing "-I" at '
'the command line.')
err += ' These tasks were unable to run:\n{}\n'.format(deadlocked)
if err:
raise AirflowException(err)
self.logger.info("Backfill done. Exiting.")
class LocalTaskJob(BaseJob):
__mapper_args__ = {
'polymorphic_identity': 'LocalTaskJob'
}
def __init__(
self,
task_instance,
ignore_dependencies=False,
ignore_depends_on_past=False,
force=False,
mark_success=False,
pickle_id=None,
pool=None,
*args, **kwargs):
self.task_instance = task_instance
self.ignore_dependencies = ignore_dependencies
self.ignore_depends_on_past = ignore_depends_on_past
self.force = force
self.pool = pool
self.pickle_id = pickle_id
self.mark_success = mark_success
        # terminating state is used so that a job doesn't try to
# terminate multiple times
self.terminating = False
# Keeps track of the fact that the task instance has been observed
# as running at least once
self.was_running = False
super(LocalTaskJob, self).__init__(*args, **kwargs)
def _execute(self):
command = self.task_instance.command(
raw=True,
ignore_dependencies=self.ignore_dependencies,
ignore_depends_on_past=self.ignore_depends_on_past,
force=self.force,
pickle_id=self.pickle_id,
mark_success=self.mark_success,
job_id=self.id,
pool=self.pool,
)
self.process = subprocess.Popen(['bash', '-c', command])
return_code = None
while return_code is None:
self.heartbeat()
return_code = self.process.poll()
def on_kill(self):
self.process.terminate()
@provide_session
def heartbeat_callback(self, session=None):
"""Self destruct task if state has been moved away from running externally"""
if self.terminating:
# task is already terminating, let it breathe
return
# Suicide pill
TI = models.TaskInstance
ti = self.task_instance
state = session.query(TI.state).filter(
TI.dag_id==ti.dag_id, TI.task_id==ti.task_id,
TI.execution_date==ti.execution_date).scalar()
if state == State.RUNNING:
self.was_running = True
elif self.was_running and hasattr(self, 'process'):
logging.warning(
"State of this instance has been externally set to "
"{self.task_instance.state}. "
"Taking the poison pill. So long.".format(**locals()))
self.process.terminate()
self.terminating = True
|
apache-2.0
| 6,076,973,327,566,079,000
| 37.037294
| 110
| 0.519393
| false
| 4.414394
| false
| false
| false
|
frankdilo/cropper-python
|
cropper/cli.py
|
1
|
4116
|
#!/usr/bin/env python
import os
from glob import glob
import click
import shutil
from PIL import Image
from devices import DEVICES, ALL_DEVICE_NAMES
IPAD_MASTER_DIRNAME = 'iPadMaster'
IPHONE_MASTER_DIRNAME = 'iPhoneMaster'
def safe_mkdir(path):
try:
os.mkdir(path)
except OSError:
pass
def safe_mkdir_intermediate(path):
"""
    Create the parent directory of path (including intermediate directories), without failing if it already exists.
"""
dir_path = os.path.dirname(path)
try:
os.makedirs(dir_path)
except OSError:
pass
def transform_images(src_paths, dest_paths, resize_to, crop_margins):
for (src, dest) in zip(src_paths, dest_paths):
src_image = Image.open(src)
final_image = src_image
# resize
if resize_to:
final_image = src_image.resize(resize_to, Image.LANCZOS)
# crop
if crop_margins:
# left, upper, right, lower
cropped_size = (0 + crop_margins[0]/2, 0, resize_to[0] - crop_margins[0]/2, resize_to[1]-crop_margins[1])
final_image = final_image.crop(cropped_size)
# save
safe_mkdir_intermediate(dest)
final_image.save(dest)
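# Worked example (a sketch; the numbers are hypothetical): with resize_to=(640, 1136)
# and crop_margins=(0, 20), the crop box is (0, 0, 640, 1116), trimming 20px from the
# bottom; a non-zero horizontal margin is split evenly between the left and right edges.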
def group_screenshots_by_language(master_dir):
language_dirs = glob(os.path.join(master_dir, IPHONE_MASTER_DIRNAME, '*'))
language_dirs = filter(os.path.isdir, language_dirs)
supported_languages = [os.path.basename(lang_dir) for lang_dir in language_dirs]
screens_by_device = {device: glob(os.path.join(master_dir, device, '*', '*.png'))
for device in ALL_DEVICE_NAMES}
screens_by_language_and_device = {lang: {} for lang in supported_languages}
for device, screens in screens_by_device.iteritems():
for lang in supported_languages:
screens_by_language_and_device[lang][device] = filter(lambda path: lang in path, screens)
for lang in supported_languages:
# create top-level language directory
lang_dir = os.path.join(master_dir, lang)
safe_mkdir(lang_dir)
# create one sub-folder inside the language directory, for each device type
for device in ALL_DEVICE_NAMES:
device_subdir = os.path.join(lang_dir, device)
safe_mkdir(device_subdir)
screens_to_move = screens_by_language_and_device[lang][device]
for tomove in screens_to_move:
dest = os.path.join(device_subdir, os.path.basename(tomove))
os.rename(tomove, dest)
def rm_empty_device_folders(master_dir):
for device in ALL_DEVICE_NAMES:
dir_path = os.path.join(master_dir, device)
shutil.rmtree(dir_path)
@click.command()
@click.argument('master_dir', type=str)
def main(master_dir):
master_dir = os.path.abspath(master_dir)
iphone_images_pattern = os.path.join(master_dir, IPHONE_MASTER_DIRNAME) + '/*/*.png'
ipad_images_pattern = os.path.join(master_dir, IPAD_MASTER_DIRNAME) + '/*/*.png'
iphone_img_paths = glob(iphone_images_pattern)
ipad_img_paths = glob(ipad_images_pattern)
if not iphone_img_paths:
print "Error: no master iPhone images found!"
exit(1)
if not ipad_img_paths:
print "Error: no master iPad images found!"
exit(1)
# iphone screenshots
for device_name, operations in DEVICES['iPhone'].items():
dest_paths = [img_path.replace('iPhoneMaster', device_name) for img_path in iphone_img_paths]
transform_images(iphone_img_paths, dest_paths, operations['resize'], operations['crop'])
print "{} done".format(device_name)
# ipad screenshots
for device_name, operations in DEVICES['iPad'].items():
dest_paths = [img_path.replace('iPadMaster', device_name) for img_path in ipad_img_paths]
transform_images(ipad_img_paths, dest_paths, operations['resize'], operations['crop'])
print "{} done".format(device_name)
print "Reorganizing languages..."
group_screenshots_by_language(master_dir)
print "Cleaning up..."
rm_empty_device_folders(master_dir)
main()
|
bsd-3-clause
| -8,730,890,915,175,512,000
| 30.906977
| 117
| 0.646987
| false
| 3.458824
| false
| false
| false
|
botherder/volatility
|
volatility/plugins/mac/netstat.py
|
1
|
2242
|
# Volatility
# Copyright (C) 2007-2013 Volatility Foundation
#
# This file is part of Volatility.
#
# Volatility is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Volatility is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Volatility. If not, see <http://www.gnu.org/licenses/>.
#
"""
@author: Andrew Case
@license: GNU General Public License 2.0
@contact: atcuno@gmail.com
@organization:
"""
import volatility.obj as obj
import volatility.plugins.mac.lsof as lsof
class mac_netstat(lsof.mac_lsof):
""" Lists active per-process network connections """
def render_text(self, outfd, data):
self.table_header(outfd, [("Proto", "6"),
("Local IP", "20"),
("Local Port", "6"),
("Remote IP", "20"),
("Remote Port", "6"),
("State", "20"),
("Process", "24")])
for proc, i, fd, _path in data:
if fd.f_fglob.fg_type == 'DTYPE_SOCKET':
socket = fd.f_fglob.fg_data.dereference_as("socket")
family = socket.family
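                # socket.family uses the BSD/OS X constants: 1 == AF_UNIX,
                # 2 == AF_INET (IPv4), 30 == AF_INET6 (IPv6)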
if family == 1:
upcb = socket.so_pcb.dereference_as("unpcb")
path = upcb.unp_addr.sun_path
outfd.write("UNIX {0}\n".format(path))
elif family in [2, 30]:
proto = socket.protocol
state = socket.state
(lip, lport, rip, rport) = socket.get_connection_info()
self.table_row(outfd, proto, lip, lport, rip, rport, state, "{}/{}".format(proc.p_comm, proc.p_pid))
|
gpl-2.0
| 9,063,788,820,111,397,000
| 35.754098
| 120
| 0.544157
| false
| 4.113761
| false
| false
| false
|
snakazawa/qibluemix
|
sample/sttproxy/sttproxy.py
|
1
|
2371
|
# -*- coding: utf-8 -*-
u"""
see readme.md
"""
import os
import sys
import time
from naoqi import ALBroker
sys.path.append(os.path.dirname(os.path.abspath(__file__)) + '/../../')
from qibluemix import STTProxy, get_logger
from qibluemix.pepper import SpeechRecognitionMemory, StreamingAudioRecorder
from qibluemix.watson import Watson
# ==== parameters ====
PEPPER_IP = "192.168.xxx.xxx"
PEPPER_PORT = 9559
EVENT_ROOT_NAME = "Bluemix/STTProxy/"  # root path in Pepper's memory used by this application
USERNAME = "********" # credentials.username (Bluemix Speech To Text)
PASSWORD = "********" # credentials.password (Bluemix Speech To Text)
URL = "https://stream.watsonplatform.net/speech-to-text/api"
CONFIDENCE = 0.2  # acceptable confidence for recognition results (0.0-1.0); results below this threshold are ignored
# ==== /parameters ====
StreamingAudioRecorderModule = None
SpeechRecognitionMemoryModule = None
broker = None
logger = get_logger()
def main():
global SpeechRecognitionMemoryModule
global StreamingAudioRecorderModule
global broker
logger.info("init watson")
watson = Watson(USERNAME, PASSWORD, URL)
token = get_token(watson)
stream = watson.recognize_stream(token)
logger.info("init remote pepper")
broker = ALBroker("myBroker", "0.0.0.0", 0, PEPPER_IP, PEPPER_PORT)
logger.info("init StreamingAudioRecorder")
recorder = StreamingAudioRecorderModule = StreamingAudioRecorder("StreamingAudioRecorderModule")
logger.info("init SpeechRecognitionMemory")
memory = SpeechRecognitionMemoryModule = SpeechRecognitionMemory("SpeechRecognitionMemoryModule", EVENT_ROOT_NAME)
logger.info("init SpeechToTextProxy")
proxy = STTProxy(recorder, stream, memory)
proxy.init()
logger.info("ready...")
# manual(proxy, duration=10, after_wait=3)
# service
while True:
time.sleep(1)
def manual(proxy, duration=10, after_wait=3):
logger.info("start")
proxy.start()
time.sleep(duration)
logger.info("stop")
proxy.stop()
time.sleep(after_wait)
logger.info("end")
def get_token(watson):
r = watson.get_token()
if r.status_code != 200:
logger.info(r.url)
logger.info(r.status_code)
logger.info(r.text.encode('utf-8'))
exit(1)
return r.text
if __name__ == "__main__":
main()
|
mit
| -6,306,866,242,211,763,000
| 24.561798
| 118
| 0.687912
| false
| 2.890724
| false
| false
| false
|
RudolfCardinal/crate
|
crate_anon/preprocess/rio_pk.py
|
1
|
5802
|
#!/usr/bin/env python
"""
crate_anon/preprocess/rio_pk.py
===============================================================================
Copyright (C) 2015-2021 Rudolf Cardinal (rudolf@pobox.com).
This file is part of CRATE.
CRATE is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
CRATE is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with CRATE. If not, see <http://www.gnu.org/licenses/>.
===============================================================================
**Details of the names of primary keys in selected RiO tables.**
"""
__SUPERSEDED = """
RIO_6_2_ATYPICAL_PKS = { # SUPERSEDED by better PK detection
# These are table: pk_field mappings for PATIENT tables, i.e. those
# containing the ClientID field, where that PK is not the default of
# SequenceID.
# -------------------------------------------------------------------------
# RiO Core
# -------------------------------------------------------------------------
# Ams*: Appointment Management System
'AmsAppointmentContactActivity': 'ActivitySequenceID',
'AmsAppointmentOtherHCP': None, # non-patient; non-unique SequenceID
# ... SequenceID is non-unique and the docs also list it as an FK;
# ActivitySequenceID this is unique and a PK
'AmsReferralDatesArchive': 'AMSSequenceID',
# ... UNVERIFIED as no rows in our data; listed as a PK and an FK
'AmsReferralListUrgency': None,
'AmsReferralListWaitingStatus': None,
'AmsStream': None, # non-patient; non-unique SequenceID
'CarePlanIndex': 'CarePlanID',
'CarePlanProblemOrder': None,
'ClientAddressMerged': None, # disused table
'ClientCareSpell': None, # CareSpellNum is usually 1 for a given ClientID
'ClientDocumentAdditionalClient': None,
'ClientFamily': None,
'ClientFamilyLink': None,
'ClientGPMerged': None,
'ClientHealthCareProvider': None,
'ClientMerge': None,
'ClientMerged': None,
'ClientName': 'ClientNameID',
'ClientOtherDetail': None, # not in docs, but looks like Core
'ClientPhoto': None,
'ClientPhotoMerged': None,
'ClientProperty': None,
'ClientPropertyMerged': None,
'ClientTelecom': 'ClientTelecomID',
'ClientUpdatePDSCache': None,
# Con*: Contracts
'Contract': 'ContractNumber',
'ConAdHocAwaitingApproval': 'SequenceNo',
'ConClientInitialBedRate': None,
'ConClinicHistory': 'SequenceNo',
'ConLeaveDiscountHistory': 'SequenceNo',
# Not documented, but looks like Core
'Deceased': None, # or possibly TrustWideID (or just ClientID!)
'DemClientDeletedDetails': None,
# EP: E-Prescribing
# ... with DA: Drug Administration
# ... with DS: Drug Service
'EPClientConditions': 'RowID',
'EPClientPrescription': 'PrescriptionID',
'EPClientSensitivities': None, # UNVERIFIED: None? Joint PK on ProdID?
'EPDiscretionaryDrugClientLink': None,
'EPVariableDosageDrugLink': 'HistoryID', # UNVERIFIED
'EPClientAllergies': 'ReactionID',
'DAConcurrencyControl': None,
'DAIPPrescription': 'PrescriptionID',
'DSBatchPatientGroups': None,
'DSMedicationBatchContinue': None,
'DSMedicationBatchLink': None,
# Ims*: Inpatient Management System
'ImsEventLeave': 'UniqueSequenceID', # SequenceID
'ImsEventMovement': None,
'ImsEventRefno': None, # Not in docs but looks like Core.
'ImsEventRefnoBAKUP': None, # [Sic.] Not in docs but looks like Core.
# LR*: Legitimate Relationships
'LRIdentifiedCache': None,
# Mes*: messaging
'MesLettersGenerated': 'Reference',
# Mnt*: Mental Health module (re MHA detention)
'MntArtAttendee': None, # SequenceID being "of person within a meeting"
'MntArtOutcome': None, # ditto
'MntArtPanel': None, # ditto
'MntArtRpts': None, # ditto
'MntArtRptsReceived': None, # ditto
'MntClientEctSection62': None,
'MntClientMedSection62': None,
'MntClientSectionDetailCareCoOrdinator': None,
'MntClientSectionDetailCourtAppearance': None,
'MntClientSectionDetailFMR': None,
'MntClientSectionReview': None,
# NDTMS*: Nation(al?) Drug Treatment Monitoring System
# SNOMED*: SNOMED
'SNOMED_Client': 'SC_ID',
# UserAssess*: user assessment (= non-core?) tables.
# See other default PK below: type12:
# -------------------------------------------------------------------------
# Non-core? No docs available.
# -------------------------------------------------------------------------
# Chd*: presumably, child development
'ChdClientDevCheckBreastFeeding': None,
# ... guess; DevChkSeqID is probably FK to ChdClientDevCheck.SequenceID
# ??? But it has q1-q30, qu2-14, home, sch, comm... assessment tool...
'CYPcurrentviewImport': None, # not TrustWideID (which is non-unique)
'GoldmineIfcMapping': None, # no idea, really, and no data to explore
'KP90ErrorLog': None,
'ReportsOutpatientWaitersHashNotSeenReferrals': None,
'ReportsOutpatientWaitersNotSeenReferrals': None,
'UserAssesstfkcsa_childprev': 'type12_RowID', # Keeping Children Safe Assessment subtable # noqa
'UserAssesstfkcsa_childs': 'type12_RowID', # Keeping Children Safe Assessment subtable # noqa
}
"""
RIO_6_2_ATYPICAL_PATIENT_ID_COLS = {
'SNOMED_Client': 'SC_ClientID',
}
|
gpl-3.0
| -156,543,975,734,238,370
| 35.2625
| 102
| 0.638401
| false
| 3.555147
| false
| false
| false
|
camillescott/boink
|
goetia/cli/cdbg_stream.py
|
1
|
11576
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# (c) Camille Scott, 2019
# File : cdbg_stream.py
# License: MIT
# Author : Camille Scott <camille.scott.w@gmail.com>
# Date : 11.03.2020
from goetia import libgoetia
from goetia.cdbg import (compute_connected_component_callback,
compute_unitig_fragmentation_callback,
write_cdbg_metrics_callback,
write_cdbg_callback)
from goetia.dbg import get_graph_args, process_graph_args
from goetia.parsing import FastxReader, get_fastx_args, iter_fastx_inputs  # FastxReader (assumed exported here) is used by NormalizingCompactor below
from goetia.processors import AsyncSequenceProcessor, at_modulo_interval
from goetia.messages import (Interval, SampleStarted, SampleFinished, Error, AllMessages)
from goetia.metadata import CUR_TIME
from goetia.serialization import cDBGSerialization
from goetia.cli.args import get_output_interval_args, print_interval_settings
from goetia.cli.runner import CommandRunner
import curio
import os
import sys
class cDBGRunner(CommandRunner):
def __init__(self, parser):
get_graph_args(parser)
get_cdbg_args(parser)
get_output_interval_args(parser)
group = get_fastx_args(parser)
group.add_argument('-o', dest='output_filename', default='/dev/stdout')
group.add_argument('-i', '--inputs', dest='inputs', nargs='+', required=True)
parser.add_argument('--echo', default=None,
help='echo all events to the given file.')
parser.add_argument('--curio-monitor', default=False, action='store_true',
help='Run curio kernel monitor for async debugging.')
parser.add_argument('--verbose', default=False, action='store_true')
super().__init__(parser)
def postprocess_args(self, args):
process_graph_args(args)
process_cdbg_args(args)
def setup(self, args):
os.makedirs(args.results_dir, exist_ok=True)
self.dbg_t = args.graph_t
self.hasher = args.hasher_t(args.ksize)
self.storage = args.storage.build(*args.storage_args)
self.dbg = args.graph_t.build(self.storage, self.hasher)
self.cdbg_t = libgoetia.cdbg.cDBG[type(self.dbg)]
self.compactor_t = libgoetia.cdbg.StreamingCompactor[type(self.dbg)]
self.compactor = self.compactor_t.Compactor.build(self.dbg)
if args.normalize:
self.file_processor = self.compactor_t.NormalizingCompactor[FastxReader].build(self.compactor,
args.normalize,
args.interval)
else:
self.file_processor = self.compactor_t.Processor.build(self.compactor,
args.interval)
# Iterator over samples (pairs or singles, depending on pairing-mode)
sample_iter = iter_fastx_inputs(args.inputs, args.pairing_mode, names=args.names)
# AsyncSequenceProcessor does event management and callback for the FileProcessors
self.processor = AsyncSequenceProcessor(self.file_processor, sample_iter, args.echo)
# Subscribe a listener to the FileProcessor producer
self.worker_listener = self.processor.add_listener('worker_q', 'cdbg.consumer')
#
# Register callbacks for data outputs.
# Track a list of files that need to be closed with a ]
# when we're done.
#
self.to_close = []
if args.track_cdbg_metrics:
self.worker_listener.on_message(Interval,
write_cdbg_metrics_callback,
self.compactor,
args.track_cdbg_metrics,
args.verbose)
self.to_close.append(args.track_cdbg_metrics)
if args.track_unitig_bp:
if args.unitig_bp_bins is None:
bins = [args.ksize, 100, 200, 500, 1000]
else:
bins = args.unitig_bp_bins
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_unitig_fragmentation_callback,
modulus=args.unitig_bp_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_unitig_bp,
bins,
verbose=args.verbose)
self.to_close.append(args.track_unitig_bp)
if args.track_cdbg_components:
self.worker_listener.on_message(Interval,
at_modulo_interval(compute_connected_component_callback,
modulus=args.cdbg_components_tick),
self.cdbg_t,
self.compactor.cdbg,
args.track_cdbg_components,
args.component_sample_size,
verbose=args.verbose)
self.to_close.append(args.track_cdbg_components)
if args.save_cdbg:
for cdbg_format in args.save_cdbg_format:
self.worker_listener.on_message(Interval,
at_modulo_interval(write_cdbg_callback,
modulus=args.cdbg_tick),
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
self.worker_listener.on_message(SampleFinished,
write_cdbg_callback,
args.save_cdbg,
cdbg_format,
verbose=args.verbose)
# Close all files when done
async def close_files(msg, files):
for file_name in files:
async with curio.aopen(file_name, 'a') as fp:
await fp.write('\n]\n')
self.worker_listener.on_message(SampleFinished, close_files, self.to_close)
#
# Regular diagnostics output
#
def info_output(msg):
info = f'{msg.msg_type}: {getattr(msg, "state", "")}'\
f'\n\tSample: {msg.sample_name}'\
f'\n\tSequences: {msg.sequence}'\
f'\n\tk-mers: {msg.t}'
if msg.msg_type == 'Error':
info += f'\n\tError: {msg.error}'
print(info, file=sys.stderr)
self.worker_listener.on_message(AllMessages, info_output)
def execute(self, args):
curio.run(self.processor.start, with_monitor=args.curio_monitor)
def teardown(self):
pass
def get_cdbg_args(parser):
default_prefix = 'goetia.build-cdbg.' + CUR_TIME
parser.default_prefix = default_prefix
group = parser.add_argument_group('cDBG')
group.add_argument('--results-dir',
default=default_prefix)
group.add_argument('--normalize',
type=int,
nargs='?',
const=10)
group.add_argument('--save-cdbg',
metavar='PREFIX.<format>',
nargs='?',
const='goetia.cdbg.graph',
help='Save a copy of the cDBG.')
group.add_argument('--save-cdbg-format',
nargs='+',
choices=cDBGSerialization.FORMATS,
default=['gfa1'])
group.add_argument('--cdbg-tick',
type=int,
default=10,
help='Save every N interval ticks.')
group.add_argument('--track-cdbg-metrics',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.stats.json',
help='Output basic cDBG metrics.')
group.add_argument('--cdbg-metrics-tick',
type=int,
default=5,
help='Output every N interval ticks.')
group.add_argument('--track-cdbg-components',
metavar='FILE_NAME.json',
nargs='?',
const='goetia.cdbg.components.json',
help='Save the distribution of component sizes.')
group.add_argument('--component-sample-size',
type=int,
default=10000,
help='Number of components to sample for size.')
group.add_argument('--cdbg-components-tick',
type=int,
default=5,
help='Sample and save distribution every N interval ticks.')
group.add_argument('--track-unitig-bp',
metavar='FILENAME.json',
nargs='?',
const='goetia.cdbg.unitigs.bp.json',
help='Track the distribution of unitig sizes.')
group.add_argument('--unitig-bp-bins',
nargs='+',
type=int,
help='Bin sizes of distribution.')
group.add_argument('--unitig-bp-tick',
type=int,
default=10)
group.add_argument('--validate',
metavar='FILENAME.csv',
nargs='?',
const='goetia.cdbg.validation.csv')
return group
def process_cdbg_args(args):
def join(p):
return p if p is None else os.path.join(args.results_dir, p)
args.track_cdbg_stats = join(args.track_cdbg_metrics)
args.track_cdbg_components = join(args.track_cdbg_components)
args.save_cdbg = join(args.save_cdbg)
args.track_cdbg_unitig_bp = join(args.track_unitig_bp)
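# Worked example (a sketch): passing --track-cdbg-metrics with no value selects the
# const 'goetia.cdbg.stats.json', which process_cdbg_args() rewrites to
# <results_dir>/goetia.cdbg.stats.json; options left as None pass through join()
# unchanged because join() returns None inputs as-is.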
def print_cdbg_args(args):
print('* cDBG Params', file=sys.stderr)
print('* Directory: ', args.results_dir, file=sys.stderr)
if args.save_cdbg:
print('* Saving cDBG every {0} sequences with file prefix {1}'.format(args.coarse_interval,
args.save_cdbg),
file=sys.stderr)
print('* cDBG save formats: {0}'.format(', '.join(args.save_cdbg_format)))
if args.track_cdbg_stats:
print('* Tracking cDBG stats and reporting every {0} sequences'.format(args.fine_interval),
file=sys.stderr)
print('* Saving tracking information to', args.track_cdbg_stats, file=sys.stderr)
if args.track_cdbg_history:
print('* Tracking cDBG history and saving to', args.track_cdbg_history, file=sys.stderr)
if args.validate:
print('* cDBG will be validated on completion and results saved to', args.validate,
file=sys.stderr)
print('*', '*' * 10, '*', sep='\n', file=sys.stderr)
|
mit
| 4,909,020,035,630,128,000
| 41.248175
| 106
| 0.503283
| false
| 4.423386
| false
| false
| false
|
CloCkWeRX/rabbitvcs
|
rabbitvcs/vcs/git/gittyup/client.py
|
1
|
62939
|
#
# client.py
#
import os
import os.path
import re
import shutil
import fnmatch
import time
from string import ascii_letters, digits
from datetime import datetime
from mimetypes import guess_type
import subprocess
import dulwich.errors
import dulwich.repo
import dulwich.objects
from dulwich.pack import Pack
from dulwich.index import commit_index, write_index_dict, SHA1Writer
#from dulwich.patch import write_tree_diff
from exceptions import *
import util
from objects import *
from config import GittyupLocalFallbackConfig
from command import GittyupCommand
TZ = -1 * time.timezone
ENCODING = "UTF-8"
def callback_notify_null(val):
pass
def callback_get_user():
from pwd import getpwuid
pwuid = getpwuid(os.getuid())
user = pwuid[0]
fullname = pwuid[4]
host = os.getenv("HOSTNAME")
return (fullname, "%s@%s" % (user, host))
def callback_get_cancel():
return False
def get_tmp_path(filename):
    tmpdir = "/tmp/rabbitvcs"
    if not os.path.isdir(tmpdir):
        os.mkdir(tmpdir)
    return os.path.join(tmpdir, filename)
class GittyupClient:
def __init__(self, path=None, create=False):
self.callback_notify = callback_notify_null
self.callback_progress_update = None
self.callback_get_user = callback_get_user
self.callback_get_cancel = callback_get_cancel
self.global_ignore_patterns = []
self.git_version = None
self.numberOfCommandStages = 0
self.numberOfCommandStagesExecuted = 0
if path:
try:
self.repo = dulwich.repo.Repo(path)
self._load_config()
self.global_ignore_patterns = self._get_global_ignore_patterns()
except dulwich.errors.NotGitRepository:
if create:
self.initialize_repository(path)
self.global_ignore_patterns = self._get_global_ignore_patterns()
else:
raise NotRepositoryError()
else:
self.repo = None
#
# Start Private Methods
#
def _initialize_index(self):
index_path = self.repo.index_path()
f = open(index_path, "wb")
try:
f = SHA1Writer(f)
write_index_dict(f, {})
finally:
f.close()
def _get_index(self):
if self.repo.has_index() == False:
self._initialize_index()
return self.repo.open_index()
def _get_tree_at_head(self):
try:
tree = self.repo[self.repo[self.repo.head()].tree]
except KeyError, e:
tree = dulwich.objects.Tree()
return tree
def _get_working_tree(self):
return self.repo[commit_index(self.repo.object_store, self._get_index())]
def _get_tree_from_sha1(self, sha1):
return self.repo[self.repo[sha1].tree]
def _get_tree_index(self, tree=None):
if tree is None:
tree = self._get_tree_at_head()
tree_index = {}
if tree:
for item in self.repo.object_store.iter_tree_contents(tree.id):
tree_index[item[0]] = (item[1], item[2])
return tree_index
def _get_git_version(self):
"""
Gets the local git version
"""
if self.git_version:
return self.git_version
else:
try:
proc = subprocess.Popen(["git", "--version"], stdout=subprocess.PIPE)
response = proc.communicate()[0].split()
version = response[2].split(".")
self.git_version = version
return self.git_version
except Exception, e:
return None
def _version_greater_than(self, version1, version2):
len1 = len(version1)
len2 = len(version2)
max = 5
# Pad the version lists so they are the same length
if max > len1:
version1 += [0] * (max-len1)
if max > len2:
version2 += [0] * (max-len2)
if version1[0] > version2[0]:
return True
if (version1[0] == version2[0]
and version1[1] > version2[1]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] > version2[2]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] == version2[2]
and version1[3] > version2[3]):
return True
if (version1[0] == version2[0]
and version1[1] == version2[1]
and version1[2] == version2[2]
and version1[3] == version2[3]
and version1[4] > version2[4]):
return True
return False
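    # Worked example (a sketch): _version_greater_than([1, 7, 9], [1, 7, 4]) returns
    # True at the third component; both lists are first zero-padded to length 5, so
    # comparing [2] against [1, 9, 9] also works and returns True.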
def _get_global_ignore_patterns(self):
"""
Get ignore patterns from $GIT_DIR/info/exclude then from
core.excludesfile in gitconfig.
"""
patterns = []
files = self.get_global_ignore_files()
for path in files:
patterns += self.get_ignore_patterns_from_file(path)
return patterns
def get_global_ignore_files(self):
"""
Returns a list of ignore files possible for this repository
"""
try:
git_dir = os.environ["GIT_DIR"]
except KeyError:
git_dir = os.path.join(self.repo.path, ".git")
files = []
excludefile = os.path.join(git_dir, "info", "exclude")
files.append(excludefile)
try:
core_excludesfile = self.config.get("core", "excludesfile")
if core_excludesfile:
files.append(core_excludesfile)
except KeyError:
pass
return files
def get_local_ignore_file(self, path):
if not os.path.exists(path):
return []
if os.path.isfile(path):
            path = os.path.dirname(path)
return os.path.join(path, ".gitignore")
def get_ignore_patterns_from_file(self, path):
"""
Read in an ignore patterns file (i.e. .gitignore, $GIT_DIR/info/exclude)
and return a list of patterns
"""
patterns = []
if os.path.isfile(path):
file = open(path, "r")
try:
for line in file:
if line == "" or line.startswith("#"):
continue
patterns.append(line.rstrip("\n"))
finally:
file.close()
return patterns
def get_local_config_file(self):
try:
git_dir = os.environ["GIT_DIR"]
except KeyError:
git_dir = os.path.join(self.repo.path, ".git")
return git_dir + "/config"
def _ignore_file(self, patterns, filename):
"""
Determine whether the given file should be ignored
"""
for pattern in patterns:
if fnmatch.fnmatch(filename, pattern) and not pattern.startswith("!"):
return True
return False
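    # Worked example (a sketch): _ignore_file(["*.pyc", "!keep.pyc"], "foo.pyc")
    # returns True via the "*.pyc" pattern; negated patterns (starting with "!")
    # never produce a match here, they are simply skipped.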
def _read_directory_tree(self, path, show_ignored_files=False):
files = []
directories = []
for root, dirs, filenames in os.walk(path, topdown=True):
try:
dirs.remove(".git")
removed_git_dir = True
except ValueError:
pass
# Find the relative root path of this folder
if root == self.repo.path:
rel_root = ""
else:
rel_root = self.get_relative_path(root)
for filename in filenames:
files.append(os.path.join(rel_root, filename))
for _d in dirs:
directories.append(os.path.join(rel_root, _d))
directories.append(rel_root)
        # Remove duplicates in list
        directories = list(set(directories))
return (sorted(files), directories)
def _get_blob_from_file(self, path):
file = open(path, "rb")
try:
blob = dulwich.objects.Blob.from_string(file.read())
finally:
file.close()
return blob
def _write_blob_to_file(self, path, blob):
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
file = open(path, "wb")
try:
file.write(blob.data)
finally:
file.close()
def _load_config(self):
self.config = GittyupLocalFallbackConfig(self.repo.path)
def _get_config_user(self):
try:
config_user_name = self.config.get("user", "name")
config_user_email = self.config.get("user", "email")
if config_user_name == "" or config_user_email == "":
raise KeyError()
except KeyError:
(config_user_name, config_user_email) = self.callback_get_user()
if config_user_name == None and config_user_email == None:
return None
self.config.set("user", "name", config_user_name)
self.config.set("user", "email", config_user_email)
self.config.write()
return "%s <%s>" % (config_user_name, config_user_email)
def _write_packed_refs(self, refs):
packed_refs_str = ""
for ref,sha in refs.items():
            # Accumulate one "<sha> <refname>" line per ref; plain assignment
            # here would keep only the last entry written to packed-refs.
            packed_refs_str += "%s %s\n" % (sha, ref)
fd = open(os.path.join(self.repo.controldir(), "packed-refs"), "wb")
fd.write(packed_refs_str)
fd.close()
def _remove_from_index(self, index, key):
del index._byname[key]
#
# Start Public Methods
#
def initialize_repository(self, path, bare=False):
if not os.path.isdir(path):
os.mkdir(path)
cmd = ["git", "init"]
if bare:
cmd.append("--bare")
cmd.append(path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def set_repository(self, path):
try:
self.repo = dulwich.repo.Repo(path)
self._load_config()
except dulwich.errors.NotGitRepository:
raise NotRepositoryError()
def get_repository(self):
return self.repo.path
def find_repository_path(self, path):
path_to_check = path
while path_to_check != "/" and path_to_check != "":
if os.path.isdir(os.path.join(path_to_check, ".git")):
return path_to_check
path_to_check = os.path.split(path_to_check)[0]
return None
def get_relative_path(self, path):
if path == self.repo.path:
return ""
return util.relativepath(self.repo.path, path)
def get_absolute_path(self, path):
return os.path.join(self.repo.path, path).rstrip("/")
def track(self, name):
self.repo.refs.set_symbolic_ref("HEAD", name)
def is_tracking(self, name):
return (self.repo.refs.read_ref("HEAD")[5:] == name)
def tracking(self):
return self.repo.refs.read_ref("HEAD")[5:]
def head(self):
return self.repo.refs["HEAD"]
def get_sha1_from_refspec(self, refspec):
if refspec in self.repo.refs:
return self.repo.refs[refspec]
else:
return None
def stage(self, paths):
"""
Stage files to be committed or tracked
@type paths: list
@param paths: A list of files
"""
index = self._get_index()
if type(paths) in (str, unicode):
paths = [paths]
for path in paths:
relative_path = self.get_relative_path(path)
absolute_path = self.get_absolute_path(path)
blob = self._get_blob_from_file(absolute_path)
if relative_path in index:
(ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags) = index[relative_path]
else:
flags = 0
# make sure mtime and ctime is updated every time a file is staged
(mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime) = os.stat(path)
index[relative_path] = (ctime, mtime, dev, ino, mode, uid, gid, size, blob.id, flags)
index.write()
self.notify({
"action": "Staged",
"path": absolute_path,
"mime_type": guess_type(absolute_path)[0]
})
self.repo.object_store.add_object(blob)
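    # Illustrative usage of stage() (hypothetical paths, not from the original
    # source):
    #   client.stage(["/home/user/repo/foo.py", "/home/user/repo/docs/readme.txt"])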
def stage_all(self):
"""
Stage all files in a repository to be committed or tracked
"""
index = self._get_index()
for status in self.status():
if status in [AddedStatus, RemovedStatus, ModifiedStatus]:
abs_path = self.get_absolute_path(status.path)
if os.path.isfile(abs_path):
self.stage(abs_path)
if status == MissingStatus:
self._remove_from_index(index, status.path)
index.write()
def unstage(self, paths):
"""
Unstage files so they are not committed or tracked
@type paths: list
@param paths: A list of files
"""
index = self._get_index()
tree = self._get_tree_index()
if type(paths) in (str, unicode):
paths = [paths]
for path in paths:
relative_path = self.get_relative_path(path)
if relative_path in index:
if relative_path in tree:
(ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags) = index[relative_path]
(mode, blob_id) = tree[relative_path]
# If the file is locally modified, set these vars to 0
# I'm not sure yet why this needs to happen, but it does
# in order for the file to appear modified and not normal
blob = self._get_blob_from_file(path)
if blob.id != blob_id:
ctime = 0
mtime = 0
dev = 0
ino = 0
uid = 0
gid = 0
size = 0
index[relative_path] = (ctime, mtime, dev, ino, mode, uid, gid, size, blob_id, flags)
else:
self._remove_from_index(index, relative_path)
else:
if relative_path in tree:
index[relative_path] = (0, 0, 0, 0, tree[relative_path][0], 0, 0, 0, tree[relative_path][1], 0)
self.notify({
"action": "Unstaged",
"path": path,
"mime_type": guess_type(path)[0]
})
index.write()
def unstage_all(self):
"""
Unstage all files so they are not committed or tracked
"""
index = self._get_index()
for status in self.status():
abs_path = self.get_absolute_path(status.path)
if os.path.isfile(abs_path):
self.unstage(abs_path)
def get_staged(self):
"""
Gets a list of files that are staged
"""
staged = []
tree = self._get_tree_at_head()
index = self._get_index()
if len(tree) > 0:
for item in index.changes_from_tree(self.repo.object_store, tree.id):
((old_name, new_name), (old_mode, new_mode), (old_sha, new_sha)) = item
if new_name:
staged.append(new_name)
if old_name and old_name != new_name:
staged.append(old_name)
else:
for path in index:
staged.append(path)
return staged
def is_staged(self, path, staged_files=None):
"""
Determines if the specified path is staged
@type path: string
@param path: A file path
@rtype boolean
"""
if not staged_files:
staged_files = self.get_staged()
relative_path = self.get_relative_path(path)
return (relative_path in staged_files)
def branch(self, name, commit_sha=None, track=False):
"""
Create a new branch
@type name: string
@param name: The name of the new branch
@type commit_sha: string
@param commit_sha: A commit sha to branch from. If None, branches
from head
@type track: boolean
@param track: Whether or not to track the new branch, or just create it
"""
cmd = ["git", "branch"]
if track:
cmd.append("-t")
        cmd.append(name)
        # Only pass a start point when one was supplied; per the docstring,
        # None means "branch from head".
        if commit_sha:
            cmd.append(commit_sha)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
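    # Illustrative usage of branch() (hypothetical values, not from the
    # original source): create and track a branch starting from the current head.
    #   client.branch("feature-x", commit_sha="HEAD", track=True)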
def branch_delete(self, name):
"""
Delete a branch
@type name: string
@param name: The name of the branch
"""
ref_name = "refs/heads/%s" % name
refs = self.repo.get_refs()
if ref_name in refs:
if self.is_tracking(ref_name):
self.track("refs/heads/master")
del self.repo.refs[ref_name]
def branch_rename(self, old_name, new_name):
"""
Rename a branch
@type old_name: string
@param old_name: The name of the branch to be renamed
@type new_name: string
@param new_name: The name of the new branch
"""
old_ref_name = "refs/heads/%s" % old_name
new_ref_name = "refs/heads/%s" % new_name
refs = self.repo.get_refs()
if old_ref_name in refs:
self.repo.refs[new_ref_name] = self.repo.refs[old_ref_name]
if self.is_tracking(old_ref_name):
self.track(new_ref_name)
del self.repo.refs[old_ref_name]
def branch_list(self, commit_sha=None):
"""
List all branches
"""
"""
refs = self.repo.get_refs()
branches = []
for ref,branch_sha in refs.items():
if ref.startswith("refs/heads"):
branch = Branch(ref[11:], branch_sha, self.repo[branch_sha])
branches.append(branch)
return branches
"""
cmd = ["git", "branch", "-lv", "--no-abbrev"]
if commit_sha:
cmd += ["--contains", commit_sha]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
branches = []
for line in stdout:
if not line:
continue
components = line.split()
if components[0] != "*":
components.insert(0, "")
            tracking = (components.pop(0) == "*")
if components[0] == "(no":
name = components.pop(0) + " " + components.pop(0)
else:
name = components.pop(0)
revision = components.pop(0)
message = " ".join(components)
branches.append({
"tracking": tracking,
"name": name,
"revision": revision,
"message": message
})
return branches
def checkout(self, paths=[], revision="HEAD"):
"""
Checkout a series of paths from a tree or commit. If no tree or commit
information is given, it will check out the files from head. If no
paths are given, all files will be checked out from head.
@type paths: list
@param paths: A list of files to checkout
@type revision: string
@param revision: The sha or branch to checkout
"""
if len(paths) == 1 and paths[0] == self.repo.path:
paths = []
cmd = ["git", "checkout", "-m", revision] + paths
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def clone(self, host, path, bare=False, origin="origin"):
"""
Clone a repository
@type host: string
@param host: The url of the git repository
@type path: string
@param path: The path to clone to
@type bare: boolean
@param bare: Create a bare repository or not
@type origin: string
@param origin: Specify the origin of the repository
"""
self.numberOfCommandStages = 3
        # Use the origin parameter rather than a hard-coded remote name.
        more = ["-o", origin, "--progress"]
if bare:
more.append("--bare")
base_dir = os.path.split(path)[0]
cmd = ["git", "clone", host, path] + more
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=base_dir, notify=self.notify_and_parse_progress, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def commit(self, message, parents=None, committer=None, commit_time=None,
commit_timezone=None, author=None, author_time=None,
author_timezone=None, encoding=None, commit_all=False):
"""
Commit staged files to the local repository
@type message: string
@param message: The log message
@type parents: list
@param parents: A list of parent SHAs. Defaults to head.
@type committer: string
@param committer: The person committing. Defaults to
"user.name <user.email>"
@type commit_time: int
@param commit_time: The commit time. Defaults to time.time()
@type commit_timezone: int
@param commit_timezone: The commit timezone.
Defaults to (-1 * time.timezone)
@type author: string
@param author: The author of the file changes. Defaults to
"user.name <user.email>"
@type author_time: int
@param author_time: The author time. Defaults to time.time()
@type author_timezone: int
@param author_timezone: The author timezone.
Defaults to (-1 * time.timezone)
@type encoding: string
@param encoding: The encoding of the commit. Defaults to UTF-8.
@type commit_all: boolean
@param commit_all: Stage all changed files before committing
"""
if not committer:
committer = self._get_config_user()
if not committer:
raise GittyupCommandError("A committer was not specified")
if not author:
author = self._get_config_user()
if not author:
raise GittyupCommandError("An author was not specified")
if commit_all:
self.stage_all()
commit = dulwich.objects.Commit()
commit.message = message
commit.tree = commit_index(self.repo.object_store, self._get_index())
initial_commit = False
try:
commit.parents = (parents and parents or [self.repo.head()])
except KeyError:
# The initial commit has no parent
initial_commit = True
pass
commit.committer = committer
commit.commit_time = (commit_time and commit_time or int(time.time()))
commit.commit_timezone = (commit_timezone and commit_timezone or TZ)
commit.author = author
commit.author_time = (author_time and author_time or int(time.time()))
commit.author_timezone = (author_timezone and author_timezone or TZ)
commit.encoding = (encoding and encoding or ENCODING)
self.repo.object_store.add_object(commit)
self.repo.refs["HEAD"] = commit.id
if initial_commit:
self.track("refs/heads/master")
# Get the branch for this repository.
branch_full = self.repo.refs.read_ref("HEAD")
if (branch_full != None):
branch_components = re.search("refs/heads/(.+)", branch_full)
if (branch_components != None):
branch = branch_components.group(1)
self.notify("[" + commit.id + "] -> " + branch)
self.notify("To branch: " + branch)
#Print tree changes.
#dulwich.patch.write_tree_diff(sys.stdout, self.repo.object_store, commit.tree, commit.id)
return commit.id
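    # Illustrative usage of commit() (hypothetical values, not from the
    # original source); the committer string follows the "name <email>" form
    # returned by _get_config_user():
    #   sha = client.commit("Fix typo in README",
    #                       committer="Jane Doe <jane@example.com>")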
def remove(self, paths):
"""
Remove path from the repository. Also deletes the local file.
@type paths: list
@param paths: A list of paths to remove
"""
if type(paths) in (str, unicode):
paths = [paths]
index = self._get_index()
for path in paths:
relative_path = self.get_relative_path(path)
if relative_path in index:
self._remove_from_index(index, relative_path)
os.remove(path)
index.write()
def move(self, source, dest):
"""
Move a file within the repository
@type source: string
@param source: The source file
@type dest: string
@param dest: The destination. If dest exists as a directory, source
will be added as a child. Otherwise, source will be renamed to
dest.
"""
index = self._get_index()
relative_source = self.get_relative_path(source)
relative_dest = self.get_relative_path(dest)
# Get a list of affected files so we can update the index
source_files = []
if os.path.isdir(source):
for name in index:
if name.startswith(relative_source):
source_files.append(name)
else:
source_files.append(self.get_relative_path(source))
# Rename the affected index entries
for source_file in source_files:
new_path = source_file.replace(relative_source, relative_dest)
if os.path.isdir(dest):
new_path = os.path.join(new_path, os.path.basename(source_file))
index[new_path] = index[source_file]
self._remove_from_index(index, source_file)
index.write()
# Actually move the file/folder
shutil.move(source, dest)
def pull(self, repository="origin", refspec="master"):
"""
Fetch objects from a remote repository and merge with the local
repository
@type repository: string
@param repository: The name of the repository
@type refspec: string
@param refspec: The branch name to pull from
"""
self.numberOfCommandStages = 2
cmd = ["git", "pull","--progress", repository, refspec]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify_and_parse_git_pull, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def push(self, repository="origin", refspec="master"):
"""
Push objects from the local repository into the remote repository
and merge them.
@type repository: string
@param repository: The name of the repository
@type refspec: string
        @param refspec: The branch name to push to
"""
self.numberOfCommandStages = 2
cmd = ["git", "push", "--progress", repository, refspec]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify_and_parse_git_push, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def fetch(self, host):
"""
Fetch objects from a remote repository. This will not merge the files
into the local working copy, use pull for that.
@type host: string
@param host: The git url from which to fetch
"""
client, host_path = util.get_transport_and_path(host)
graphwalker = self.repo.get_graph_walker()
f, commit = self.repo.object_store.add_pack()
refs = client.fetch_pack(host_path, self.repo.object_store.determine_wants_all,
graphwalker, f.write, self.callback_notify)
commit()
return refs
def merge(self, branch):
cmd = ["git", "merge", branch]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_add(self, name, host):
"""
Add a remote repository
@type name: string
@param name: The name to give to the remote repository
@type host: string
@param host: The git url to add
"""
cmd = ["git", "remote", "add", name, host]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_rename(self, current_name, new_name):
"""
Rename a remote repository
@type current_name: string
@param current_name: The current name of the repository
@type new_name: string
@param new_name: The name to give to the remote repository
"""
cmd = ["git", "remote", "rename", current_name, new_name]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_set_url(self, name, url):
"""
Change a remote repository's url
@type name: string
@param name: The name of the repository
@type url: string
@param url: The url for the repository
"""
cmd = ["git", "remote", "set-url", name, url]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_delete(self, name):
"""
Remove a remote repository
@type name: string
@param name: The name of the remote repository to remove
"""
cmd = ["git", "remote", "rm", name]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
def remote_list(self):
"""
Return a list of the remote repositories
@rtype list
@return A list of dicts with keys: remote, url, fetch
"""
cmd = ["git", "remote", "-v"]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
returner = []
for line in stdout:
components = line.split()
if components:
name = components[0]
host = components[1]
add = True
for item in returner:
if item["name"] == name:
add = False
if add:
returner.append({
"name": name,
"host": host
})
return returner
def tag(self, name, message, revision="HEAD"):
"""
Create a tag object
@type name: string
@param name: The name to give the tag
@type message: string
@param message: A log message
@type revision: string
@param revision: The revision to tag. Defaults to HEAD
"""
self._get_config_user()
cmd = ["git", "tag", "-m", message, name, revision]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def tag_delete(self, name):
"""
Delete a tag
@type name: string
@param name: The name of the tag to delete
"""
ref_name = "refs/tags/%s" % name
refs = self.repo.get_refs()
if ref_name in refs:
del self.repo.refs[ref_name]
def tag_list(self):
"""
Return a list of Tag objects
"""
refs = self.repo.get_refs()
tags = []
for ref,tag_sha in refs.items():
if ref.startswith("refs/tags"):
if type(self.repo[tag_sha]) == dulwich.objects.Commit:
tag = CommitTag(ref[10:], tag_sha, self.repo[tag_sha])
else:
tag = Tag(tag_sha, self.repo[tag_sha])
tags.append(tag)
return tags
def status_porcelain(self, path):
if os.path.isdir(path):
(files, directories) = self._read_directory_tree(path)
else:
files = [self.get_relative_path(path)]
directories = []
files_hash = {}
for file in files:
files_hash[file] = True
cmd = ["git", "status", "--porcelain", path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
statuses = []
modified_files = []
for line in stdout:
components = re.match("^([\sA-Z\?]+)\s(.*?)$", line)
if components:
status = components.group(1)
strip_status = status.strip()
path = components.group(2)
if status == " D":
statuses.append(MissingStatus(path))
elif strip_status in ["M", "R", "U"]:
statuses.append(ModifiedStatus(path))
elif strip_status in ["A", "C"]:
statuses.append(AddedStatus(path))
elif strip_status == "D":
statuses.append(RemovedStatus(path))
elif strip_status == "??":
statuses.append(UntrackedStatus(path))
modified_files.append(path)
try:
del files_hash[path]
except Exception, e:
pass
# Determine untracked directories
cmd = ["git", "clean", "-nd", self.repo.path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
untracked_directories = []
for line in stdout:
components = re.match("^(Would remove)\s(.*?)$", line)
untracked_path = components.group(2)
if untracked_path[-1]=='/':
untracked_directories.append(untracked_path[:-1])
#Determine the ignored files and directories in Repo
cmd = ["git", "clean", "-ndX", self.repo.path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify).execute()
except GittyupCommandError, e:
self.callback_notify(e)
ignored_directories=[]
for line in stdout:
components = re.match("^(Would remove)\s(.*?)$", line)
ignored_path=components.group(2)
if ignored_path[-1]=='/':
ignored_directories.append(ignored_path[:-1])
                # Assuming the bare "next" statement was meant to be "continue":
                # skip recording the directory itself as an ignored file entry.
                continue
statuses.append(IgnoredStatus(ignored_path))
self.ignored_paths.append(ignored_path)
try:
del files_hash[ignored_path]
except Exception, e:
pass
for file,data in files_hash.items():
ignore_file=False
untracked_file=False
for ignored_path in ignored_directories:
if ignored_path in file:
ignore_file=True
break
for untracked_path in untracked_directories:
if untracked_path in file:
untracked_file=True
break
if untracked_file==True:
statuses.append(UntrackedStatus(file))
if ignore_file==True:
self.ignored_paths.append(file)
elif ignore_file==True:
statuses.append(IgnoredStatus(file))
self.ignored_paths.append(file)
else:
statuses.append(NormalStatus(file))
# Determine status of folders based on child contents
for d in directories:
d_status = NormalStatus(d)
# Check if directory is untracked or a sub-directory of an untracked directory
for untracked_path in untracked_directories:
if untracked_path in d:
d_status = UntrackedStatus(d)
break
# Check if directory includes modified files
for file in modified_files:
if file.startswith(d):
d_status = ModifiedStatus(d)
break
# Check if directory is ignored
for ignored_path in ignored_directories:
if ignored_path in d:
d_status = IgnoredStatus(d)
break
statuses.append(d_status)
return statuses
def status_dulwich(self, path):
tree = self._get_tree_index()
index = self._get_index()
if os.path.isdir(path):
(files, directories) = self._read_directory_tree(path)
else:
files = [self.get_relative_path(path)]
directories = []
files_hash = {}
for file in files:
files_hash[file] = True
statuses = []
# Calculate statuses for files in the current HEAD
modified_files = []
for name in tree:
try:
if index[name]:
inIndex = True
except Exception, e:
inIndex = False
if inIndex:
absolute_path = self.get_absolute_path(name)
if os.path.isfile(absolute_path):
# Cached, determine if modified or not
blob = self._get_blob_from_file(absolute_path)
if blob.id == tree[name][1]:
statuses.append(NormalStatus(name))
else:
modified_files.append(name)
statuses.append(ModifiedStatus(name))
else:
modified_files.append(name)
statuses.append(MissingStatus(name))
else:
modified_files.append(name)
statuses.append(RemovedStatus(name))
try:
del files_hash[name]
except Exception, e:
pass
# Calculate statuses for untracked files
for name,data in files_hash.items():
try:
inTreeIndex = tree[name]
except Exception, e:
inTreeIndex = False
try:
inIndex = index[name]
except Exception, e:
inIndex = False
if inIndex and not inTreeIndex:
modified_files.append(name)
statuses.append(AddedStatus(name))
continue
# Generate a list of appropriate ignore patterns
patterns = []
path_to_check = os.path.dirname(self.get_absolute_path(name))
while path_to_check != self.repo.path:
patterns += self.get_ignore_patterns_from_file(self.get_local_ignore_file(path_to_check))
path_to_check = os.path.split(path_to_check)[0]
patterns += self.get_ignore_patterns_from_file(self.get_local_ignore_file(self.repo.path))
patterns += self.global_ignore_patterns
if not self._ignore_file(patterns, os.path.basename(name)):
statuses.append(UntrackedStatus(name))
else:
self.ignored_paths.append(name)
# Determine status of folders based on child contents
for d in directories:
d_status = NormalStatus(d)
for file in modified_files:
if os.path.join(d, os.path.basename(file)) == file:
d_status = ModifiedStatus(d)
break
statuses.append(d_status)
return statuses
def get_all_ignore_file_paths(self, path):
return self.ignored_paths
def status(self, path):
# TODO - simply get this from the status implementation / avoid global state
self.ignored_paths = []
version = self._get_git_version()
if version and self._version_greater_than(version, [1,7,-1]):
return self.status_porcelain(path)
else:
return self.status_dulwich(path)
def log(self, path="", skip=0, limit=None, revision="", showtype="all"):
cmd = ["git", "--no-pager", "log", "--numstat", "--parents", "--pretty=fuller",
"--date-order"]
if showtype == "all":
cmd.append("--all")
if limit:
cmd.append("-%s" % limit)
if skip:
cmd.append("--skip=%s" % skip)
if revision:
cmd.append(revision)
if path == self.repo.path:
path = ""
if path:
cmd += ["--", path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return []
revisions = []
revision = {}
changed_file = {}
for line in stdout:
if line == "":
continue
if line[0:6] == "commit":
if revision:
if "changed_paths" not in revision:
revision["changed_paths"] = {}
revisions.append(revision)
revision = {}
changed_file = {}
commit_line = line.split(" ")
revision["commit"] = commit_line[1]
revision["parents"] = []
for parent in commit_line[2:]:
revision["parents"].append(parent)
elif line[0:7] == "Author:":
revision["author"] = line[7:].strip()
elif line[0:11] == "AuthorDate:":
revision["author_date"] = line[11:].strip()
elif line[0:7] == "Commit:":
revision["committer"] = line[7:].strip()
elif line[0:11] == "CommitDate:":
revision["commit_date"] = line[11:].strip()
elif line[0:4] == " ":
message = line[4:]
if "message" not in revision:
revision["message"] = ""
else:
revision["message"] += "\n"
revision["message"] = revision["message"] + message
elif line[0].isdigit() or line[0] in "-":
file_line = line.split("\t")
if not changed_file:
revision["changed_paths"] = []
if len(file_line) == 3:
changed_file = {
"additions": file_line[0],
"removals": file_line[1],
"path": file_line[2]
}
revision["changed_paths"].append(changed_file)
if revision:
revisions.append(revision)
return revisions
def annotate(self, path, revision_obj="HEAD"):
"""
Returns an annotation for a specified file
@type path: string
@param path: The absolute path to a tracked file
        @type revision_obj: string
        @param revision_obj: HEAD or a sha1 hash
"""
relative_path = self.get_relative_path(path)
cmd = ["git", "annotate", "-l", revision_obj, relative_path]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
returner = []
for line in stdout:
components = re.split("\t", line, 3)
if len(components) < 4:
continue
dt = datetime(*time.strptime(components[2][:-6],"%Y-%m-%d %H:%M:%S")[:-2])
message = components[3].split(")", 1)
code = message[1]
if len(components) == 5:
code = components[4]
returner.append({
"revision": components[0],
"author": components[1][1:],
"date": dt,
"line": code,
"number": message[0]
})
return returner
def show(self, path, revision_obj):
"""
Returns a particular file at a given revision object.
@type path: string
@param path: The absolute path to a file
@type revision_obj: git.Revision()
@param revision_obj: The revision object for path
"""
if not revision_obj:
revision_obj = "HEAD"
relative_path = self.get_relative_path(path)
cmd = ["git", "show", "%s:%s" % (revision_obj, relative_path)]
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
return "\n".join(stdout)
def diff(self, path1, revision_obj1, path2=None, revision_obj2=None, summarize=False):
"""
Returns the diff between the path(s)/revision(s)
@type path1: string
@param path1: The absolute path to a file
@type revision_obj1: git.Revision()
@param revision_obj1: The revision object for path1
@type path2: string
@param path2: The absolute path to a file
@type revision_obj2: git.Revision()
@param revision_obj2: The revision object for path2
"""
relative_path1 = None
relative_path2 = None
if path1:
relative_path1 = self.get_relative_path(path1)
if path2:
relative_path2 = self.get_relative_path(path2)
cmd = ["git", "diff"]
if revision_obj1:
cmd += [revision_obj1]
if revision_obj2 and path2:
cmd += [revision_obj2]
if relative_path1:
cmd += [relative_path1]
if relative_path2 and relative_path2 != relative_path1:
cmd += [relative_path2]
if summarize:
cmd.append("--name-status")
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
return "\n".join(stdout)
def diff_summarize(self, path1, revision_obj1, path2=None, revision_obj2=None):
results = self.diff(path1, revision_obj1, path2, revision_obj2, True)
summary = []
for line in results.split("\n"):
if not line:
continue
(action, path) = line.split("\t")
summary.append({
"action": action,
"path": path
})
return summary
def export(self, path, dest_path, revision):
"""
Exports a file or directory from a given revision
@type path: string
@param path: The source file/folder to export
@type dest_path: string
@param dest_path: The path to put the exported file(s)
@type revision: string
@param revision: The revision/tree/commit of the source file being exported
"""
tmp_file = get_tmp_path("rabbitvcs-git-export.tar")
cmd1 = ["git", "archive", "--format", "tar", "-o", tmp_file, revision, path]
cmd2 = ["tar", "-xf", tmp_file, "-C", dest_path]
if not os.path.isdir(dest_path):
os.mkdir(dest_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd1, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
(status, stdout, stderr) = GittyupCommand(cmd2, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
stdout = []
self.notify("%s at %s exported to %s" % (path, revision, dest_path))
return "\n".join(stdout)
def clean(self, path, remove_dir=True, remove_ignored_too=False,
remove_only_ignored=False, dry_run=False, force=True):
cmd = ["git", "clean"]
if remove_dir:
cmd.append("-d")
if remove_ignored_too:
cmd.append("-x")
if remove_only_ignored:
cmd.append("-X")
if dry_run:
cmd.append("-n")
if force:
cmd.append("-f")
relative_path = self.get_relative_path(path)
cmd.append(relative_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def reset(self, path, revision, type=None):
relative_path = self.get_relative_path(path)
cmd = ["git", "reset"]
if type:
cmd.append("--%s" % type)
cmd.append(revision)
if relative_path:
cmd.append(relative_path)
try:
(status, stdout, stderr) = GittyupCommand(cmd, cwd=self.repo.path, notify=self.notify, cancel=self.get_cancel).execute()
except GittyupCommandError, e:
self.callback_notify(e)
return
def set_callback_notify(self, func):
self.callback_notify = func
def set_callback_progress_update(self, func):
self.callback_progress_update = func
def set_callback_get_user(self, func):
self.callback_get_user = func
def set_callback_get_cancel(self, func):
self.callback_get_cancel = func
def notify(self, data):
self.callback_notify(data)
def notify_and_parse_progress(self, data):
        # When progress is requested from a git command, it will respond with
        # the current operation and that operation's current progress in the
        # following format: "<Command>: <percentage>% (<pieces completed>/<num pieces>)".
        #
        # When a command has reached 100%, the final message takes the format:
        # "<Command>: 100% (<num pieces>/<num pieces>), <total size> <unit>, done."
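        # Hypothetical examples of such progress lines (not from the original
        # source):
        #   "Receiving objects:  42% (123/292)"
        #   "Resolving deltas: 100% (87/87), done."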
returnData = {"action":"","path":"","mime_type":""}
#print "parsing message: " + str(data)
# If data is already a dict, we'll assume it's already been parsed, and return.
if isinstance (data, dict):
self.notify (data);
return
# Is this an error?
message_components = re.search("^([eE]rror|[fF]atal): (.+)", data)
if message_components != None:
returnData["action"] = "Error"
returnData["path"] = message_components.group(2)
self.notify (returnData)
return
# Check to see if this is a remote command.
remote_check = re.search("^(remote: )(.+)$", data)
if remote_check != None:
returnData["action"] = "Remote"
message = remote_check.group(2)
else:
message = data
# First, we'll test to see if this is a progress notification.
if "%" not in message:
# No, this is just a regular message.
            # Some messages have a strange tendency to append a non-printable
            # character followed by a right square bracket and a capital "K".
            # This tests for, and strips, those superfluous characters.
message_components = re.search("^(.+).\[K", message)
if message_components != None:
returnData["path"] = message_components.group(1)
else:
returnData["path"] = message
self.notify (returnData)
return
# Extract the percentage, which will be all numerals directly
# prior to '%'.
message_components = re.search("^(.+): +([0-9]+)%", message)
if message_components == None:
print "Error: failed to parse git string: " + data
return
fraction = float(message_components.group(2)) / 100 # Convert percentage to fraction.
current_action = message_components.group(1)
# If we're at 0%, then we want to notify which action we're performing.
if fraction == 0:
returnData["path"] = current_action
self.notify(returnData)
#print "stage fraction: " + str (fraction)
        # If we're using a number of stages, adjust the fraction accordingly.
if self.numberOfCommandStages > 0:
fraction = (self.numberOfCommandStagesExecuted + fraction) / self.numberOfCommandStages
# If we've finished the current stage (100%).
if "done" in message:
self.numberOfCommandStagesExecuted += 1
# If we've registered a callback for progress, update with the new fraction.
if self.callback_progress_update != None:
#print "setting pbar: " + str(fraction)
self.callback_progress_update(fraction)
# If we've finished the whole command (all stages).
if fraction == 1 and "done" in message:
# Reset stage variables.
self.numberOfCommandStages = 0
self.numberOfCommandStagesExecuted = 0
def notify_and_parse_git_pull (self, data):
return_data = {"action":"","path":"","mime_type":""}
message_parsed = False
# Look for "From" line (e.g. "From ssh://server:22/my_project")
message_components = re.search("^From (.+)", data)
if message_components != None:
return_data["action"] = "From"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for "Branch" line (e.g. "* branch master -> FETCH_HEAD")
message_components = re.search("\* branch +([A-z0-9]+) +-> (.+)", data)
if message_components != None:
return_data["action"] = "Branch"
return_data["path"] = message_components.group(1) + " -> " + message_components.group(2)
message_parsed = True
# Look for a file line (e.g. "src/somefile.py | 5 -++++")
message_components = re.search(" +(.+) +\| *([0-9]+) ([+-]+)", data)
if message_components != None:
return_data["action"] = "Modified"
return_data["path"] = message_components.group(1)
return_data["mime_type"] = message_components.group(2) + " " + message_components.group(3)
message_parsed = True
        # Look for an updating line (e.g. "Updating ffffff..ffffff")
message_components = re.search("^Updating ([a-f0-9.]+)", data)
if message_components != None:
return_data["action"] = "Updating"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "create mode" line (e.g. "create mode 100755 file.py")
message_components = re.search("create mode ([0-9]+) (.+)", data)
if message_components != None:
return_data["action"] = "Create"
return_data["path"] = message_components.group(2)
return_data["mime_type"] = "mode: " + message_components.group(1)
message_parsed = True
        # Look for a "delete mode" line (e.g. "delete mode 100755 file.py")
message_components = re.search("delete mode ([0-9]+) (.+)", data)
if message_components != None:
return_data["action"] = "Delete"
return_data["path"] = message_components.group(2)
return_data["mime_type"] = "mode: " + message_components.group(1)
message_parsed = True
# Look for an "Auto-merging" line (e.g. "Auto-merging src/file.py")
message_components = re.search("^Auto-merging (.+)", data)
if message_components != None:
return_data["action"] = "Merging"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "binary" line (e.g. "icons/file.png" | Bin 0 -> 55555 bytes)
message_components = re.search("^[ ](.+) +\| Bin ([0-9]+ -> [0-9]+ bytes)", data)
if message_components != None:
return_data["action"] = "Binary"
return_data["path"] = message_components.group(1)
return_data["mime_type"] = message_components.group(2)
message_parsed = True
# Look for a "rename" line (e.g. "rename src/{foo.py => bar.py} (50%)")
message_components = re.search("rename (.+}) \([0-9]+%\)", data)
if message_components != None:
return_data["action"] = "Rename"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for a "copy" line (e.g. "copy src/{foo.py => bar.py} (50%)")
message_components = re.search("copy (.+}) \([0-9]+%\)", data)
if message_components != None:
return_data["action"] = "Copy"
return_data["path"] = message_components.group(1)
message_parsed = True
# Prepend "Error" to conflict lines. e.g. :
# CONFLICT (content): Merge conflict in file.py.
# Automatic merge failed; fix conflicts and then commit the result.
message_components = re.search("^CONFLICT \(|Automatic merge failed", data)
if message_components != None:
return_data["action"] = "Error"
return_data["path"] = data
message_parsed = True
if message_parsed == False:
return_data = data
self.notify_and_parse_progress (return_data)
def notify_and_parse_git_push (self, data):
return_data = {"action":"","path":"","mime_type":""}
message_parsed = False
        # Look for a "To" line, e.g. "To gitosis@server.org:project.git". Exclude any
        # lines that include a space (as this could be a message about something else).
message_components = re.search("^To ([^ ]+$)", data)
if message_components != None:
return_data["action"] = "To"
return_data["path"] = message_components.group(1)
message_parsed = True
# Look for "new branch" line. e.g. " * [new branch] master -> master"
message_components = re.search("^ \* \[new branch\] +(.+) -> (.+)", data)
if message_components != None:
return_data["action"] = "New Branch"
return_data["path"] = message_components.group(1) + " -> " + message_components.group(2)
message_parsed = True
# Look for "rejected" line. e.g. " ![rejected] master -> master (non-fast-forward)".
message_components = re.search("!\[rejected\] +(.+)", data)
if message_components != None:
return_data["action"] = "Rejected"
return_data["path"] = message_components.group(1)
message_parsed = True
if message_parsed == False:
return_data = data
self.notify_and_parse_progress (return_data)
def get_cancel(self):
return self.callback_get_cancel
|
gpl-2.0
| 601,569,765,097,626,400
| 32.073568
| 151
| 0.524556
| false
| 4.193138
| true
| false
| false
|
utlco/tcnc
|
tcnc/cam/offset.py
|
1
|
7587
|
#-----------------------------------------------------------------------------
# Copyright 2012-2016 Claude Zervas
# email: claude@utlco.com
#-----------------------------------------------------------------------------
"""
Offset Line/Arc segments in a tool path to compensate for tool trail offset.
"""
# Python 3 compatibility boilerplate
from __future__ import (absolute_import, division,
print_function, unicode_literals)
from future_builtins import *
import math
import logging
import geom
from . import toolpath
from . import util
logger = logging.getLogger(__name__)
def offset_path(path, offset, min_arc_dist, g1_tolerance=None):
"""Recalculate path to compensate for a trailing tangential offset.
This will shift all of the segments by `offset` amount. Arcs will
be recalculated to correct for the shift offset.
Args:
path: The path to recalculate.
offset: The amount of tangential tool trail.
min_arc_dist: The minimum distance between two connected
segment end points that can be bridged with an arc.
A line will be used if the distance is less than this.
g1_tolerance: The angle tolerance to determine if two segments
are g1 continuous.
Returns:
A new path
Raises:
:class:`cam.toolpath.ToolpathException`: if the path contains segment
types other than Line or Arc.
"""
if geom.float_eq(offset, 0.0):
        return path
offset_path = []
prev_seg = None
prev_offset_seg = None
for seg in path:
if seg.p1 == seg.p2:
# Skip zero length segments
continue
if isinstance(seg, geom.Line):
# Line segments are easy - just shift them forward by offset
offset_seg = seg.shift(offset)
elif isinstance(seg, geom.Arc):
offset_seg = offset_arc(seg, offset)
else:
raise toolpath.ToolpathException('Unrecognized path segment type.')
# Fix discontinuities caused by offsetting non-G1 segments
if prev_seg is not None:
if prev_offset_seg.p2 != offset_seg.p1:
seg_distance = prev_offset_seg.p2.distance(offset_seg.p1)
# If the distance between the two segments is less than the
# minimum arc distance or if the segments are G1 continuous
# then just insert a connecting line.
if (seg_distance < min_arc_dist
or geom.segments_are_g1(prev_offset_seg, offset_seg,
g1_tolerance)):
connect_seg = geom.Line(prev_offset_seg.p2, offset_seg.p1)
else:
# Insert an arc in tool path to rotate the tool to the next
# starting tangent when the segments are not G1 continuous.
# TODO: avoid creating tiny segments by extending
# offset segment.
p1 = prev_offset_seg.p2
p2 = offset_seg.p1
angle = prev_seg.p2.angle2(p1, p2)
# TODO: This should be a straight line if the arc is tiny
connect_seg = geom.Arc(p1, p2, offset, angle, prev_seg.p2)
# if connect_seg.length() < 0.01:
# logger.debug('tiny arc! length= %f, radius=%f, angle=%f', connect_seg.length(), connect_seg.radius, connect_seg.angle)
connect_seg.inline_start_angle = prev_seg.end_tangent_angle()
connect_seg.inline_end_angle = seg.start_tangent_angle()
offset_path.append(connect_seg)
prev_offset_seg = connect_seg
elif (geom.segments_are_g1(prev_seg, seg, g1_tolerance) and
not hasattr(prev_seg, 'ignore_g1') and
not hasattr(seg, 'ignore_g1')):
# Add hint for smoothing pass
prev_offset_seg.g1 = True
prev_seg = seg
prev_offset_seg = offset_seg
offset_path.append(offset_seg)
# Compensate for starting angle
start_angle = (offset_path[0].p1 - path[0].p1).angle()
offset_path[0].inline_start_angle = start_angle
return offset_path
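# Illustrative call (hypothetical values, not from the original source):
# compensate for a 0.25 unit tool trail, bridging non-G1 joints wider than
# 0.01 with connecting arcs and narrower ones with short lines.
#   toolpath_out = offset_path(toolpath_in, 0.25, 0.01, g1_tolerance=0.001)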
def offset_arc(arc, offset):
"""Offset the arc by the specified offset.
"""
start_angle = arc.start_tangent_angle()
end_angle = arc.end_tangent_angle()
p1 = arc.p1 + geom.P.from_polar(offset, start_angle)
p2 = arc.p2 + geom.P.from_polar(offset, end_angle)
radius = math.hypot(offset, arc.radius)
offset_arc = geom.Arc(p1, p2, radius, arc.angle, arc.center)
offset_arc.inline_start_angle = start_angle
offset_arc.inline_end_angle = end_angle
return offset_arc
def fix_G1_path(path, tolerance, line_flatness):
"""
"""
new_path = []
if len(path) < 2:
return path
seg1 = path[0]
cp1 = seg1.p1
for seg2 in path[1:]:
if getattr(seg1, 'g1', False):
arcs, cp1 = smoothing_arcs(seg1, seg2, cp1,
tolerance=tolerance, max_depth=1,
line_flatness=line_flatness)
new_path.extend(arcs)
else:
cp1 = seg2.p1
new_path.append(seg1)
seg1 = seg2
# Process last segment...
if getattr(seg1, 'g1', False):
arcs, cp1 = smoothing_arcs(seg1, None, cp1,
tolerance=tolerance, max_depth=1,
line_flatness=line_flatness)
new_path.extend(arcs)
else:
new_path.append(seg1)
return new_path
def smoothing_arcs(seg1, seg2, cp1=None,
tolerance=0.0001, line_flatness=0.0001,
max_depth=1, match_arcs=True):
"""Create circular smoothing biarcs between two segments
that are not currently G1 continuous.
Args:
seg1: First path segment containing first and second points.
Can be a geom.Line or geom.Arc.
seg2: Second path segment containing second and third points.
Can be a geom.Line or geom.Arc.
cp1: Control point computed from previous invocation.
tolerance: Biarc matching tolerance.
line_flatness: Curve to line tolerance.
max_depth: Max Bezier subdivision recursion depth.
match_arcs: Attempt to more closely match existing arc segments.
Default is True.
Returns:
A tuple containing a list of biarc segments and the control point
for the next curve.
"""
curve, cp1 = geom.bezier.smoothing_curve(seg1, seg2, cp1, match_arcs)
# geom.debug.draw_bezier(curve, color='#00ff44') #DEBUG
biarc_segs = curve.biarc_approximation(tolerance=tolerance,
max_depth=max_depth,
line_flatness=line_flatness)
if not biarc_segs:
return ((seg1,), seg1.p2)
# Compute total arc length of biarc approximation
biarc_length = 0
for seg in biarc_segs:
biarc_length += seg.length()
# Fix inline rotation hints for each new arc segment.
a_start = util.seg_start_angle(seg1)
a_end = a_start
sweep = geom.normalize_angle(util.seg_end_angle(seg1) - a_start, center=0.0)
sweep_scale = sweep / biarc_length
for arc in biarc_segs:
a_end = a_start + (arc.length() * sweep_scale)
arc.inline_start_angle = a_start
arc.inline_end_angle = a_end
a_start = a_end
return (biarc_segs, cp1)
|
lgpl-3.0
| 6,676,673,369,152,770,000
| 39.790323
| 143
| 0.57704
| false
| 3.900771
| false
| false
| false
|
edugrasa/demonstrator
|
gen_templates.py
|
1
|
11003
|
#
# Copyright (C) 2014-2017 Nextworks
# Author: Vincenzo Maffione <v.maffione@nextworks.it>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# Template for a IPCM configuration file
ipcmconf_base = {
"configFileVersion": "1.4.1",
"localConfiguration": {
"installationPath": "%(installpath)s/bin",
"libraryPath": "%(installpath)s/lib",
"logPath": "%(varpath)s/var/log",
"consoleSocket": "%(varpath)s/var/run/ipcm-console.sock",
"system-name": "%(sysname)s",
"pluginsPaths": [
"%(installpath)s/lib/rinad/ipcp",
"/lib/modules/4.9.28-irati/extra"
]
},
"ipcProcessesToCreate": [],
"difConfigurations": [],
}
da_map_base = {
"applicationToDIFMappings": [
{
"encodedAppName": "rina.apps.echotime.server-1--",
"difName": "n.DIF"
},
{
"encodedAppName": "traffic.generator.server-1--",
"difName": "n.DIF"
}
],
}
# Template for a normal DIF configuration file
normal_dif_base = {
"difType" : "normal-ipc",
"dataTransferConstants" : {
"addressLength" : 2,
"cepIdLength" : 2,
"lengthLength" : 2,
"portIdLength" : 2,
"qosIdLength" : 2,
"rateLength" : 4,
"frameLength" : 4,
"sequenceNumberLength" : 4,
"ctrlSequenceNumberLength" : 4,
"maxPduSize" : 1470,
"maxSduSize" : 10000,
"difFragmentation" : True,
"maxPduLifetime" : 60000
},
"qosCubes" : [ {
"name" : "unreliablewithflowcontrol",
"id" : 1,
"partialDelivery" : False,
"orderedDelivery" : True,
"efcpPolicies" : {
"dtpPolicySet" : {
"name" : "default",
"version" : "0"
},
"initialATimer" : 0,
"dtcpPresent" : True,
"dtcpConfiguration" : {
"dtcpPolicySet" : {
"name" : "default",
"version" : "0"
},
"rtxControl" : False,
"flowControl" : True,
"flowControlConfig" : {
"rateBased" : False,
"windowBased" : True,
"windowBasedConfig" : {
"maxClosedWindowQueueLength" : 10,
"initialCredit" : 200
}
}
}
}
}, {
"name" : "reliablewithflowcontrol",
"id" : 2,
"partialDelivery" : False,
"orderedDelivery" : True,
"maxAllowableGap": 0,
"efcpPolicies" : {
"dtpPolicySet" : {
"name" : "default",
"version" : "0"
},
"initialATimer" : 0,
"dtcpPresent" : True,
"dtcpConfiguration" : {
"dtcpPolicySet" : {
"name" : "default",
"version" : "0"
},
"rtxControl" : True,
"rtxControlConfig" : {
"dataRxmsNmax" : 5,
"initialRtxTime" : 1000
},
"flowControl" : True,
"flowControlConfig" : {
"rateBased" : False,
"windowBased" : True,
"windowBasedConfig" : {
"maxClosedWindowQueueLength" : 10,
"initialCredit" : 200
}
}
}
}
} ],
"knownIPCProcessAddresses": [],
"addressPrefixes" : [ {
"addressPrefix" : 0,
"organization" : "N.Bourbaki"
}, {
"addressPrefix" : 16,
"organization" : "IRATI"
} ],
"rmtConfiguration" : {
"pffConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "0"
}
},
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"enrollmentTaskConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1",
"parameters" : [ {
"name" : "enrollTimeoutInMs",
"value" : "10000"
}, {
"name" : "watchdogPeriodInMs",
"value" : "30000"
}, {
"name" : "declaredDeadIntervalInMs",
"value" : "120000"
}, {
"name" : "neighborsEnrollerPeriodInMs",
"value" : "0"
}, {
"name" : "maxEnrollmentRetries",
"value" : "0"
} ]
}
},
"flowAllocatorConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"namespaceManagerConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"securityManagerConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "1"
}
},
"resourceAllocatorConfiguration" : {
"pduftgConfiguration" : {
"policySet" : {
"name" : "default",
"version" : "0"
}
}
},
"routingConfiguration" : {
"policySet" : {
"name" : "link-state",
"version" : "1",
"parameters" : [ {
"name" : "objectMaximumAge",
"value" : "10000"
},{
"name" : "waitUntilReadCDAP",
"value" : "5001"
},{
"name" : "waitUntilError",
"value" : "5001"
},{
"name" : "waitUntilPDUFTComputation",
"value" : "103"
},{
"name" : "waitUntilFSODBPropagation",
"value" : "101"
},{
"name" : "waitUntilAgeIncrement",
"value" : "997"
},{
"name" : "routingAlgorithm",
"value" : "Dijkstra"
}]
}
}
}
def ps_set(d, k, v, parms):
if k not in d:
d[k] = {'name': '', 'version': '1'}
if d[k]["name"] == v and "parameters" in d[k]:
cur_names = [p["name"] for p in d[k]["parameters"]]
for p in parms:
name, value = p.split('=')
if name in cur_names:
for i in range(len(d[k]["parameters"])):
if d[k]["parameters"][i]["name"] == name:
d[k]["parameters"][i]["value"] = value
break
else:
d[k]["parameters"].append({ 'name': name, 'value': value })
elif len(parms) > 0:
d[k]["parameters"] = [ { 'name': p.split('=')[0], 'value': p.split('=')[1]} for p in parms ]
d[k]["name"] = v
def dtp_ps_set(d, v, parms):
for i in range(len(d["qosCubes"])):
ps_set(d["qosCubes"][i]["efcpPolicies"], "dtpPolicySet", v, parms)
def dtcp_ps_set(d, v, parms):
for i in range(len(d["qosCubes"])):
ps_set(d["qosCubes"][i]["efcpPolicies"]["dtcpConfiguration"], "dtcpPolicySet", v, parms)
policy_translator = {
'rmt.pff': lambda d, v, p: ps_set(d["rmtConfiguration"]["pffConfiguration"], "policySet", v, p),
'rmt': lambda d, v, p: ps_set(d["rmtConfiguration"], "policySet", v, p),
'enrollment-task': lambda d, v, p: ps_set(d["enrollmentTaskConfiguration"], "policySet", v, p),
'flow-allocator': lambda d, v, p: ps_set(d["flowAllocatorConfiguration"], "policySet", v, p),
'namespace-manager': lambda d, v, p: ps_set(d["namespaceManagerConfiguration"], "policySet", v, p),
'security-manager': lambda d, v, p: ps_set(d["securityManagerConfiguration"], "policySet", v, p),
'routing': lambda d, v, p: ps_set(d["routingConfiguration"], "policySet", v, p),
'resource-allocator.pduftg': lambda d, v, p: ps_set(d["resourceAllocatorConfiguration"], "policySet", v, p),
'efcp.*.dtcp': None,
'efcp.*.dtp': None,
}
def is_security_path(path):
sp = path.split('.')
return (len(sp) == 3) and (sp[0] == 'security-manager') and (sp[1] in ['auth', 'encrypt', 'ttl', 'errorcheck'])
# Do we know this path ?
def policy_path_valid(path):
if path in policy_translator:
return True
# Try to validate security configuration
if is_security_path(path):
return True
return False
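# Illustrative (hypothetical) inputs: policy_path_valid('rmt.pff'),
# policy_path_valid('efcp.*.dtp') and
# policy_path_valid('security-manager.auth.default') all return True, while an
# unknown path such as 'foo.bar' returns False.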
def translate_security_path(d, path, ps, parms):
u1, component, profile = path.split('.')
if "authSDUProtProfiles" not in d["securityManagerConfiguration"]:
d["securityManagerConfiguration"]["authSDUProtProfiles"] = {}
d = d["securityManagerConfiguration"]["authSDUProtProfiles"]
tr = {'auth': 'authPolicy', 'encrypt': 'encryptPolicy',
'ttl': 'TTLPolicy', 'errorcheck': 'ErrorCheckPolicy'}
if profile == 'default':
if profile not in d:
d["default"] = {}
ps_set(d["default"], tr[component], ps, parms)
else: # profile is the name of a DIF
if "specific" not in d:
d["specific"] = []
j = -1
for i in range(len(d["specific"])):
if d["specific"][i]["underlyingDIF"] == profile + ".DIF":
j = i
break
if j == -1: # We need to create an entry for the new DIF
d["specific"].append({"underlyingDIF" : profile + ".DIF"})
ps_set(d["specific"][j], tr[component], ps, parms)
def translate_policy(difconf, path, ps, parms):
if path =='efcp.*.dtcp':
dtcp_ps_set(difconf, ps, parms)
elif path == 'efcp.*.dtp':
dtp_ps_set(difconf, ps, parms)
elif is_security_path(path):
translate_security_path(difconf, path, ps, parms)
else:
policy_translator[path](difconf, ps, parms)
|
gpl-2.0
| -1,483,902,271,451,091,000
| 30.527221
| 115
| 0.468418
| false
| 3.82447
| true
| false
| false
|
anomalizer/ngx_aws_auth
|
reference-impl-py/reference_v2.py
|
1
|
2701
|
#!/usr/bin/env python
from datetime import datetime
from hashlib import sha1
import hmac
import sys
try:
from urllib.request import Request, urlopen, HTTPError # Python 3
except:
from urllib2 import Request, urlopen, HTTPError # Python 2
'''
Authorization = "AWS" + " " + AWSAccessKeyId + ":" + Signature;
Signature = Base64( HMAC-SHA1( YourSecretAccessKeyID, UTF-8-Encoding-Of( StringToSign ) ) );
StringToSign = HTTP-Verb + "\n" +
Content-MD5 + "\n" +
Content-Type + "\n" +
Date + "\n" +
CanonicalizedAmzHeaders +
CanonicalizedResource;
CanonicalizedResource = [ "/" + Bucket ] +
<HTTP-Request-URI, from the protocol name up to the query string> +
[ subresource, if present. For example "?acl", "?location", "?logging", or "?torrent"];
CanonicalizedAmzHeaders = <described below>
'''
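# Illustrative string-to-sign built from the scheme above (hypothetical date),
# for a GET of /lock.txt in bucket "hw.anomalizer" in virtual-host mode, with
# empty Content-MD5, Content-Type and amz headers:
#   "GET\n\n\nMon, 01 Jan 2018 00:00:00 GMT\n/hw.anomalizer/lock.txt"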
def canon_resource(vhost_mode, bucket, url):
val = "/%s" % bucket if vhost_mode else ""
val = val+url
return val
def str_to_sign_v2(method, vhost_mode, bucket, url):
cr = canon_resource(vhost_mode, bucket, url)
ctype = ""
cmd5 = ""
dt = datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S GMT')
azh = ""
retval = "%s\n%s\n%s\n%s\n%s%s" % (method,
cmd5, ctype, dt, azh, cr)
headers = {}
headers['Date'] = dt
if vhost_mode:
headers['Host'] = "%s.s3.amazonaws.com" % bucket
return {'s2s': retval, 'headers': headers }
def v2sign(key, method, vhost_mode, bucket, url):
raw = str_to_sign_v2(method, vhost_mode, bucket, url)
print "String to sign is\n----------------------\n%s\n---------------------\n" % raw['s2s']
retval = hmac.new(key, raw['s2s'], sha1)
return {'sign': retval.digest().encode("base64").rstrip("\n"),
'headers': raw['headers']}
def az_h(ak, key, method, vhost_mode, bucket, url):
sig = v2sign(key, method, vhost_mode, bucket, url)
ahv = "AWS %s:%s" % (ak, sig['sign'])
sig['headers']['Authorization'] = ahv
return sig['headers']
def get_data(ak, key, method, vhost_mode, bucket, url):
if vhost_mode:
rurl = "http://%s.s3.amazonaws.com%s" % (bucket, url)
else:
rurl = "http://s3.amazonaws.com%s" % (url)
q = Request(rurl)
headers = az_h(ak, key, method, vhost_mode, bucket, url)
print 'About to make a request'
print url
print headers
for k,v in headers.iteritems():
q.add_header(k, v)
try:
return urlopen(q).read()
except HTTPError as e:
print 'Got exception', e
if __name__ == "__main__":
ak = sys.argv[1]
k = sys.argv[2]
print get_data(ak, k, "GET", True, "hw.anomalizer", "/lock.txt")
print get_data(ak, k, "GET", False, "hw.anomalizer", "/hw.anomalizer/nq.c")
|
bsd-2-clause
| -5,930,532,583,128,790,000
| 29.011111
| 95
| 0.601999
| false
| 2.964874
| false
| false
| false
|
CARocha/simasinnovacion
|
servicios/views.py
|
1
|
1928
|
# -*- coding: utf-8 -*-
from django.shortcuts import render, get_object_or_404
from .models import Servicios
from .forms import ServiciosForm
import json
from django.http import HttpResponse
def _queryset_filtrado(request):
params = {}
if 'tipos_servicios' in request.session:
params['tipos_servicios'] = request.session['tipos_servicios']
if 'temas_abordan' in request.session:
params['temas_abordan'] = request.session['temas_abordan']
if 'org_benefician' in request.session:
params['org_benefician'] = request.session['org_benefician']
if 'fecha' in request.session:
params['fecha'] = request.session['fecha']
    invalid_keys = []
    for key in params:
        if not params[key]:
            invalid_keys.append(key)
    for key in invalid_keys:
        del params[key]
return Servicios.objects.filter(**params)
def servicios_index(request, template="servicios/servicios.html"):
if request.method == 'POST':
form = ServiciosForm(request.POST)
if form.is_valid():
request.session['tipos_servicios'] = form.cleaned_data['tipos_servicios']
request.session['temas_abordan'] = form.cleaned_data['temas_abordan']
request.session['org_benefician'] = form.cleaned_data['org_benefician']
request.session['fecha'] = form.cleaned_data['fecha']
request.session['bandera'] = 1
else:
form = ServiciosForm()
request.session['bandera'] = 0
if request.session['bandera'] == 1:
con = _queryset_filtrado(request)
else:
con = ''
return render(request, template, {'form':form,
'lista_servicios':con})
def servicios_pagina(request, id, template="servicios/ficha_servicios.html"):
servicio = get_object_or_404(Servicios, id=id)
return render(request, template, {'servicio':servicio})
|
mit
| -5,750,363,850,856,495,000
| 36.096154
| 97
| 0.633817
| false
| 3.312715
| false
| false
| false
|
lunixbochs/actualvim
|
lib/asyncio/locks.py
|
1
|
14849
|
"""Synchronization primitives."""
__all__ = ['Lock', 'Event', 'Condition', 'Semaphore', 'BoundedSemaphore']
import collections
from ActualVim.lib.asyncio_inc import compat
from . import events
from . import futures
from .coroutines import coroutine
class _ContextManager:
"""Context manager.
This enables the following idiom for acquiring and releasing a
lock around a block:
with (yield from lock):
<block>
while failing loudly when accidentally using:
with lock:
<block>
"""
def __init__(self, lock):
self._lock = lock
def __enter__(self):
# We have no use for the "as ..." clause in the with
# statement for locks.
return None
def __exit__(self, *args):
try:
self._lock.release()
finally:
self._lock = None # Crudely prevent reuse.
class _ContextManagerMixin:
def __enter__(self):
raise RuntimeError(
'"yield from" should be used as context manager expression')
def __exit__(self, *args):
# This must exist because __enter__ exists, even though that
# always raises; that's how the with-statement works.
pass
@coroutine
def __iter__(self):
# This is not a coroutine. It is meant to enable the idiom:
#
# with (yield from lock):
# <block>
#
# as an alternative to:
#
# yield from lock.acquire()
# try:
# <block>
# finally:
# lock.release()
yield from self.acquire()
return _ContextManager(self)
if compat.PY35:
def __await__(self):
# To make "with await lock" work.
yield from self.acquire()
return _ContextManager(self)
@coroutine
def __aenter__(self):
yield from self.acquire()
# We have no use for the "as ..." clause in the with
# statement for locks.
return None
@coroutine
def __aexit__(self, exc_type, exc, tb):
self.release()
class Lock(_ContextManagerMixin):
"""Primitive lock objects.
A primitive lock is a synchronization primitive that is not owned
by a particular coroutine when locked. A primitive lock is in one
of two states, 'locked' or 'unlocked'.
It is created in the unlocked state. It has two basic methods,
acquire() and release(). When the state is unlocked, acquire()
changes the state to locked and returns immediately. When the
state is locked, acquire() blocks until a call to release() in
another coroutine changes it to unlocked, then the acquire() call
resets it to locked and returns. The release() method should only
be called in the locked state; it changes the state to unlocked
and returns immediately. If an attempt is made to release an
unlocked lock, a RuntimeError will be raised.
    When more than one coroutine is blocked in acquire() waiting for
    the state to turn to unlocked, only one coroutine proceeds when a
    release() call resets the state to unlocked; the first coroutine
    that blocked in acquire() is the one that proceeds.
acquire() is a coroutine and should be called with 'yield from'.
Locks also support the context management protocol. '(yield from lock)'
should be used as the context manager expression.
Usage:
lock = Lock()
...
yield from lock
try:
...
finally:
lock.release()
Context manager usage:
lock = Lock()
...
with (yield from lock):
...
Lock objects can be tested for locking state:
if not lock.locked():
yield from lock
else:
# lock is acquired
...
"""
def __init__(self, *, loop=None):
self._waiters = collections.deque()
self._locked = False
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self._locked else 'unlocked'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def locked(self):
"""Return True if lock is acquired."""
return self._locked
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
finally:
self._waiters.remove(fut)
def release(self):
"""Release a lock.
When the lock is locked, reset it to unlocked, and return.
If any other coroutines are blocked waiting for the lock to become
unlocked, allow exactly one of them to proceed.
When invoked on an unlocked lock, a RuntimeError is raised.
There is no return value.
"""
if self._locked:
self._locked = False
# Wake up the first waiter who isn't cancelled.
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
else:
raise RuntimeError('Lock is not acquired.')
class Event:
"""Asynchronous equivalent to threading.Event.
Class implementing event objects. An event manages a flag that can be set
to true with the set() method and reset to false with the clear() method.
The wait() method blocks until the flag is true. The flag is initially
false.
"""
def __init__(self, *, loop=None):
self._waiters = collections.deque()
self._value = False
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'set' if self._value else 'unset'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def is_set(self):
"""Return True if and only if the internal flag is true."""
return self._value
def set(self):
"""Set the internal flag to true. All coroutines waiting for it to
        become true are awakened. Coroutines that call wait() once the flag is
true will not block at all.
"""
if not self._value:
self._value = True
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
def clear(self):
"""Reset the internal flag to false. Subsequently, coroutines calling
wait() will block until set() is called to set the internal flag
to true again."""
self._value = False
@coroutine
def wait(self):
"""Block until the internal flag is true.
If the internal flag is true on entry, return True
immediately. Otherwise, block until another coroutine calls
set() to set the flag to true, then return True.
"""
if self._value:
return True
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
return True
finally:
self._waiters.remove(fut)
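# Minimal usage sketch for Event (assumes a running event loop; names are illustrative):
#
#   event = Event()
#
#   @coroutine
#   def waiter():
#       yield from event.wait()    # blocks until event.set() is called
#
#   # elsewhere, once the condition of interest holds:
#   event.set()                    # wakes every coroutine blocked in wait()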
class Condition(_ContextManagerMixin):
"""Asynchronous equivalent to threading.Condition.
This class implements condition variable objects. A condition variable
allows one or more coroutines to wait until they are notified by another
coroutine.
A new Lock object is created and used as the underlying lock.
"""
def __init__(self, lock=None, *, loop=None):
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
if lock is None:
lock = Lock(loop=self._loop)
elif lock._loop is not self._loop:
raise ValueError("loop argument must agree with lock")
self._lock = lock
# Export the lock's locked(), acquire() and release() methods.
self.locked = lock.locked
self.acquire = lock.acquire
self.release = lock.release
self._waiters = collections.deque()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self.locked() else 'unlocked'
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
@coroutine
def wait(self):
"""Wait until notified.
If the calling coroutine has not acquired the lock when this
method is called, a RuntimeError is raised.
This method releases the underlying lock, and then blocks
until it is awakened by a notify() or notify_all() call for
the same condition variable in another coroutine. Once
awakened, it re-acquires the lock and returns True.
"""
if not self.locked():
raise RuntimeError('cannot wait on un-acquired lock')
self.release()
try:
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
return True
finally:
self._waiters.remove(fut)
finally:
# Must reacquire lock even if wait is cancelled
while True:
try:
yield from self.acquire()
break
except futures.CancelledError:
pass
@coroutine
def wait_for(self, predicate):
"""Wait until a predicate becomes true.
        The predicate should be a callable whose result will be
interpreted as a boolean value. The final predicate value is
the return value.
"""
result = predicate()
while not result:
yield from self.wait()
result = predicate()
return result
def notify(self, n=1):
"""By default, wake up one coroutine waiting on this condition, if any.
If the calling coroutine has not acquired the lock when this method
is called, a RuntimeError is raised.
This method wakes up at most n of the coroutines waiting for the
condition variable; it is a no-op if no coroutines are waiting.
Note: an awakened coroutine does not actually return from its
wait() call until it can reacquire the lock. Since notify() does
not release the lock, its caller should.
"""
if not self.locked():
raise RuntimeError('cannot notify on un-acquired lock')
idx = 0
for fut in self._waiters:
if idx >= n:
break
if not fut.done():
idx += 1
fut.set_result(False)
def notify_all(self):
"""Wake up all threads waiting on this condition. This method acts
like notify(), but wakes up all waiting threads instead of one. If the
calling thread has not acquired the lock when this method is called,
a RuntimeError is raised.
"""
self.notify(len(self._waiters))
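# Typical wait/notify sketch for Condition (consumer/producer and 'items' are illustrative):
#
#   cond = Condition()
#   items = []
#
#   @coroutine
#   def consumer():
#       with (yield from cond):
#           yield from cond.wait_for(lambda: items)   # lock is released while waiting
#           items.pop()
#
#   @coroutine
#   def producer():
#       with (yield from cond):
#           items.append(1)
#           cond.notify()                             # caller must still hold the lock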
class Semaphore(_ContextManagerMixin):
"""A Semaphore implementation.
A semaphore manages an internal counter which is decremented by each
acquire() call and incremented by each release() call. The counter
can never go below zero; when acquire() finds that it is zero, it blocks,
    waiting until some other coroutine calls release().
Semaphores also support the context management protocol.
The optional argument gives the initial value for the internal
counter; it defaults to 1. If the value given is less than 0,
ValueError is raised.
"""
def __init__(self, value=1, *, loop=None):
if value < 0:
raise ValueError("Semaphore initial value must be >= 0")
self._value = value
self._waiters = collections.deque()
if loop is not None:
self._loop = loop
else:
self._loop = events.get_event_loop()
def __repr__(self):
res = super().__repr__()
extra = 'locked' if self.locked() else 'unlocked,value:{}'.format(
self._value)
if self._waiters:
extra = '{},waiters:{}'.format(extra, len(self._waiters))
return '<{} [{}]>'.format(res[1:-1], extra)
def _wake_up_next(self):
while self._waiters:
waiter = self._waiters.popleft()
if not waiter.done():
waiter.set_result(None)
return
def locked(self):
"""Returns True if semaphore can not be acquired immediately."""
return self._value == 0
@coroutine
def acquire(self):
"""Acquire a semaphore.
If the internal counter is larger than zero on entry,
decrement it by one and return True immediately. If it is
zero on entry, block, waiting until some other coroutine has
called release() to make it larger than 0, and then return
True.
"""
while self._value <= 0:
fut = self._loop.create_future()
self._waiters.append(fut)
try:
yield from fut
except:
# See the similar code in Queue.get.
fut.cancel()
if self._value > 0 and not fut.cancelled():
self._wake_up_next()
raise
self._value -= 1
return True
def release(self):
"""Release a semaphore, incrementing the internal counter by one.
When it was zero on entry and another coroutine is waiting for it to
become larger than zero again, wake up that coroutine.
"""
self._value += 1
self._wake_up_next()
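# Sketch: bounding concurrency with a Semaphore (fetch/do_request are illustrative):
#
#   sem = Semaphore(10)
#
#   @coroutine
#   def fetch(url):
#       with (yield from sem):               # at most 10 coroutines run the body at once
#           return (yield from do_request(url))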
class BoundedSemaphore(Semaphore):
"""A bounded semaphore implementation.
This raises ValueError in release() if it would increase the value
above the initial value.
"""
def __init__(self, value=1, *, loop=None):
self._bound_value = value
super().__init__(value, loop=loop)
def release(self):
if self._value >= self._bound_value:
raise ValueError('BoundedSemaphore released too many times')
super().release()
|
mit
| -7,098,526,981,324,719,000
| 30.064854
| 79
| 0.576739
| false
| 4.491531
| false
| false
| false
|
podhmo/cssdiff
|
cssdiff/__init__.py
|
1
|
4506
|
# -*- coding:utf-8 -*-
import sys
import cssutils
from collections import defaultdict
VERBOSE = False
class DiffObject(object):
def __init__(self, src, dst):
self.src = src
self.dst = dst
self.merged = full_difference(src, dst)
def to_string(self):
buf = []
for style, diff_line_list in sorted(self.merged.items()):
buf.append("{style} {{".format(style=style))
for diff_line in diff_line_list:
op = diff_line[0]
if op == "-" or op == "+":
buf.append("{op} {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[2]))
elif op == "->":
buf.append("- {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[2]))
buf.append("+ {name}: {value};".format(op=op, name=diff_line[1], value=diff_line[3]))
buf.append("}\n")
return "\n".join(buf)
class Element(object):
def __init__(self, sheet, structure=None, verbose=True):
self.sheet = sheet
self.structure = structure or to_dict(self.sheet, verbose)
self.verbose = verbose
def simplify(self):
return self.__class__(self.sheet, simplify(self.structure), verbose=self.verbose)
def difference(self, other):
src = simplify(self.structure)
dst = simplify(other.structure)
return DiffObject(src, dst)
def loads(css, verbose=VERBOSE):
sheet = cssutils.parseString(css, validate=verbose)
return Element(sheet, verbose=verbose)
def load(rf, verbose=VERBOSE):
return loads(rf.read(), verbose=verbose)
def load_from_file(filename, verbose=VERBOSE):
with open(filename) as rf:
        return load(rf, verbose=verbose)
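# Minimal usage sketch (the CSS snippets are illustrative):
#
#   >>> a = loads("body { color: red; }")
#   >>> b = loads("body { color: blue; }")
#   >>> print(a.difference(b).to_string())
#   body {
#   - color: red;
#   + color: blue;
#   }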
def describe(sheet):
for rule in sheet:
print("S")
for selector in rule.selectorList:
print("\t{}".format(selector.selectorText))
print("R")
for prop in rule.style:
print("\t{} {}".format(prop.name, prop.value))
print("-")
def simplify(structure):
return {k1: {k2: vs[-1] for k2, vs in sd.items()} for k1, sd in structure.items()}
def full_difference(src, dst):
merged = defaultdict(list)
added_or_changed = difference(dst, src, op="+", iterate=lambda x: x.items())
deleted_or_changed = difference(src, dst, op="-", iterate=lambda x: x.items())
for k, vs in added_or_changed.items():
merged[k].extend(vs)
for k, vs in deleted_or_changed.items():
for v in vs:
if v[0] == '-':
merged[k].append(v)
return merged
def difference(s1, s2, op="+", iterate=lambda s: sorted(s.items())):
"""s1 - s2"""
def change(name, x, y):
return ("->", name, x, y)
def add(name, v):
return (op, name, v)
def addall(rules):
return [add(name, value) for name, value in iterate(rules)]
# Dict[style, Dict[name, value]]
d = defaultdict(list)
for style, rules in iterate(s1):
another_rules = s2.get(style)
if another_rules is None:
d[style].extend(addall(rules))
continue
for name, value in iterate(rules):
another_value = another_rules.get(name)
if another_value is None:
d[style].append(add(name, value))
elif value != another_value:
d[style].append(change(name, another_value, value))
return d
def to_dict(sheet, verbose=True):
d = defaultdict(lambda: defaultdict(list))
for rule in sheet:
if not hasattr(rule, "selectorList"):
if verbose:
sys.stderr.write("hmm: {}\n".format(type(rule)))
continue
for selector in rule.selectorList:
sd = d[selector.selectorText]
for prop in rule.style:
sd[prop.name].append(prop.value)
return d
# todo: remove
def pp(d):
def default(o):
return o.structure
import json
print(json.dumps(d, indent=2, default=default))
def main():
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("src", type=argparse.FileType('r'))
parser.add_argument("dst", type=argparse.FileType('r'))
parser.add_argument("--verbose", action="store_true", default=False)
args = parser.parse_args()
s0 = load(args.src, verbose=args.verbose)
s1 = load(args.dst, verbose=args.verbose)
print(s0.difference(s1).to_string())
if __name__ == "__main__":
main()
|
mit
| 8,262,920,245,644,976,000
| 29.04
| 109
| 0.580781
| false
| 3.619277
| false
| false
| false
|
cristian99garcia/showntell-activity
|
slideshow.py
|
1
|
25638
|
# -*- mode:python; tab-width:4; indent-tabs-mode:t; -*-
# slideshow.py
#
# Classes to represent a deck of slides, and handle things like file I/O and
# formats
# B. Mayton <bmayton@cs.washington.edu>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import os
import sys
import subprocess
from time import strftime
import xml.dom.minidom
import logging
from gi.repository import Gtk
from gi.repository import GObject
from gi.repository import GdkPixbuf
from path import path
from sugar3.activity import activity
from sugar3.datastore import datastore
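# A Deck instance is typically driven by the activity UI: the UI calls the navigation
# methods (next(), previous(), goToIndex()) and connects to the GObject signals declared
# below, e.g. (handler names are illustrative):
#
#   deck.connect('slide-redraw', lambda d: canvas.queue_draw())
#   deck.connect('slide-changed', on_slide_changed)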
class Deck(GObject.GObject):
__gsignals__ = {
'slide-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'decktitle-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'slide-redraw': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'remove-path': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'deck-changed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'local-ink-added': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING,)),
'remote-ink-added': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING,)),
'instr-state-propagate': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_BOOLEAN,)),
'lock-state-propagate': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_BOOLEAN,)),
'ink-submitted': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_STRING, GObject.TYPE_STRING)),
'ink-broadcast': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE,
(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_STRING)),
'update-submissions': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'instructor-ink-cleared': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT,)),
'instructor-ink-removed': (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, (GObject.TYPE_INT, GObject.TYPE_INT)),
}
def __init__(self, sugaractivity, handle, rsrc, base="/nfs/show"):
GObject.GObject.__init__(self)
self.__handle = handle
if self.__handle.object_id is None:
print 'slideshow - from home view'
else:
obj = datastore.get(self.__handle.object_id)
print 'object:', obj.get_file_path()
self.__logger = logging.getLogger('Deck')
self.__base = base
self.__rsrc = rsrc
self.__activity = sugaractivity
self.__is_initiating = True
self.__nav_locked = False
self.__active_sub = -1
self.__self_text = ""
self.__text_tag = None
self.__xmlpath = os.path.join(base, "deck.xml")
# we always create a new presentation and copy over it on resume
if path(base).exists():
# we can't have permissions.info for this to work
subprocess.call(
"cp -r " +
base +
" " +
os.path.expanduser("~/save"),
shell=True)
subprocess.call("rm -rf " + base + '/*', shell=True)
else:
path.mkdir(base)
path.copy(self.__rsrc / 'deck.xml', base / 'deck.xml')
path.copy(self.__rsrc / 'title.html', base / 'title.html')
path.copy(self.__rsrc / 'title_thumb.png', base / 'title_thumb.png')
self.reload()
self.set_title('New')
def set_locked_mode(self, locked):
""" Setter method for the navigation lock flag"""
self.__logger.debug("Lock state: " + str(locked))
self.__nav_locked = locked
self.emit('lock-state-propagate', locked)
def set_is_initiating(self, is_init):
""" Setter method for the instructor flag """
self.__logger.debug("Instructor state: " + str(is_init))
self.__is_initiating = is_init
self.emit('instr-state-propagate', is_init)
def getIsInitiating(self):
return self.__is_initiating
def make_title_slide(self, title):
# open and read title.html
self.__work_path = os.path.join(
activity.get_activity_root(), 'instance')
deckpath = path(activity.get_activity_root()) / 'instance' / 'deck'
slide = open(deckpath / 'title.html', 'r')
txt = slide.read()
slide.close()
# here change title.html - change between <h1> and </h1>
h1pos = txt.find('<h1>')
h1end = txt.find('</h1>')
txtmod = txt[:h1pos + 4] + title + txt[h1end:]
# here change date - change between <h3> and </h3>
h3pos = txtmod.find('<h3>')
h3end = txtmod.find('</h3>')
txt = txtmod[:h3pos + 4] + \
strftime("%a, %b %d, %Y %H:%M") + txtmod[h3end:]
# save title.html and close
slide = open(deckpath / 'title.html', 'w')
slide.write(txt)
slide.close()
print 'title slide changed', title
def set_title(self, title):
nodes = self.__dom.getElementsByTagName("title")
nodes[0].firstChild.data = title
self.make_title_slide(title)
self.save()
self.goToIndex(0, is_local=False)
self.emit('deck-changed')
print 'set_title', self.get_title()
def get_title(self):
nodes = self.__dom.getElementsByTagName("title")
return nodes[0].firstChild.data
def reload(self):
self.__logger.debug("Reading deck")
print 'reload:', self.__xmlpath
if os.path.exists(self.__xmlpath):
self.__dom = xml.dom.minidom.parse(self.__xmlpath)
decks = self.__dom.getElementsByTagName("deck")
self.__deck = decks[0]
# Get the slides from the show
self.__slides = self.__deck.getElementsByTagName("slide")
self.__nslides = len(self.__slides)
self.__logger.debug(str(self.__nslides) + " slides in show")
self.goToIndex(0, is_local=False)
self.emit('deck-changed')
print 'deck reloaded'
def save(self, path=None):
"""Writes the XML DOM in memory out to disk"""
print 'save:', path
if not path:
path = self.__xmlpath
"""
print '***************save************************'
print self.__dom.toprettyxml()
print '***************save************************'
"""
outfile = open(path, "w")
self.__dom.writexml(outfile)
outfile.close()
def rebuild_dom(self, title, slides):
dom = xml.dom.minidom.Document()
deck = dom.createElement("deck")
		title_node = dom.createElement("title")
		title_node.appendChild(dom.createTextNode(title))
		deck.appendChild(title_node)
for slide in slides:
deck.appendChild(slide)
dom.appendChild(deck)
print '*************rebuild**************************'
print dom.toprettyxml()
print '**********************************************'
return dom
def getDeckPath(self):
"""Returns the path to the folder that stores this slide deck"""
return self.__base
def resizeImage(self, inpath, outpath, w, h):
# resize an image
pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(inpath, w, h)
#scaled_buf = pixbuf.scale.simple(w, h, Gtk.gdk.INTERP_BILINEAR)
pixbuf.save(outpath, "png")
def get_SlideTitle(self):
n = self.getIndex()
slide = self.__slides[n]
return slide.getAttribute('title')
def set_SlideTitle(self, slideTitle):
n = self.getIndex()
slide = self.__slides[n]
slide.setAttribute('title', slideTitle)
def addSlide(self, file_path):
INSTANCE = path(activity.get_activity_root()) / 'instance'
filepath = path(file_path)
print 'addSlide file_path', filepath.exists(), filepath
filename = filepath.name
inpath = INSTANCE / 'deck' / filename
print 'inpath', inpath.exists(), inpath
path.copy(filepath, inpath)
outpath = path(activity.get_activity_root()) / \
'instance' / 'deck' / filename
print 'outpath=', outpath.exists(), outpath
self.resizeImage(inpath, outpath, 640, 480)
print 'outpath=', outpath.exists(), outpath
print 'get slide dimensions'
dims = self.getSlideDimensionsFromXML(0)
if not dims:
wf = 640
hf = 480
else:
wf, hf = dims
w = str(int(wf))
h = str(int(hf))
print 'add slide', w, h
newslide = self.__dom.createElement("slide")
newslide.setAttribute("height", h)
newslide.setAttribute("title", "newslide")
newslide.setAttribute("width", w)
newlayer = self.__dom.createElement("layer")
txt = self.__dom.createTextNode(filename)
newlayer.appendChild(txt)
newslide.appendChild(newlayer)
self.__deck.appendChild(newslide)
print '**************addSlide*************'
print self.__dom.toprettyxml()
print '***********************************'
self.save()
def removeSlide(self, n):
del self.__slides[n]
self.__dom = self.rebuild_dom("modified deck", self.__slides)
def moveSlide(self, f, t):
if f < t:
self.__slides.insert(t, self.__slides[f])
del self.__slides[f]
elif t < f:
self.__slides.insert(t, self.__slides[f])
del self.__slides[f + 1]
self.__dom = self.rebuild_dom("modified deck", self.__slides)
def getSlideLayers(self, n=-1):
"""Returns a list of the layers that comprise this slide"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
self.__layers = slide.getElementsByTagName("layer")
layers = []
for l in self.__layers:
p = os.path.join(self.__base, l.firstChild.nodeValue)
layers.append(p)
return layers
def getInstructorInk(self):
self.__instructor_ink = []
instr = self.__slide.getElementsByTagName("instructor")
if len(instr) > 0:
self.__instructor_tag = instr[0]
pathtags = self.__instructor_tag.getElementsByTagName("path")
for pathstr in pathtags:
self.__instructor_ink.append(pathstr.firstChild.nodeValue)
return self.__instructor_ink
def getSelfInkOrSubmission(self):
if self.__active_sub == -1:
return (self.__self_ink, self.__self_text)
subtags = self.__slide.getElementsByTagName("submission")
if self.__active_sub > -1 and self.__active_sub < len(subtags):
active_subtag = subtags[self.__active_sub]
text = ""
texts = active_subtag.getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
pathlist = []
paths = active_subtag.getElementsByTagName("path")
for path in paths:
if path.firstChild:
pathlist.append(path.firstChild.nodeValue)
return (pathlist, text)
return None
def setActiveSubmission(self, sub):
self.__active_sub = sub
self.emit('slide-redraw')
def getActiveSubmission(self):
return self.__active_sub
def getSubmissionList(self, n=None):
if n is None:
n = self.__pos
subtags = self.__slide.getElementsByTagName("submission")
sublist = []
for subtag in subtags:
sublist.append(subtag.getAttribute("from"))
return sublist
def addSubmission(self, whofrom, inks, text="", n=None):
if n is None:
n = self.__pos
if n >= 0 and n < self.getSlideCount():
slide = self.__slides[n]
else:
slide = self.__slides[self.__pos]
newsub = self.__dom.createElement("submission")
newsub.setAttribute("from", whofrom)
substrparts = inks.split("$")
for part in substrparts:
if len(part) > 0:
newpath = self.__dom.createElement("path")
newpath.appendChild(self.__dom.createTextNode(part))
newsub.appendChild(newpath)
subtext = self.__dom.createElement("text")
subtext.appendChild(self.__dom.createTextNode(text))
newsub.appendChild(subtext)
subs = slide.getElementsByTagName("submission")
for sub in subs:
if sub.getAttribute("from") == whofrom:
slide.removeChild(sub)
slide.appendChild(newsub)
subs = slide.getElementsByTagName("submission")
if n == self.__pos:
self.emit('update-submissions', len(subs) - 1)
def addInkToSlide(self, pathstr, islocal, n=None):
"""Adds ink to the current slide, or slide n if given. Instructor ink may be added to any slide;
but it only makes sense to add student ink to the current slide (n will be ignored)"""
if n is None:
slide = self.__slide
instr_tag = self.__instructor_tag
if instr_tag is None:
instr_tag = self.__dom.createElement("instructor")
slide.appendChild(instr_tag)
self.__instructor_tag = instr_tag
else:
			if n < self.getSlideCount() and n >= 0:
slide = self.__slides[n]
else:
slide = self.__slides[self.__pos]
instr_tags = slide.getElementsByTagName("instructor")
if len(instr_tags) > 0:
instr_tag = instr_tags[0]
else:
instr_tag = self.__dom.createElement("instructor")
slide.appendChild(instr_tag)
if not islocal or self.__is_initiating:
self.__instructor_ink.append(pathstr)
path = self.__dom.createElement("path")
path.appendChild(self.__dom.createTextNode(pathstr))
instr_tag.appendChild(path)
else:
self.__self_ink.append(pathstr)
if not self.__self_ink_tag:
self.__self_ink_tag = self.__dom.createElement("self")
self.__slide.appendChild(self.__self_ink_tag)
path = self.__dom.createElement("path")
path.appendChild(self.__dom.createTextNode(pathstr))
self.__self_ink_tag.appendChild(path)
if islocal:
self.emit("local-ink-added", pathstr)
else:
if n is None or n == self.__pos:
self.emit("remote-ink-added", pathstr)
def clearInk(self, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
if self.__is_initiating:
self.clearInstructorInk(n)
self.emit('instructor-ink-cleared', n)
self_tags = slide.getElementsByTagName("self")
for self_tag in self_tags:
slide.removeChild(self_tag)
self.__self_ink = []
self.__self_ink_tag = None
def clearInstructorInk(self, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
instructor_tags = slide.getElementsByTagName("instructor")
for instructor_tag in instructor_tags:
slide.removeChild(instructor_tag)
if n == self.__pos:
self.__instructor_ink = []
self.__instructor_tag = None
self.emit('slide-redraw')
def removeInstructorPathByUID(self, uid, n=None):
if n is None:
n = self.__pos
needs_redraw = False
slide = self.__slides[n]
instructor_tags = slide.getElementsByTagName("instructor")
if len(instructor_tags) > 0:
instructor_tag = instructor_tags[0]
else:
return
path_tags = instructor_tag.getElementsByTagName("path")
for path_tag in path_tags:
if path_tag.firstChild:
pathstr = path_tag.firstChild.nodeValue
path_uid = 0
try:
path_uid = int(pathstr[0:pathstr.find(';')])
except Exception as e:
pass
if path_uid == uid:
instructor_tag.removeChild(path_tag)
needs_redraw = True
if n == self.__pos and needs_redraw:
self.emit('remove-path', uid)
def removeLocalPathByUID(self, uid, n=None):
if n is None:
n = self.__pos
slide = self.__slides[n]
if self.__is_initiating:
self.emit('instructor_ink_removed', uid, n)
tags = slide.getElementsByTagName("instructor")
else:
tags = slide.getElementsByTagName("self")
if len(tags) > 0:
tag = tags[0]
else:
return
path_tags = tag.getElementsByTagName("path")
for path_tag in path_tags:
if path_tag.firstChild:
pathstr = path_tag.firstChild.nodeValue
path_uid = 0
try:
path_uid = int(pathstr[0:pathstr.find(';')])
except Exception as e:
pass
if path_uid == uid:
tag.removeChild(path_tag)
def doSubmit(self):
inks, text, whofrom = self.getSerializedInkSubmission()
self.__logger.debug("Submitting ink: " + str(inks) + " text: " + text)
self.emit('ink-submitted', inks, text)
def doBroadcast(self):
inks, text, whofrom = self.getSerializedInkSubmission()
self.emit('ink-broadcast', whofrom, inks, text)
def getSerializedInkSubmission(self):
sub = ""
text = ""
if self.__active_sub == -1:
self_tags = self.__slide.getElementsByTagName("self")
if len(self_tags) > 0:
texts = self_tags[0].getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
for path in self_tags[0].getElementsByTagName("path"):
sub = sub + path.firstChild.nodeValue + "$"
return sub, text, "myself"
else:
sub = ""
whofrom = "unknown"
subtags = self.__slide.getElementsByTagName("submission")
if self.__active_sub > -1 and self.__active_sub < len(subtags):
active_subtag = subtags[self.__active_sub]
text = ""
whofrom = active_subtag.getAttribute("from")
texts = active_subtag.getElementsByTagName("text")
if len(texts) > 0:
if texts[0].firstChild:
text = texts[0].firstChild.nodeValue
pathlist = []
paths = active_subtag.getElementsByTagName("path")
for path in paths:
if path.firstChild:
sub = sub + path.firstChild.nodeValue + "$"
return sub, text, whofrom
def getSlideThumb(self, n=-1):
"""Returns the full path to the thumbnail for this slide if it is defined; otherwise False"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
print slide.toprettyxml()
thumbs = slide.getElementsByTagName("thumb")
if len(thumbs) < 1:
return False
return os.path.join(self.__base, thumbs[0].firstChild.nodeValue)
def setSlideThumb(self, filename, n=-1):
"""Sets the thumbnail for this slide to filename (provide a *relative* path!)"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
thumbs = slide.getElementsByTagName("thumb")
for t in thumbs:
slide.removeChild(t)
thumb = self.__dom.createElement("thumb")
thumb.appendChild(self.__dom.createTextNode(filename))
slide.appendChild(thumb)
def getSlideClip(self, n=-1):
"""Returns the full path to the audio clip for this slide if it is defined; otherwise False"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
clip = slide.getElementsByTagName("clip")
if len(clip) < 1:
return False
return os.path.join(self.__base, clip[0].firstChild.nodeValue)
def setSlideClip(self, filename, n=-1):
"""Sets the clip for this slide to filename (provide a *relative* path!)"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
clips = slide.getElementsByTagName("clip")
for clip in clips:
slide.removeChild(clip)
clip = self.__dom.createElement("clip")
clip.appendChild(self.__dom.createTextNode(filename))
slide.appendChild(clip)
def setSlideText(self, textval):
self.__self_text = textval
if self.__text_tag:
if self.__text_tag.firstChild:
self.__text_tag.firstChild.nodeValue = textval
else:
self.__text_tag.appendChild(self.__dom.createTextNode(textval))
def doNewIndex(self):
"""Updates any necessary state associated with moving to a new slide"""
self.__slide = self.__slides[self.__pos]
# set slide title - entry text from xml
self.set_SlideTitle(self.__slide.getAttribute('title'))
self_ink = self.__slide.getElementsByTagName("self")
self.__instructor_tag = None
self.__self_ink_tag = None
self.__instructor_ink = []
self.__self_ink = []
self.__self_text = ""
self.__text_tag = None
self.__active_sub = -1
if len(self_ink) > 0:
self.__self_ink_tag = self_ink[0]
texttags = self.__self_ink_tag.getElementsByTagName("text")
if len(texttags) > 0:
self.__text_tag = texttags[0]
else:
				self.__text_tag = self.__dom.createElement("text")
				self.__text_tag.appendChild(self.__dom.createTextNode(""))
				self.__self_ink_tag.appendChild(self.__text_tag)
pathtags = self.__self_ink_tag.getElementsByTagName("path")
for pathstr in pathtags:
self.__self_ink.append(pathstr.firstChild.nodeValue)
else:
self.__self_ink_tag = self.__dom.createElement("self")
self.__slide.appendChild(self.__self_ink_tag)
self.__text_tag = self.__dom.createElement("text")
self.__text_tag.appendChild(self.__dom.createTextNode(""))
self.__self_ink_tag.appendChild(self.__text_tag)
if self.__text_tag.firstChild:
self.__self_text = self.__text_tag.firstChild.nodeValue
self.__activity.set_screen(0)
self.emit("slide-changed")
self.emit("update-submissions", self.__active_sub)
self.emit("slide-redraw")
def goToIndex(self, index, is_local):
"""Jumps to the slide at the given index, if it's valid"""
self.__logger.debug(
"Trying to change slides: locked? %u, instructor? %u, is_local? %u",
self.__nav_locked,
self.__is_initiating,
is_local)
in_range = index < self.__nslides and index >= 0
if (self.__is_initiating or not is_local or not self.__nav_locked) and in_range:
self.__logger.debug("Changing slide to index: %u", index)
self.__pos = index
self.doNewIndex()
else:
self.__pos = index
print 'invalid index', index
def getIndex(self):
"""Returns the index of the current slide"""
return self.__pos
def next(self):
"""Moves to the next slide"""
self.goToIndex(self.__pos + 1, is_local=True)
def previous(self):
"""Moves to the previous slide"""
self.goToIndex(self.__pos - 1, is_local=True)
def isAtBeginning(self):
"""Returns true if show is on the first slide in the deck"""
if self.__nslides < 1:
return True
if self.__pos == 0:
return True
else:
return False
def isAtEnd(self):
"""Returns true if the show is at the last slide in the deck"""
if self.__nslides < 1:
return True
if self.__pos == self.__nslides - 1:
return True
else:
return False
def getSlideDimensionsFromXML(self, n=-1):
"""Returns the dimensions for the slide at index n, if they're specified"""
if n == -1:
n = self.__pos
slide = self.__slides[n]
wstring = slide.getAttribute("width")
hstring = slide.getAttribute("height")
if wstring != '' and hstring != '':
return [float(wstring), float(hstring)]
return False
def getSlideCount(self):
return self.__nslides
GObject.type_register(Deck)
|
gpl-2.0
| -5,962,084,091,396,877,000
| 37.728097
| 117
| 0.562056
| false
| 3.984768
| false
| false
| false
|
lornemcintosh/OptAnim
|
optanim/animation.py
|
1
|
21918
|
# -------------------------------------------------------------------------
# Copyright (c) 2010-2012 Lorne McIntosh
#
# This file is part of OptAnim.
#
# OptAnim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OptAnim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with OptAnim. If not, see <http://www.gnu.org/licenses/>.
# -------------------------------------------------------------------------
'''
OptAnim, animation module
'''
from __future__ import division
import math
import copy
import itertools
import re
import time
import logging
import numpy
import cma
from exporters import *
from joints import *
from specifier import *
from threadpool import *
from utils import *
LOG = logging.getLogger(__name__)
pool = ThreadPool()
class ParameterSpace(object):
"""
A utility class that makes it easy to specify and generate large sets of
character animations using specifiers in a parameterized, combinatorial way
Examples
--------
Create a new ParameterSpace:
>>> space = ParameterSpace(Name='space')
Add 1 dimension to space, with 1 set of 1 specifier
>>> space.add_dimension( [[ a ]] )
Result: 1 animation: (a)
Add 1 dimension to space, with 1 set of 2 specifiers
>>> space.add_dimension( [[ a, b ]] )
Result: 1 animation: (a,b)
Add 1 dimension to space, with 2 sets of specifiers
>>> space.add_dimension( [[ a, b ], [ c ]] )
Result: 2 animations: (a,b) (c)
Add 2 dimensions to space (2 sets, 1 set)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ]] )
Result: 2 animations (2x1): (a,b,d) (c,d)
Add 2 dimensions to space (2 sets, 2 sets)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ], [ e ]] )
Result: 4 animations (2x2): (a,b,d) (a,b,e) (c,d) (c,e)
Add 3 dimensions to space (2 sets, 2 sets, 2 sets)
>>> space.add_dimension( [[ a, b ], [ c ]] )
>>> space.add_dimension( [[ d ], [ e ]] )
>>> space.add_dimension( [[ f ], [ g ]] )
Result: 8 animations (2x2x2): (a,b,d,f) (a,b,d,g) (a,b,e,f) (a,b,e,g)
(c,d,f) (c,d,g) (c,e,f) (c,e,g)
"""
def __init__(self, Name, Length=None, FPS=25):
'''Constructor'''
self.Name = Name
self.Length = Length
self.FPS = FPS
self.ContactTimesDict = None
self.DimensionList = []
self.CharacterList = []
self.AnimationList = []
def set_length(self, length):
self.Length = float(length)
def set_contact_times(self, dict):
self.ContactTimesDict = dict
def get_frame_length(self):
return float(1.0 / self.FPS)
def get_frame_count(self):
return int(round(self.Length * self.FPS))
def add_character(self, character):
self.CharacterList.append(character)
def add_dimension(self, dim):
'''Adds a dimension to the ParameterSpace'''
self.DimensionList.append(dim)
def get_num_combinations(self):
ret = len(self.CharacterList)
for dim in self.DimensionList:
ret *= max(len(dim), 1)
return ret
def get_animations_with_tag(self, tag):
return [anim for anim in self.AnimationList if anim.has_tag(tag)]
def generate(self, solver='ipopt'):
#print a helpful message about the number of combinations generated
LOG.info("Generating %s (%i combinations)" % (self.Name, self.get_num_combinations()))
#make an anim for each combination of characters/specifiers
for character in self.CharacterList:
for index, comb in enumerate(itertools.product(*self.DimensionList)):
#build out constraint and objective lists
paramList = list(itertools.chain.from_iterable(comb))
animSpecifierList = character.SpecifierList + paramList
#create an animation instance
animName = character.Name + "_" + self.Name + "_" + str(index)
anim = Animation(animName, self.Length, self.FPS, character,
animSpecifierList, self.ContactTimesDict);
self.AnimationList.append(anim)
anim.optimize(solver) #non-blocking
def wait_for_results(self):
'''Polls the animations and returns when they're all done'''
alldone = False
while(alldone is False):
alldone = True
for anim in self.AnimationList:
if anim.Done is False:
alldone = False
time.sleep(1)
break
def export(self, outdir):
'''Exports all the animations that solved'''
self.wait_for_results()
for anim in self.AnimationList:
if anim.Solved:
anim.export(outdir)
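# Typical driver sketch (the character and specifier names are illustrative, not part
# of this module):
#
#   space = ParameterSpace(Name='walk', Length=1.0, FPS=25)
#   space.add_character(character)
#   space.add_dimension([[slow_specifiers], [fast_specifiers]])   # 2 combinations
#   space.generate(solver='ipopt')    # queues one spacetime optimization per combination
#   space.export('./output')          # waits for the jobs, then exports the solved anims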
class Animation(object):
'''Represents a specific character animation. The character and constraints
etc. are set in stone. If solved, it also stores the optimization results (the
animation data)'''
def __init__(self, Name, Length, FPS, Character, SpecifierList, ContactTimes):
'''Constructor'''
self.Name = Name
self.Length = Length
self.FPS = FPS
self.Character = Character
self.SpecifierList = SpecifierList
self.ContactTimesDict = ContactTimes
self.Done = False
self.Solved = False
self.ObjectiveValue = numpy.NaN
self.AnimationData = {}
self.CachedConstraintList = []
self.CachedObjectiveList = []
def __str__(self):
return self.Name + " (Length=" + str(self.Length) + ", FPS=" + str(self.FPS) + ", frame_count=" + str(self.get_frame_count()) + ")"
def get_frame_length(self):
return float(1.0 / self.FPS)
def get_frame_count(self):
return int(round(self.Length * self.FPS))
def get_contact_frames(self, joint):
try:
footsteps = self.ContactTimesDict[joint]
contactSet = set()
for step in footsteps:
startTime, intervalTime = [x * self.Length for x in step] #convert from fraction of length to real seconds
#TODO: goofy 'double rounding' here is to avoid small floating-point errors; use decimal package instead?
intervalFrames = int(round(round(intervalTime * self.FPS, 1)))
startFrame = int(round(round(startTime * self.FPS, 1)))
endFrame = startFrame + intervalFrames
contactSet = contactSet | set([x % self.get_frame_count() for x in range(startFrame, endFrame)]) #loop
return contactSet
except KeyError:
raise BaseException('Character "%s" has a contact joint "%s". You must specify timings for %s.' % (self.Character.Name, joint.Name, joint.Name))
def has_tag(self, tag):
'''Returns True if this animation has the specified tag; False otherwise'''
return tag in self.SpecifierList
def get_frame_slice(self, firstFrame, lastFrame):
'''Returns a new Animation containing just the frames between firstFrame and lastFrame'''
#clamp frames
firstFrame = max(0, firstFrame)
lastFrame = min(self.get_frame_count(), lastFrame)
newName = self.Name + "_" + str(firstFrame) + "to" + str(lastFrame)
newLength = (lastFrame-firstFrame+1)*self.get_frame_length()
ret = Animation(newName, newLength, self.FPS, self.Character, None, None)
#setup animation data
ret.AnimationData = {}
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * ret.get_frame_count()
#copy slice of animation data from original
for k, v in ret.AnimationData.items():
ret.AnimationData[k] = self.AnimationData[k][firstFrame:lastFrame + 1]
ret.Done = True
ret.Solved = True
return ret
def animdata_resample(self, fps):
'''Returns a new Animation, resampled at the specified fps'''
ret = copy.deepcopy(self) #TODO: get rid of this deepcopy (too memory hungry)
ret.FPS = fps
frameCount = ret.get_frame_count()
#clear existing animation data
ret.AnimationData = {}
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * frameCount
#do the resampling
for frame in range(frameCount):
time = frame / (frameCount-1)
interpData = self.animdata_get_interpolated(time, self.Character.DefaultRoot)
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)][frame] = interpData[body.Name][0]
return ret
def animdata_get_interpolated(self, time, root):
'''Returns the interpolated state at time, where time is 0 to 1'''
assert(0.0 <= time <= 1.0)
nframes = (len(self.AnimationData.items()[0][1])-1)
frameA = int(math.floor(time * nframes))
frameB = int(math.ceil(time * nframes))
if frameA == frameB:
ret = {}
for k, v in self.AnimationData.items():
ret[k] = self.AnimationData[k][frameA:frameB + 1]
return ret
else:
timeA = frameA / nframes
timeB = frameB / nframes
timeAB = (time - timeA) / (timeB - timeA)
a,b = {},{}
for k, v in self.AnimationData.items():
a[k] = self.AnimationData[k][frameA:frameA + 1]
b[k] = self.AnimationData[k][frameB:frameB + 1]
ret = frame_interpolate(self.Character, root, a, b, timeAB)
return ret
def blend(self, other, weight, root=None, fps=25):
'''Returns a new Animation, the result of blending between self and
other at the specified weight, using root as the root point (body), and
sampled at the specified fps'''
if root is None:
root = self.Character.DefaultRoot
LOG.info("Blending " + str(self) + " and " + str(other) + ". Weight = " + str(weight) + ". Root = " + str(root.Name))
#calculate length (in seconds) of new animation clip:
#(formula from Safonova & Hodgins / Analyzing the Physical Correctness of Interpolated Human Motion)
length = math.sqrt(math.pow(self.Length,2)*weight + math.pow(other.Length,2)*(1-weight))
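        # e.g. blending a 1.0 s clip with a 2.0 s clip at weight 0.5 gives
        # sqrt(1.0*0.5 + 4.0*0.5) ~= 1.58 s, rather than the arithmetic mean of 1.5 s.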
ret = Animation(str(self.Name) + "_and_" + str(other.Name), length, fps, self.Character, None, None)
frameCount = ret.get_frame_count()
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)] = [None] * frameCount
for frame in range(frameCount):
frameTime = frame / (frameCount-1)
a = self.animdata_get_interpolated(frameTime, root)
b = other.animdata_get_interpolated(frameTime, root)
tmp = frame_interpolate(self.Character, root, a, b, weight)
for body in ret.Character.BodyList:
ret.AnimationData[str(body.Name)][frame] = tmp[str(body.Name)][0]
ret.Done = True
ret.Solved = True
return ret
def _write_header(self):
ret = ''
ret += 'param pH = %f;\n' % self.get_frame_length()
ret += 'param pTimeBegin = 0;\n'
ret += 'param pTimeEnd = %i;\n' % (self.get_frame_count()-1)
ret += 'set sTimeSteps := pTimeBegin .. pTimeEnd;\n'
#write joint timing sets
for j in self.Character.get_joints_contact():
contactSet = self.get_contact_frames(j)
contactStr = '{' + (', '.join(map(str, contactSet))) + '}'
ret += 'set sTimeSteps_%sOn := %s;\n' % (j.Name, contactStr)
ret += 'set sTimeSteps_%sOff := sTimeSteps diff sTimeSteps_%sOn;\n' % (j.Name, j.Name)
ret += '\n'
ret += '\n'
return ret
def _write_specifiers(self):
ret = ''
#write constraints
for eq in self.CachedConstraintList:
ret += str(eq)
#write weighted objectives
if self.CachedObjectiveList:
ret += 'minimize objective: (\n'
for i, obj in enumerate(self.CachedObjectiveList):
ret += str(obj)
if(i == len(self.CachedObjectiveList)-1):
ret += ') / (pTimeEnd+1);\n' #we divide by time so animations of different lengths can be compared fairly
else:
ret += ' +\n'
return ret
def _write_footer(self, solver):
ret = ''
ret = 'option reset_initial_guesses 1;\n'
#ret += 'option show_stats 1;\n'
ret += 'option solver ' + solver + ';\n'
ret += 'option ipopt_options \'max_iter=10000 print_level=0\';\n' #TODO: what about other solvers? max_cpu_time=1200
ret += 'option snopt_options \'meminc=10000000\';\n' #TODO: what about other solvers?
ret += 'solve;\n'
ret += '\n'
ret += 'display solve_result;\n'
if self.CachedObjectiveList:
ret += 'display objective;\n'
#for interest we can output the values of individual objectives in the solution
for i, obj in enumerate(self.CachedObjectiveList):
ret += obj.write_debug_str()
ret += 'if solve_result = "solved" then {\n'
for frame in range(0, self.get_frame_count()):
for body in self.Character.BodyList:
varstr = ', '.join([str(body.q[x]) + '[' + str(frame) + ']' for x in range(0, dof)])
fstr = ', '.join(['%f'] * dof)
ret += '\tprintf "' + str(body.Name) + '[' + str(frame) + '] = ' + fstr + '\\n", ' + varstr + ';\n'
ret += '}\n'
ret += 'if solve_result = "solved" then{ display {j in 1.._nvars} (_varname[j],_var[j]); }\n'
ret += 'exit;\n'
return ret
def _solvedcallback(self, amplresult):
#cache solution to a file
file = open(self.Name + '.amplsol', 'w')
file.write(amplresult)
file.close()
#did it solve correctly?
self.Solved = ("solve_result = solved" in amplresult)
if self.Solved:
if self.CachedObjectiveList:
objectivematch = re.search("(?<=objective = )" + regex_float, amplresult)
self.ObjectiveValue = float(objectivematch.group(0))
#read the solution variables into a dict {indexed on body name}[frame][dof]
self.AnimationData = {}
for body in self.Character.BodyList:
self.AnimationData[str(body.Name)] = [None] * self.get_frame_count()
for frame in range(0, self.get_frame_count()):
regex_float_str = ', '.join([regex_float] * dof)
pattern = str(body.Name) + "\[" + str(frame) + "\] = " + regex_float_str
match = re.findall(pattern, amplresult)[0]
q = [float(match[x * 2]) for x in range(dof)]
self.AnimationData[str(body.Name)][frame] = q
#if looped, append an extra frame (identical to first frame, but offset)
for s in self.SpecifierList:
if isinstance(s, SpecifierPluginLoop):
for frame in range(0, 2): #duplicate first 2 frames
for b in self.Character.BodyList:
q = self.AnimationData[str(b.Name)][frame]
q = s.get_offset(q, 1) #apply offset
q = map(float, q)
self.AnimationData[str(b.Name)].append(q) #append extra frame
LOG.info('%s solved! (Objective = %f)' % (self.Name, self.ObjectiveValue))
self.export('.') #export immediately so we can see the results
else:
LOG.info('%s failed!' % self.Name)
self.Done = True #this must come last to avoid a thread sync issue
def export(self, outdir):
if self.Solved is False:
raise BaseException('Animation is not solved. Cannot export!')
'''filename = outdir + "\\" + self.Name + '.bvh'
LOG.info('Writing %s,' % filename),
file = openfile(filename, 'w')
file.write(export_bvh(self))
file.close()'''
filename = outdir + "\\" + self.Name + '.flat.bvh'
LOG.info('Writing %s,' % filename),
file = openfile(filename, 'w')
file.write(export_bvh_flat(self))
file.close()
filename = outdir + "\\" + self.Name + '.skeleton.xml'
LOG.info('Writing %s' % filename)
xmltree = ogre3d_export_animation(self)
xmltree.write(filename)
def _solve(self, solver, writeAMPL=False):
'''This handles the 'inner' (spacetime) optimization. It assumes that
length and contact timings are set. Use optimize() instead.'''
#reset the solution
self.Done = False
self.Solved = False
self.ObjectiveValue = numpy.NaN
self.AnimationData = {}
self.CachedConstraintList = []
self.CachedObjectiveList = []
#split specifiers into constraints and objectives for easier processing
for s in self.SpecifierList:
#regular constraints/objectives
if isinstance(s, Constraint):
self.CachedConstraintList.append(s)
elif isinstance(s, Objective):
self.CachedObjectiveList.append(s)
#plugins
elif isinstance(s, SpecifierPlugin):
for c in s.get_specifiers(self, self.Character):
if isinstance(c, Constraint):
self.CachedConstraintList.append(c)
elif isinstance(c, Objective):
self.CachedObjectiveList.append(c)
#generate the ampl model
amplcmd = ''
amplcmd += self._write_header()
amplcmd += self.Character.get_model() #character body & physical eq.
amplcmd += self._write_specifiers() #other constraints & objectives
amplcmd += self._write_footer(solver)
#for debugging purposes we'll write out the ampl file
if writeAMPL:
file = open(self.Name + '.ampl', 'w')
file.write(amplcmd)
file.close()
try:
#try to load cached solution
file = open(self.Name + '.amplsol', 'r')
amplresult = file.read();
file.close()
#pretend it solved, and use the callback
self._solvedcallback(amplresult)
except IOError:
#couldn't load cached solution file, we'll have to solve it with ampl
#use the thread pool for this
pool.add_job(amplsolve, args=[amplcmd], return_callback=self._solvedcallback)
def optimize(self, solver):
'''This handles the 'outer' optimization that's necessary to determine
animation length and contact timings (if they are not explicitly provided).'''
optLength = self.Length is None
optContacts = self.ContactTimesDict is None and len(self.Character.get_joints_contact()) > 0
if optLength or optContacts:
LOG.info("Starting CMA-ES optimization for %s..." % self.Name)
startPoint = []
lowerBounds = []
upperBounds = []
if optLength:
startPoint.append(0.5)
lowerBounds.append(self.get_frame_length() * 3.0) #3 frame minimum
upperBounds.append(1.0)
if optContacts:
f = 1.0 / len(self.Character.get_joints_contact())
for j, joint in enumerate(self.Character.get_joints_contact()):
evenly = (j * f) + (f / 2.0) #space the contacts evenly
startPoint.extend([evenly, 0.5])
lowerBounds.extend([0.0, 0.0])
upperBounds.extend([1.0, 1.0])
#optimize anim length and contact timings with CMA-ES
es = cma.CMAEvolutionStrategy(startPoint, 1.0 / 3.0,
{'maxiter':100, 'bounds':[lowerBounds, upperBounds]})
# iterate until termination
while not es.stop:
X = []
fit = []
for i in range(es.popsize):
curr_fit = numpy.NaN
while curr_fit is numpy.NaN:
x = es.ask(1)[0]
if optLength:
self.Length = x[0] * 3 #TODO: handle scaling better
if optContacts:
m = 1 if optLength else 0
self.ContactTimesDict = {}
for j, joint in enumerate(self.Character.get_joints_contact()):
self.ContactTimesDict[joint] = [(x[j * 2 + 0 + m], x[j * 2 + 1 + m])]
curr_fit = self._solve(solver) #might return numpy.NaN
fit.append(curr_fit)
X.append(x)
print '.',
es.tell(X, fit)
print ''
es.printline(1)
print 'termination: ', es.stopdict
print(es.best[0])
#TODO: Because we don't bother saving the animation data, we have to
#solve the best one (again) to get it. This code is just a re-run
#from above, except it solves the best one found
if optLength:
self.Length = es.best[0][0] * 3 #TODO: handle scaling better
if optContacts:
m = 1 if optLength else 0
self.ContactTimesDict = {}
for j, joint in enumerate(self.Character.get_joints_contact()):
self.ContactTimesDict[joint] = [(es.best[0][j * 2 + 0 + m], es.best[0][j * 2 + 1 + m])]
return self._solve(solver, writeAMPL=True)
else:
LOG.info("CMA-ES optimization unnecessary for %s. Solving..." % self.Name)
return self._solve(solver, writeAMPL=True)
def frame_interpolate(character, root, frameDataA, frameDataB, weight):
'''Given a character, a root body, two frames of animation data, and a
weight, this returns an interpolated frame of animation data'''
assert(0.0 <= weight <= 1.0)
#setup new animation data structure
ret = {}
for body in character.BodyList:
ret[str(body.Name)] = [None]
#traverse character, starting at root
for parent,child,joint in character.traverse_bfs(root):
if parent is None:
#special case: this is the root body
#just do a straight-forward lerp for position and rotation
dataA = frameDataA[str(child.Name)][0]
dataB = frameDataB[str(child.Name)][0]
lerpData = num_q_lerp(dataA, dataB, weight)
ret[str(child.Name)] = [lerpData]
else:
#regular case: child rotation must be handled relative to parent
#frameA
parentDataA, childDataA = frameDataA[str(parent.Name)][0], frameDataA[str(child.Name)][0]
assert(True not in [(math.isnan(x) or math.isinf(x)) for x in parentDataA+childDataA])
parentEulerA, childEulerA = parentDataA[3:dof], childDataA[3:dof]
parentQuatA, childQuatA = num_euler_to_quat(parentEulerA), num_euler_to_quat(childEulerA)
#express child relative to parent
relativeQuatA = parentQuatA.inverse() * childQuatA
#frameB
parentDataB, childDataB = frameDataB[str(parent.Name)][0], frameDataB[str(child.Name)][0]
assert(True not in [(math.isnan(x) or math.isinf(x)) for x in parentDataB+childDataB])
parentEulerB, childEulerB = parentDataB[3:dof], childDataB[3:dof]
parentQuatB, childQuatB = num_euler_to_quat(parentEulerB), num_euler_to_quat(childEulerB)
#express child relative to parent
relativeQuatB = parentQuatB.inverse() * childQuatB
#do the interpolation
relativeQuatA,relativeQuatB = relativeQuatA.normalize(), relativeQuatB.normalize()
newChildQuat = slerp(weight, relativeQuatA, relativeQuatB)
#undo relative transform
newParentData = ret[str(parent.Name)][0]
newParentEuler = newParentData[3:dof]
newParentQuat = num_euler_to_quat(newParentEuler)
newChildQuat = newParentQuat * newChildQuat
newChildEuler = num_quat_to_euler(newChildQuat)
#now calculate the position
pjp, cjp = [], []
if joint.BodyA is parent and joint.BodyB is child:
pjp, cjp = joint.PointA, joint.PointB
elif joint.BodyA is child and joint.BodyB is parent:
pjp, cjp = joint.PointB, joint.PointA
else:
raise BaseException("Output from character.traverse_bfs() makes no sense")
jointPosWorld = num_world_xf(pjp, newParentData)
jointPosWorld = map(float, jointPosWorld)
newChildPos = cgtypes.vec3(jointPosWorld) - newChildQuat.rotateVec(cgtypes.vec3(cjp))
ret[str(child.Name)] = [[newChildPos.x, newChildPos.y, newChildPos.z] + newChildEuler]
return ret
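# ---------------------------------------------------------------------------
# Editor's note: a minimal, self-contained sketch of the quaternion slerp that
# frame_interpolate() relies on, written with plain (w, x, y, z) tuples instead
# of cgtypes.quat. Illustrative only; not part of the original module.
#
#   import math
#
#   def slerp_sketch(q0, q1, t):
#       """Spherical linear interpolation between two unit quaternions."""
#       dot = sum(a * b for a, b in zip(q0, q1))
#       if dot < 0.0:                      # take the shorter arc
#           q1 = tuple(-c for c in q1)
#           dot = -dot
#       if dot > 0.9995:                   # nearly parallel: fall back to lerp
#           return tuple(a + t * (b - a) for a, b in zip(q0, q1))
#       theta = math.acos(dot)
#       s0 = math.sin((1.0 - t) * theta) / math.sin(theta)
#       s1 = math.sin(t * theta) / math.sin(theta)
#       return tuple(s0 * a + s1 * b for a, b in zip(q0, q1))
# ---------------------------------------------------------------------------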
|
gpl-3.0
| -2,979,587,800,556,766,000
| 33.627172
| 147
| 0.67424
| false
| 3.077938
| false
| false
| false
|
toogad/PooPyLab_Project
|
PooPyLab/ASMModel/asm_2d.py
|
1
|
26614
|
# This file is part of PooPyLab.
#
# PooPyLab is a simulation software for biological wastewater treatment processes using International Water Association
# Activated Sludge Models.
#
# Copyright (C) Kai Zhang
#
# PooPyLab is free software: you can redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# PooPyLab is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with PooPyLab. If not, see
# <http://www.gnu.org/licenses/>.
#
#
# This is the definition of the ASM1 model to be imported as part of the Reactor object
#
#
"""Definition of the IWA Activated Sludge Model #1.
Reference:
Grady Jr. et al, 1999: Biological Wastewater Treatment, 2nd Ed.
IWA Task Group on Math. Modelling for Design and Operation of Biological
Wastewater Treatment, 2000. Activated Sludge Model No. 1, in Activated
Sludge Models ASM1, ASM2, ASM2d, and ASM 3.
"""
## @namespace asm_2d
## @file asm_2d.py
from ..ASMModel import constants
from .asmbase import asm_model
class ASM_1(asm_model):
"""
Kinetics and stoichiometrics of the IWA ASM 1 model.
"""
__id = 0
def __init__(self, ww_temp=20, DO=2):
"""
Initialize the model with water temperature and dissolved O2.
Args:
ww_temp: wastewater temperature, degC;
DO: dissolved oxygen, mg/L
Return:
None
See:
_set_ideal_kinetics_20C();
_set_params();
_set_stoichs().
"""
asm_model.__init__(self)
self.__class__.__id += 1
self._set_ideal_kinetics_20C_to_defaults()
# wastewater temperature used in the model, degC
self._temperature = ww_temp
# mixed liquor bulk dissolved oxygen, mg/L
self._bulk_DO = DO
# temperature difference b/t what's used and baseline (20C), degC
self._delta_t = self._temperature - 20
self.update(ww_temp, DO)
# The ASM components IN THE REACTOR
# For ASM #1:
#
# self._comps[0]: S_DO as COD
# self._comps[1]: S_I
# self._comps[2]: S_S
# self._comps[3]: S_NH
# self._comps[4]: S_NS
# self._comps[5]: S_NO
# self._comps[6]: S_ALK
# self._comps[7]: X_I
# self._comps[8]: X_S
# self._comps[9]: X_BH
# self._comps[10]: X_BA
# self._comps[11]: X_D
# self._comps[12]: X_NS
#
# ASM model components
self._comps = [0.0] * constants._NUM_ASM1_COMPONENTS
return None
def _set_ideal_kinetics_20C_to_defaults(self):
"""
Set the kinetic params/consts @ 20C to default ideal values.
See:
update();
_set_params();
_set_stoichs().
"""
# Ideal Growth Rate of Heterotrophs (u_max_H, 1/DAY)
self._kinetics_20C['u_max_H'] = 6.0
# Decay Rate of Heterotrophs (b_H, 1/DAY)
self._kinetics_20C['b_LH'] = 0.62
# Ideal Growth Rate of Autotrophs (u_max_A, 1/DAY)
self._kinetics_20C['u_max_A'] = 0.8
# Decay Rate of Autotrophs (b_A, 1/DAY)
# A wide range exists. Table 6.3 in Grady et al. 1999 shows 0.096 (1/d). IWA's
# ASM report does not even list a typical value for b_A in its table. The ASIM
# software shows a value of "0.000", probably cut off by the print
# function. I can only assume it was < 0.0005 (1/d) at 20C.
#self._kinetics_20C['b_LA'] = 0.096
self._kinetics_20C['b_LA'] = 0.0007
# Half Growth Rate Concentration of Heterotrophs (K_s, mgCOD/L)
self._kinetics_20C['K_S'] = 20.0
# Switch Coefficient for Dissolved O2 of Hetero. (K_OH, mgO2/L)
self._kinetics_20C['K_OH'] = 0.2
# Association Conc. for Dissolved O2 of Auto. (K_OA, mgO2/L)
self._kinetics_20C['K_OA'] = 0.4
# Association Conc. for NH3-N of Auto. (K_NH, mgN/L)
self._kinetics_20C['K_NH'] = 1.0
# Association Conc. for NOx of Hetero. (K_NO, mgN/L)
self._kinetics_20C['K_NO'] = 0.5
# Hydrolysis Rate (k_h, mgCOD/mgBiomassCOD-day)
self._kinetics_20C['k_h'] = 3.0
# Half Rate Conc. for Hetero. Growth on Part. COD
# (K_X, mgCOD/mgBiomassCOD)
self._kinetics_20C['K_X'] = 0.03
# Ammonification of Org-N in biomass (k_a, L/mgBiomassCOD-day)
self._kinetics_20C['k_a'] = 0.08
# Yield of Hetero. Growth on COD (Y_H, mgBiomassCOD/mgCODremoved)
self._kinetics_20C['Y_H'] = 0.67
# Yield of Auto. Growth on TKN (Y_A, mgBiomassCOD/mgTKNoxidized)
self._kinetics_20C['Y_A'] = 0.24
# Fract. of Debris in Lysed Biomass(f_D, gDebrisCOD/gBiomassCOD)
self._kinetics_20C['f_D'] = 0.08
# Correction Factor for Hydrolysis (cf_h, unitless)
self._kinetics_20C['cf_h'] = 0.4
# Correction Factor for Anoxic Heterotrophic Growth (cf_g, unitless)
self._kinetics_20C['cf_g'] = 0.8
# Ratio of N in Active Biomass (i_N_XB, mgN/mgActiveBiomassCOD)
self._kinetics_20C['i_N_XB'] = 0.086
# Ratio of N in Debris Biomass (i_N_XD, mgN/mgDebrisBiomassCOD)
self._kinetics_20C['i_N_XD'] = 0.06
return None
def _set_params(self):
"""
Set the kinetic parameters/constants @ project temperature.
This function updates the self._params based on the model temperature
and DO.
See:
update();
_set_ideal_kinetics_20C();
_set_stoichs().
"""
# Ideal Growth Rate of Heterotrophs (u_max_H, 1/DAY)
self._params['u_max_H'] = self._kinetics_20C['u_max_H']\
* pow(1.072, self._delta_t)
# Decay Rate of Heterotrophs (b_H, 1/DAY)
self._params['b_LH'] = self._kinetics_20C['b_LH']\
* pow(1.12, self._delta_t)
# Ideal Growth Rate of Autotrophs (u_max_A, 1/DAY)
self._params['u_max_A'] = self._kinetics_20C['u_max_A']\
* pow(1.103, self._delta_t)
# Decay Rate of Autotrophs (b_A, 1/DAY)
self._params['b_LA'] = self._kinetics_20C['b_LA']\
* pow(1.114, self._delta_t)
# Half Growth Rate Concentration of Heterotrophs (K_s, mgCOD/L)
self._params['K_S'] = self._kinetics_20C['K_S']
# Switch Coefficient for Dissolved O2 of Hetero. (K_OH, mgO2/L)
self._params['K_OH'] = self._kinetics_20C['K_OH']
# Association Conc. for Dissolved O2 of Auto. (K_OA, mgO2/L)
self._params['K_OA'] = self._kinetics_20C['K_OA']
# Association Conc. for NH3-N of Auto. (K_NH, mgN/L)
self._params['K_NH'] = self._kinetics_20C['K_NH']
# Association Conc. for NOx of Hetero. (K_NO, mgN/L)
self._params['K_NO'] = self._kinetics_20C['K_NO']
# Hydrolysis Rate (k_h, mgCOD/mgBiomassCOD-day)
self._params['k_h'] = self._kinetics_20C['k_h']\
* pow(1.116, self._delta_t)
# Half Rate Conc. for Hetero. Growth on Part. COD
# (K_X, mgCOD/mgBiomassCOD)
self._params['K_X'] = self._kinetics_20C['K_X']\
* pow(1.116, self._delta_t)
# Ammonification of Org-N in biomass (k_a, L/mgBiomassCOD-day)
self._params['k_a'] = self._kinetics_20C['k_a']\
* pow(1.072, self._delta_t)
# Yield of Hetero. Growth on COD (Y_H, mgBiomassCOD/mgCODremoved)
self._params['Y_H'] = self._kinetics_20C['Y_H']
# Yield of Auto. Growth on TKN (Y_A, mgBiomassCOD/mgTKNoxidized)
self._params['Y_A'] = self._kinetics_20C['Y_A']
# Fract. of Debris in Lysed Biomass(f_D, gDebrisCOD/gBiomassCOD)
self._params['f_D'] = self._kinetics_20C['f_D']
# Correction Factor for Hydrolysis (cf_h, unitless)
self._params['cf_h'] = self._kinetics_20C['cf_h']
# Correction Factor for Anoxic Heterotrophic Growth (cf_g, unitless)
self._params['cf_g'] = self._kinetics_20C['cf_g']
# Ratio of N in Active Biomass (i_N_XB, mgN/mgActiveBiomassCOD)
self._params['i_N_XB'] = self._kinetics_20C['i_N_XB']
# Ratio of N in Debris Biomass (i_N_XD, mgN/mgDebrisBiomassCOD)
self._params['i_N_XD'] = self._kinetics_20C['i_N_XD']
return None
# STOCHIOMETRIC MATRIX
def _set_stoichs(self):
"""
Set the stoichiometrics for the model.
Note:
Make sure to match the .csv model template file in the
model_builder folder (Sep 04, 2019):
_stoichs['x_y'] ==> x is process rate id, and y is component id
See:
_set_params();
_set_ideal_kinetics_20C();
update().
"""
# S_O for aerobic hetero. growth, as O2
self._stoichs['0_0'] = (self._params['Y_H'] - 1.0) \
/ self._params['Y_H']
# S_O for aerobic auto. growth, as O2
self._stoichs['2_0'] = (self._params['Y_A'] - 4.57) \
/ self._params['Y_A']
# S_S for aerobic hetero. growth, as COD
self._stoichs['0_2'] = -1.0 / self._params['Y_H']
# S_S for anoxic hetero. growth, as COD
self._stoichs['1_2'] = -1.0 / self._params['Y_H']
# S_S for hydrolysis of part. substrate
self._stoichs['6_2'] = 1.0
# S_NH required for aerobic hetero. growth, as N
self._stoichs['0_3'] = -self._params['i_N_XB']
# S_NH required for anoxic hetero. growth, as N
self._stoichs['1_3'] = -self._params['i_N_XB']
# S_NH required for aerobic auto. growth, as N
self._stoichs['2_3'] = -self._params['i_N_XB'] \
- 1.0 / self._params['Y_A']
# S_NH from ammonification, as N
self._stoichs['5_3'] = 1.0
# S_NS used by ammonification, as N
self._stoichs['5_4'] = -1.0
# S_NS from hydrolysis of part.TKN, as N
self._stoichs['7_4'] = 1.0
# S_NO for anoxic hetero. growth, as N
self._stoichs['1_5'] = (self._params['Y_H'] - 1.0) \
/ (2.86 * self._params['Y_H'])
# S_NO from nitrification, as N
self._stoichs['2_5'] = 1.0 / self._params['Y_A']
# S_ALK consumed by aerobic hetero. growth, as mM CaCO3
self._stoichs['0_6'] = -self._params['i_N_XB'] / 14.0
# S_ALK generated by anoxic hetero. growth, as mM CaCO3
self._stoichs['1_6'] = (1.0 - self._params['Y_H']) \
/ (14.0 * 2.86 * self._params['Y_H']) \
- self._params['i_N_XB'] / 14.0
# S_ALK consumed by aerobic auto. growth, as mM CaCO3
self._stoichs['2_6'] = -self._params['i_N_XB'] / 14 \
- 1.0 / (7.0 * self._params['Y_A'])
# S_ALK generated by ammonification, as mM CaCO3
self._stoichs['5_6'] = 1.0 / 14.0
# X_S from hetero. decay, as COD
self._stoichs['3_8'] = 1.0 - self._params['f_D']
# X_S from auto. decay, as COD
self._stoichs['4_8'] = 1.0 - self._params['f_D']
# X_S consumed by hydrolysis of biomass
self._stoichs['6_8'] = -1.0
# X_BH from aerobic hetero. growth, as COD
self._stoichs['0_9'] = 1.0
# X_BH from anoxic hetero. growth, as COD
self._stoichs['1_9'] = 1.0
# X_BH lost in hetero. decay, as COD
self._stoichs['3_9'] = -1.0
# X_BA from aerobic auto. growth, as COD
self._stoichs['2_10'] = 1.0
# X_BA lost in auto. decay, as COD
self._stoichs['4_10'] = -1.0
# X_D from hetero. decay, as COD
self._stoichs['3_11'] = self._params['f_D']
# X_D from auto. decay, as COD
self._stoichs['4_11'] = self._params['f_D']
# X_NS from hetero. decay, as N
self._stoichs['3_12'] = self._params['i_N_XB'] - self._params['f_D'] \
* self._params['i_N_XD']
# X_NS from auto. decay, as COD
self._stoichs['4_12'] = self._params['i_N_XB'] - self._params['f_D'] \
* self._params['i_N_XD']
# X_NS consumed in hydrolysis of part. TKN, as N
self._stoichs['7_12'] = -1.0
return None
# PROCESS RATE DEFINITIONS (Rj, M/L^3/T):
#
def _r0_AerGH(self, comps):
"""
Aerobic Growth Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_H'] \
* self._monod(comps[2], self._params['K_S']) \
* self._monod(comps[0], self._params['K_OH']) \
* comps[9]
def _r1_AxGH(self, comps):
"""
Anoxic Growth Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_H'] \
* self._monod(comps[2], self._params['K_S']) \
* self._monod(self._params['K_OH'], comps[0]) \
* self._monod(comps[5], self._params['K_NO']) \
* self._params['cf_g'] \
* comps[9]
def _r2_AerGA(self, comps):
"""
Aerobic Growth Rate of Autotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['u_max_A'] \
* self._monod(comps[3], self._params['K_NH']) \
* self._monod(comps[0], self._params['K_OA']) \
* comps[10]
def _r3_DLH(self, comps):
"""
Death and Lysis Rate of Heterotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['b_LH'] * comps[9]
def _r4_DLA(self, comps):
"""
Death and Lysis Rate of Autotrophs (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['b_LA'] * comps[10]
def _r5_AmmSN(self, comps):
"""
Ammonification Rate of Soluble Organic N (mgN/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['k_a'] \
* comps[4] \
* comps[9]
def _r6_HydX(self, comps):
"""
Hydrolysis Rate of Particulate Organics (mgCOD/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._params['k_h'] \
* self._monod(comps[8] / comps[9], \
self._params['K_X']) \
* (self._monod(comps[0], self._params['K_OH']) \
+ self._params['cf_h'] \
* self._monod(self._params['K_OH'], comps[0]) \
* self._monod(comps[5], self._params['K_NO'])) \
* comps[9]
def _r7_HydXN(self, comps):
"""
Hydrolysis Rate of Particulate Organic N (mgN/L/day).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._r6_HydX(comps) * comps[12] / comps[8]
# OVERALL PROCESS RATE EQUATIONS FOR INDIVIDUAL COMPONENTS
def _rate0_S_DO(self, comps):
"""
Overall process rate for dissolved O2 (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_0'] * self._r0_AerGH(comps)\
+ self._stoichs['2_0'] * self._r2_AerGA(comps)
def _rate1_S_I(self, comps):
"""
Overall process rate for inert soluble COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
0.0
"""
return 0.0
def _rate2_S_S(self, comps):
"""
Overall process rate for soluble biodegradable COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_2'] * self._r0_AerGH(comps)\
+ self._stoichs['1_2'] * self._r1_AxGH(comps)\
+ self._stoichs['6_2'] * self._r6_HydX(comps)
def _rate3_S_NH(self, comps):
"""
Overall process rate for ammonia nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_3'] * self._r0_AerGH(comps)\
+ self._stoichs['1_3'] * self._r1_AxGH(comps)\
+ self._stoichs['2_3'] * self._r2_AerGA(comps)\
+ self._stoichs['5_3'] * self._r5_AmmSN(comps)
def _rate4_S_NS(self, comps):
"""
Overall process rate for soluble organic nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['5_4'] * self._r5_AmmSN(comps)\
+ self._stoichs['7_4'] * self._r7_HydXN(comps)
def _rate5_S_NO(self, comps):
"""
Overall process rate for nitrite/nitrate nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['1_5'] * self._r1_AxGH(comps)\
+ self._stoichs['2_5'] * self._r2_AerGA(comps)
def _rate6_S_ALK(self, comps):
"""
Overall process rate for alkalinity (mg/L/d as CaCO3)
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_6'] * self._r0_AerGH(comps)\
+ self._stoichs['1_6'] * self._r1_AxGH(comps)\
+ self._stoichs['2_6'] * self._r2_AerGA(comps)\
+ self._stoichs['5_6'] * self._r5_AmmSN(comps)
def _rate7_X_I(self, comps):
"""
Overall process rate for inert particulate COD (mgCOD/L/d)
Args:
comps: list of current model components (concentrations).
Return:
0.0
"""
return 0.0
def _rate8_X_S(self, comps):
"""
Overall process rate for particulate biodegradable COD (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_8'] * self._r3_DLH(comps)\
+ self._stoichs['4_8'] * self._r4_DLA(comps)\
+ self._stoichs['6_8'] * self._r6_HydX(comps)
def _rate9_X_BH(self, comps):
"""
Overall process rate for heterotrophic biomass (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['0_9'] * self._r0_AerGH(comps)\
+ self._stoichs['1_9'] * self._r1_AxGH(comps)\
+ self._stoichs['3_9'] * self._r3_DLH(comps)
def _rate10_X_BA(self, comps):
"""
Overall process rate for autotrophic biomass (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['2_10'] * self._r2_AerGA(comps)\
+ self._stoichs['4_10'] * self._r4_DLA(comps)
def _rate11_X_D(self, comps):
"""
Overall process rate for biomass debris (mgCOD/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_11'] * self._r3_DLH(comps)\
+ self._stoichs['4_11'] * self._r4_DLA(comps)
def _rate12_X_NS(self, comps):
"""
Overall process rate for particulate organic nitrogen (mgN/L/d).
Args:
comps: list of current model components (concentrations).
Return:
float
"""
return self._stoichs['3_12'] * self._r3_DLH(comps)\
+ self._stoichs['4_12'] * self._r4_DLA(comps)\
+ self._stoichs['7_12'] * self._r7_HydXN(comps)
def _dCdt(self, t, mo_comps, vol, flow, in_comps, fix_DO, DO_sat_T):
'''
Defines dC/dt for the reactor based on mass balance.
Overall mass balance:
dComp/dt == InfFlow / Actvol * (in_comps - mo_comps) + GrowthRate
== (in_comps - mo_comps) / HRT + GrowthRate
Args:
t: time for use in ODE integration routine, d
mo_comps: list of model components for mainstream outlet, mg/L.
vol: reactor's active volume, m3;
flow: reactor's total inflow, m3/d
in_comps: list of model components for inlet, mg/L;
fix_DO: whether to use a fixed DO setpoint, bool
DO_sat_T: saturation DO of the project elev. and temp, mg/L
Return:
dC/dt of the system ([float])
ASM1 Components:
0_S_DO, 1_S_I, 2_S_S, 3_S_NH, 4_S_NS, 5_S_NO, 6_S_ALK,
7_X_I, 8_X_S, 9_X_BH, 10_X_BA, 11_X_D, 12_X_NS
'''
_HRT = vol / flow
# set DO rate to zero since DO is set to a fixed conc., which is
# recommended for steady state simulation; alternatively, use the given
# KLa to dynamically estimate residual DO
if fix_DO or self._bulk_DO == 0:
result = [0.0]
else: #TODO: what if the user provides a fix scfm of air?
result = [(in_comps[0] - mo_comps[0] ) / _HRT
+ self._KLa * (DO_sat_T - mo_comps[0])
+ self._rate0_S_DO(mo_comps)]
result.append((in_comps[1] - mo_comps[1]) / _HRT
+ self._rate1_S_I(mo_comps))
result.append((in_comps[2] - mo_comps[2]) / _HRT
+ self._rate2_S_S(mo_comps))
result.append((in_comps[3] - mo_comps[3]) / _HRT
+ self._rate3_S_NH(mo_comps))
result.append((in_comps[4] - mo_comps[4]) / _HRT
+ self._rate4_S_NS(mo_comps))
result.append((in_comps[5] - mo_comps[5]) / _HRT
+ self._rate5_S_NO(mo_comps))
result.append((in_comps[6] - mo_comps[6]) / _HRT
+ self._rate6_S_ALK(mo_comps))
result.append((in_comps[7] - mo_comps[7]) / _HRT
+ self._rate7_X_I(mo_comps))
result.append((in_comps[8] - mo_comps[8]) / _HRT
+ self._rate8_X_S(mo_comps))
result.append((in_comps[9] - mo_comps[9]) / _HRT
+ self._rate9_X_BH(mo_comps))
result.append((in_comps[10] - mo_comps[10]) / _HRT
+ self._rate10_X_BA(mo_comps))
result.append((in_comps[11] - mo_comps[11]) / _HRT
+ self._rate11_X_D(mo_comps))
result.append((in_comps[12] - mo_comps[12]) / _HRT
+ self._rate12_X_NS(mo_comps))
return result[:]
def _dCdt_kz(self, mo_comps, vol, flow, in_comps):
'''
Defines dC/dt for the reactor based on mass balance.
Overall mass balance:
dComp/dt == InfFlow / Actvol * (in_comps - mo_comps) + GrowthRate
== (in_comps - mo_comps) / HRT + GrowthRate
Args:
mo_comps: list of model components for mainstream outlet, mg/L.
vol: reactor's active volume, m3;
flow: reactor's total inflow, m3/d
in_comps: list of model components for inlet, mg/L;
Return:
dC/dt of the system ([float])
ASM1 Components:
0_S_DO, 1_S_I, 2_S_S, 3_S_NH, 4_S_NS, 5_S_NO, 6_S_ALK,
7_X_I, 8_X_S, 9_X_BH, 10_X_BA, 11_X_D, 12_X_NS
'''
_HRT = vol / flow
# set DO rate to zero since DO is set to a fixed conc.
result = [0.0]
result.append((in_comps[1] - mo_comps[1]) / _HRT
+ self._rate1_S_I(mo_comps))
result.append((in_comps[2] - mo_comps[2]) / _HRT
+ self._rate2_S_S(mo_comps))
result.append((in_comps[3] - mo_comps[3]) / _HRT
+ self._rate3_S_NH(mo_comps))
result.append((in_comps[4] - mo_comps[4]) / _HRT
+ self._rate4_S_NS(mo_comps))
result.append((in_comps[5] - mo_comps[5]) / _HRT
+ self._rate5_S_NO(mo_comps))
result.append((in_comps[6] - mo_comps[6]) / _HRT
+ self._rate6_S_ALK(mo_comps))
result.append((in_comps[7] - mo_comps[7]) / _HRT
+ self._rate7_X_I(mo_comps))
result.append((in_comps[8] - mo_comps[8]) / _HRT
+ self._rate8_X_S(mo_comps))
result.append((in_comps[9] - mo_comps[9]) / _HRT
+ self._rate9_X_BH(mo_comps))
result.append((in_comps[10] - mo_comps[10]) / _HRT
+ self._rate10_X_BA(mo_comps))
result.append((in_comps[11] - mo_comps[11]) / _HRT
+ self._rate11_X_D(mo_comps))
result.append((in_comps[12] - mo_comps[12]) / _HRT
+ self._rate12_X_NS(mo_comps))
return result[:]
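# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch (not part of the original file) of how
# this model class is typically driven. The influent quality, volume and flow
# figures below are made-up placeholders; the import assumes the surrounding
# PooPyLab package is installed.
#
#   from PooPyLab.ASMModel.asm_2d import ASM_1
#
#   model = ASM_1(ww_temp=20, DO=2)              # 20 degC, 2 mg/L dissolved O2
#   inlet = [2.0, 30.0, 150.0, 25.0, 5.0, 0.0, 6.0,
#            25.0, 100.0, 10.0, 1.0, 0.0, 2.0]   # 13 ASM1 components, mg/L
#   outlet = list(inlet)                         # start the reactor at inlet quality
#   dCdt = model._dCdt_kz(outlet, vol=1000.0, flow=10000.0, in_comps=inlet)
#   # dCdt[k] is the net rate of change (mg/L/d) of ASM1 component k.
# ---------------------------------------------------------------------------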
|
gpl-3.0
| -344,751,125,513,634,200
| 30.683333
| 119
| 0.512399
| false
| 3.105847
| false
| false
| false
|
MISP/MISP-TAXII--broken-
|
taxii_client.py
|
1
|
3442
|
#!flask/bin/python
__version__ = '0.2'
import os
import argparse
import datetime
from dateutil.tz import tzutc
import libtaxii as t
import libtaxii.messages as tm
import libtaxii.clients as tc
try:
import simplejson as json
except ImportError:
import json
PID_FILE = '/tmp/taxii_client.pid'
PROXY_ENABLED = False
PROXY_SCHEME = 'http'
PROXY_STRING = '127.0.0.1:8008'
ATTACHMENTS_PATH_OUT = '/var/tmp/files_out'
"""Search for attachments in this path and attach them to the attribute"""
TAXII_SERVICE_HOST = '127.0.0.1'
TAXII_SERVICE_PORT = 4242
TAXII_SERVICE_PATH = '/inbox'
def is_process_running(pid):
try:
os.kill(pid, 0)
except OSError:
return
else:
return pid
def check_process(path):
if os.path.exists(path):
pid = int(open(path).read())
if is_process_running(pid):
print "Process %d is still running" % pid
raise SystemExit
else:
os.remove(path)
pid = os.getpid()
open(path, 'w').write(str(pid))
return pid
def create_inbox_message(data, content_binding=t.VID_CERT_EU_JSON_10):
"""Creates TAXII message from data"""
content_block = tm.ContentBlock(
content_binding=content_binding,
content=data,
timestamp_label=datetime.datetime.now(tzutc()))
msg_id = tm.generate_message_id()
inbox_message = tm.InboxMessage(
message_id=msg_id,
content_blocks=[content_block])
return msg_id, inbox_message.to_json()
def main(**args):
check_process(PID_FILE)
client = tc.HttpClient()
if PROXY_ENABLED:
client.proxy_type = PROXY_SCHEME
client.proxy_string = PROXY_STRING
msg_id, msg = '', ''
if args['data_type'] == 'string':
msg_id, msg = create_inbox_message(args['data'])
else:
print '[-] Please use a JSON string'
raise SystemExit
http_response = client.callTaxiiService2(
args['host'], args['path'],
t.VID_CERT_EU_JSON_10, msg, args['port'])
taxii_response = t.get_message_from_http_response(http_response, msg_id)
print(taxii_response.to_json())
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='TAXII Client',
epilog='DO NOT USE IN PRODUCTION',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument(
"-t", "--type",
dest="data_type",
choices=['string'],
default='string',
help='Data type you are posting; "sync" will read DB')
parser.add_argument(
"-d", "--data",
dest="data",
required=False,
help='Data to be posted to TAXII Service')
parser.add_argument(
"-th", "--taxii_host",
dest="host",
default=TAXII_SERVICE_HOST,
help='TAXII Service Host')
parser.add_argument(
"-tp", "--taxii_port",
dest="port",
default=TAXII_SERVICE_PORT,
help='TAXII Service Port')
parser.add_argument(
"-tpath", "--taxii_path",
dest="path",
default=TAXII_SERVICE_PATH,
help='TAXII Service Path')
parser.add_argument(
"-key", "--api_key",
dest="api_key",
help='MISP API Key')
parser.add_argument(
"-v", "--version",
action='version',
version='%(prog)s {version}'.format(version=__version__))
args = parser.parse_args()
main(**vars(args))
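# ---------------------------------------------------------------------------
# Editor's note: example invocation (illustrative only, not part of the
# original script); host, port and path shown are the defaults defined above.
#
#   python taxii_client.py -t string -d '{"key": "value"}' \
#       -th 127.0.0.1 -tp 4242 -tpath /inbox
# ---------------------------------------------------------------------------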
|
agpl-3.0
| 7,958,191,412,627,987,000
| 23.06993
| 76
| 0.600813
| false
| 3.552116
| false
| false
| false
|
turdusmerula/kipartman
|
kipartman/dialogs/dialog_edit_category.py
|
1
|
2893
|
# -*- coding: utf-8 -*-
###########################################################################
## Python code generated with wxFormBuilder (version Jul 12 2017)
## http://www.wxformbuilder.org/
##
## PLEASE DO "NOT" EDIT THIS FILE!
###########################################################################
import wx
import wx.xrc
###########################################################################
## Class DialogEditCategory
###########################################################################
class DialogEditCategory ( wx.Dialog ):
def __init__( self, parent ):
wx.Dialog.__init__ ( self, parent, id = wx.ID_ANY, title = wx.EmptyString, pos = wx.DefaultPosition, size = wx.Size( 332,159 ), style = wx.DEFAULT_DIALOG_STYLE )
self.SetSizeHintsSz( wx.DefaultSize, wx.DefaultSize )
bSizer4 = wx.BoxSizer( wx.VERTICAL )
fgSizer2 = wx.FlexGridSizer( 0, 2, 0, 0 )
fgSizer2.AddGrowableCol( 1 )
fgSizer2.SetFlexibleDirection( wx.BOTH )
fgSizer2.SetNonFlexibleGrowMode( wx.FLEX_GROWMODE_SPECIFIED )
self.m_staticText1 = wx.StaticText( self, wx.ID_ANY, u"Category", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText1.Wrap( -1 )
fgSizer2.Add( self.m_staticText1, 1, wx.RIGHT|wx.LEFT|wx.ALIGN_CENTER_VERTICAL, 5 )
self.text_name = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
fgSizer2.Add( self.text_name, 1, wx.ALL|wx.EXPAND, 5 )
self.m_staticText2 = wx.StaticText( self, wx.ID_ANY, u"Description", wx.DefaultPosition, wx.DefaultSize, 0 )
self.m_staticText2.Wrap( -1 )
fgSizer2.Add( self.m_staticText2, 0, wx.ALL|wx.ALIGN_CENTER_VERTICAL, 5 )
self.text_description = wx.TextCtrl( self, wx.ID_ANY, wx.EmptyString, wx.DefaultPosition, wx.DefaultSize, 0 )
fgSizer2.Add( self.text_description, 1, wx.ALL|wx.EXPAND, 5 )
bSizer4.Add( fgSizer2, 1, wx.EXPAND, 5 )
bSizer1 = wx.BoxSizer( wx.VERTICAL )
bSizer2 = wx.BoxSizer( wx.HORIZONTAL )
self.button_validate = wx.Button( self, wx.ID_OK, u"Add", wx.DefaultPosition, wx.DefaultSize, 0 )
self.button_validate.SetDefault()
bSizer2.Add( self.button_validate, 0, wx.ALL, 5 )
self.button_cancel = wx.Button( self, wx.ID_CANCEL, u"Cancel", wx.DefaultPosition, wx.DefaultSize, 0 )
bSizer2.Add( self.button_cancel, 0, wx.ALL, 5 )
bSizer1.Add( bSizer2, 0, wx.TOP|wx.ALIGN_CENTER_HORIZONTAL, 5 )
bSizer4.Add( bSizer1, 0, wx.EXPAND|wx.ALIGN_RIGHT, 5 )
self.SetSizer( bSizer4 )
self.Layout()
self.Centre( wx.BOTH )
# Connect Events
self.button_validate.Bind( wx.EVT_BUTTON, self.onValidateClick )
self.button_cancel.Bind( wx.EVT_BUTTON, self.onCancelClick )
def __del__( self ):
pass
# Virtual event handlers, override them in your derived class
def onValidateClick( self, event ):
event.Skip()
def onCancelClick( self, event ):
event.Skip()
|
gpl-3.0
| -646,511,858,100,546,600
| 32.639535
| 163
| 0.617007
| false
| 3.07766
| false
| false
| false
|
the-virtual-brain/tvb-hpc
|
phase_plane_interactive/hackathon_cuda.py
|
1
|
4443
|
# Copyright 2017 TVB-HPC contributors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import division, print_function
import math as m
import time
import numpy as _lpy_np
import numba.cuda as _lpy_ncu
import numba as _lpy_numba
from tvb_hpc import utils, network, model
from typing import List
# TODO Add call to the generated numbacuda code
LOG = utils.getLogger('tvb_hpc')
def make_data():
c = network.Connectivity.hcp0()
return c.nnode, c.lengths, c.nnz, c.row, c.col, c.wnz, c.nz, c.weights
def prep_arrays(nsims, nnode: int) -> List[_lpy_np.ndarray]:
"""
Prepare arrays for use with this model.
"""
dtype = _lpy_np.float32
arrs: List[_lpy_np.ndarray] = []
for key in 'input drift diffs'.split():
shape = nsims * nnode * 1
arrs.append(_lpy_np.zeros(shape, dtype))
for i, (lo, hi) in enumerate([(0, 2 * _lpy_np.pi)]):
state = _lpy_np.ones(nsims* nnode)#.random.uniform(float(lo), float(hi),
#size=(nsims* nnode ))
arrs.append(state)
param = _lpy_np.ones((nnode * 1), dtype)
arrs.append(param)
return arrs
def run_all(args):
j, speed, coupling, nnode, lengths, nz, nnz, row, col, wnz = args
dt = 1.0
lnz = []
for i in range(len(speed)):
lnz.append((lengths[nz] / speed[i] / dt).astype(_lpy_np.uintc))
#print(_lpy_np.shape(lnz))
#flat_lnz = [item for sublist in lnz for item in sublist]
#flat_lnz = _lpy_np.asarray(flat_lnz)
flat_lnz = _lpy_np.reshape(lnz, (nnz*len(speed)))
input, drift, diffs, state, param = prep_arrays(len(coupling)*len(speed),nnode)
obsrv = _lpy_np.zeros((len(coupling)*len(speed) * (max(flat_lnz) + 3 + 4000) * nnode * 2), _lpy_np.float32)
trace = _lpy_np.zeros((len(coupling)*len(speed), 400, nnode), _lpy_np.float32)
threadsperblock = len(coupling)
blockspergrid = len(speed)
for i in range(400):
Kuramoto_and_Network_and_EulerStep_inner[blockspergrid, threadsperblock](10, nnode, (max(flat_lnz) + 3 + 4000), state, input, param, drift, diffs, obsrv, nnz, flat_lnz, row, col, wnz, coupling, i * 10)
o = obsrv
o =_lpy_np.reshape(o,(len(coupling)*len(speed), (max(flat_lnz) + 3 + 4000), nnode, 2))
trace[:,i,:] = o[:,i * 10:(i + 1) * 10, :, 0].sum(axis=1)
return trace
def run():
_lpy_ncu.select_device(0)
LOG.info(_lpy_ncu.gpus)
#utils.default_target = NumbaCudaTarget
nnode, lengths, nnz, row, col, wnz, nz, weights = make_data()
# choose param space
nc, ns = 8, 8
couplings = _lpy_np.logspace(0, 1.0, nc)
speeds = _lpy_np.logspace(0.0, 2.0, ns)
# Make parallel over speed and coupling
start = time.time()
trace = run_all((0, speeds, couplings, nnode, lengths, nz, nnz, row, col, wnz))
end = time.time()
print ("Finished simulation successfully in:")
print(end - start)
print ("Checking correctness of results")
# check correctness
n_work_items = nc * ns
r, c = _lpy_np.triu_indices(nnode, 1)
win_size = 200 # 2s
tavg = _lpy_np.transpose(trace, (1, 2, 0))
win_tavg = tavg.reshape((-1, win_size) + tavg.shape[1:])
err = _lpy_np.zeros((len(win_tavg), n_work_items))
for i, tavg_ in enumerate(win_tavg):
for j in range(n_work_items):
fc = _lpy_np.corrcoef(tavg_[:, :, j].T)
err[i, j] = ((fc[r, c] - weights[r, c])**2).sum()
# look at 2nd 2s window (converges quickly)
err_ = err[-1].reshape((speeds.size, couplings.size))
# change on fc-sc metric wrt. speed & coupling strength
derr_speed = _lpy_np.diff(err_.mean(axis=1)).sum()
derr_coupl = _lpy_np.diff(err_.mean(axis=0)).sum()
LOG.info('derr_speed=%f, derr_coupl=%f', derr_speed, derr_coupl)
print (derr_speed)
assert derr_speed > 350.0
assert derr_coupl < -500.0
print ("Results are correct")
if __name__ == '__main__':
run()
|
apache-2.0
| -1,578,581,880,672,073,700
| 38.318584
| 209
| 0.622327
| false
| 2.857235
| false
| false
| false
|
ovnicraft/openerp-server
|
openerp/tools/amount_to_text_en.py
|
1
|
5115
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from translate import _
_logger = logging.getLogger(__name__)
#-------------------------------------------------------------
#ENGLISH
#-------------------------------------------------------------
to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen',
'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' )
tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety')
denom = ( '',
'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn(val):
"""convert a value < 100 to English.
"""
if val < 20:
return to_19[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19[val % 10]
return dcap
def _convert_nnn(val):
"""
convert a value < 1000 to english, special cased because it is the level that kicks
off the < 100 special case. The rest are more general. This also allows you to
get strings in the form of 'forty-five hundred' if called directly.
"""
word = ''
(mod, rem) = (val % 100, val // 100)
if rem > 0:
word = to_19[rem] + ' Hundred'
if mod > 0:
word = word + ' '
if mod > 0:
word = word + _convert_nn(mod)
return word
def english_number(val):
if val < 100:
return _convert_nn(val)
if val < 1000:
return _convert_nnn(val)
for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom))):
if dval > val:
mod = 1000 ** didx
l = val // mod
r = val - (l * mod)
ret = _convert_nnn(l) + ' ' + denom[didx]
if r > 0:
ret = ret + ', ' + english_number(r)
return ret
def amount_to_text(number, currency):
number = '%.2f' % number
units_name = currency
list = str(number).split('.')
start_word = english_number(int(list[0]))
end_word = english_number(int(list[1]))
cents_number = int(list[1])
cents_name = (cents_number > 1) and 'Cents' or 'Cent'
return ' '.join(filter(None, [start_word, units_name, (start_word or units_name) and (end_word or cents_name) and 'and', end_word, cents_name]))
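# Editor's note: illustrative result of the helper above (not in the original file):
#   amount_to_text(1654.00, 'euro')
#   -> 'One Thousand, Six Hundred Fifty-Four euro and Zero Cent'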
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
_translate_funcs = {'en' : amount_to_text}
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='en', currency='euro'):
""" Converts an integer to its textual representation, using the language set in the context if any.
Example::
1654: One Thousand, Six Hundred Fifty-Four euro and Zero Cent.
"""
import openerp.loglevels as loglevels
# if nbr > 10000000:
# _logger.warning(_("Number too large '%d', can not translate it"))
# return str(nbr)
if not _translate_funcs.has_key(lang):
_logger.warning(_("no translation function found for lang: '%s'"), lang)
#TODO: (default should be en) same as above
lang = 'en'
return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", int_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", int_to_text(i, lang)
else:
print amount_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
| 2,974,910,350,022,178,300
| 37.171642
| 148
| 0.539198
| false
| 3.530021
| false
| false
| false
|
indautgrp/erpnext
|
erpnext/accounts/report/balance_sheet/balance_sheet.py
|
1
|
4666
|
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt, cint
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)
def execute(filters=None):
period_list = get_period_list(filters.from_fiscal_year, filters.to_fiscal_year, filters.periodicity)
asset = get_data(filters.company, "Asset", "Debit", period_list, only_current_fiscal_year=False)
liability = get_data(filters.company, "Liability", "Credit", period_list, only_current_fiscal_year=False)
equity = get_data(filters.company, "Equity", "Credit", period_list, only_current_fiscal_year=False)
provisional_profit_loss, total_credit = get_provisional_profit_loss(asset, liability, equity,
period_list, filters.company)
message, opening_balance = check_opening_balance(asset, liability, equity)
data = []
data.extend(asset or [])
data.extend(liability or [])
data.extend(equity or [])
if opening_balance and round(opening_balance,2) !=0:
unclosed ={
"account_name": "'" + _("Unclosed Fiscal Years Profit / Loss (Credit)") + "'",
"account": "'" + _("Unclosed Fiscal Years Profit / Loss (Credit)") + "'",
"warn_if_negative": True,
"currency": frappe.db.get_value("Company", filters.company, "default_currency")
}
for period in period_list:
unclosed[period.key] = opening_balance
if provisional_profit_loss:
provisional_profit_loss[period.key] = provisional_profit_loss[period.key] - opening_balance
unclosed["total"]=opening_balance
data.append(unclosed)
if provisional_profit_loss:
data.append(provisional_profit_loss)
if total_credit:
data.append(total_credit)
columns = get_columns(filters.periodicity, period_list, company=filters.company)
chart = get_chart_data(columns, asset, liability, equity)
return columns, data, message, chart
def get_provisional_profit_loss(asset, liability, equity, period_list, company):
provisional_profit_loss = {}
total_row = {}
if asset and (liability or equity):
total = total_row_total=0
currency = frappe.db.get_value("Company", company, "default_currency")
total_row = {
"account_name": "'" + _("Total (Credit)") + "'",
"account": "'" + _("Total (Credit)") + "'",
"warn_if_negative": True,
"currency": currency
}
has_value = False
for period in period_list:
effective_liability = 0.0
if liability:
effective_liability += flt(liability[-2].get(period.key))
if equity:
effective_liability += flt(equity[-2].get(period.key))
provisional_profit_loss[period.key] = flt(asset[-2].get(period.key)) - effective_liability
total_row[period.key] = effective_liability + provisional_profit_loss[period.key]
if provisional_profit_loss[period.key]:
has_value = True
total += flt(provisional_profit_loss[period.key])
provisional_profit_loss["total"] = total
total_row_total += flt(total_row[period.key])
total_row["total"] = total_row_total
if has_value:
provisional_profit_loss.update({
"account_name": "'" + _("Provisional Profit / Loss (Credit)") + "'",
"account": "'" + _("Provisional Profit / Loss (Credit)") + "'",
"warn_if_negative": True,
"currency": currency
})
return provisional_profit_loss, total_row
def check_opening_balance(asset, liability, equity):
# Check if previous year balance sheet closed
opening_balance = 0
float_precision = cint(frappe.db.get_default("float_precision")) or 2
if asset:
opening_balance = flt(asset[0].get("opening_balance", 0), float_precision)
if liability:
opening_balance -= flt(liability[0].get("opening_balance", 0), float_precision)
if equity:
opening_balance -= flt(equity[0].get("opening_balance", 0), float_precision)
if opening_balance:
return _("Previous Financial Year is not closed"),opening_balance
return None,None
def get_chart_data(columns, asset, liability, equity):
x_intervals = ['x'] + [d.get("label") for d in columns[2:]]
asset_data, liability_data, equity_data = [], [], []
for p in columns[2:]:
if asset:
asset_data.append(asset[-2].get(p.get("fieldname")))
if liability:
liability_data.append(liability[-2].get(p.get("fieldname")))
if equity:
equity_data.append(equity[-2].get(p.get("fieldname")))
columns = [x_intervals]
if asset_data:
columns.append(["Assets"] + asset_data)
if liability_data:
columns.append(["Liabilities"] + liability_data)
if equity_data:
columns.append(["Equity"] + equity_data)
return {
"data": {
'x': 'x',
'columns': columns
}
}
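# ---------------------------------------------------------------------------
# Editor's note: illustrative shape of the chart payload built above for a
# two-period report (figures are placeholders, not real data):
#
#   {"data": {"x": "x",
#             "columns": [["x", "2015", "2016"],
#                         ["Assets", 1000.0, 1200.0],
#                         ["Liabilities", 400.0, 450.0],
#                         ["Equity", 600.0, 750.0]]}}
# ---------------------------------------------------------------------------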
|
gpl-3.0
| 1,444,389,777,122,514,700
| 33.308824
| 106
| 0.692885
| false
| 2.998715
| false
| false
| false
|
gmrandazzo/PyLSS
|
pylss/lsscoltransfer.py
|
1
|
5550
|
'''
@package ssengine
lsscoltransfer was written by Giuseppe Marco Randazzo <gmrandazzo@gmail.com>
Geneve Dec 2015
'''
#from scipy.optimize import fmin
from optimizer import simplex as fmin
from math import sqrt, pi, log10, log, exp, fabs, isnan, isinf, erf
from optseparation import drange
from time import sleep
def square(val):
""" return the square of val"""
return val*val
class LSSColTrans(object):
"""Perform the generation of LSS parameters logKw, S, alhpa1 and alpha 2
to be used in a column transfer.
Parameters
----------
c_length: float
Define the column length.
c_diameter: float
Define the column diameter.
c_porosity: float
Define the column particle porosity.
t0: float
Define the dead time for the unretained compounds.
v_d: float
Define the instrumental dead volume.
flow: float
Define the flow rate.
init_B: list(float)
Define the initial % of organic modifier in a gradients.
final_b: list(float)
Define the final % of organic modifier in a gradients.
tg: list(float)
Define the gradients time.
Returns
------
lss_logkw: float
Return the LSS logaritmic retention factor in water (logKw)
lss_s: float
Return the LSS S molecular parameter
alpha: list(float)
Return the column interaction factor for column 1
References
----------
Lloyd R. Snyder, John W. Dolan
High-Performance Gradient Elution:
The Practical Application of the Linear-Solvent-Strength Model
ISBN: 978-0-471-70646-5
January 2007
"""
def __init__(self, c_length, c_diameter, c_porosity, t0, v_d, flow):
self.logkw = []
self.s = []
self.alpha = []
if c_length != None and c_diameter != None and c_porosity != None:
#Column Parameters
self.c_length = float(c_length)
self.c_diameter = float(c_diameter)
self.c_porosity = float(c_porosity)
self.v0 = ((square(self.c_diameter)*self.c_length*pi*self.c_porosity)/4.)/1000.
else:
self.v0 = None
#System Parameters
self.v_d = v_d # Dwell Volume
# Gradient Parameters
self.flow = flow
self.init_B = []
self.final_B = []
self.tg = []
self.trtab = [] #table of retention times
self.tr = [] #row of retention times
self.t0 = []
if c_length != None and c_diameter != None and c_porosity != None:
for i in range(len(self.flow)):
self.t0.append(self.v0/self.flow[i])
else:
self.t0 = t0
self.td = []
for i in range(len(self.v_d)):
self.td.append(self.v_d[i]/self.flow[i])
def rtpred(self, logkw, S, tg, init_B, final_B, alpha, t0, td):
#print logkw, S, tg, alpha, t0, td
if logkw != None and S != None and alpha > 0:
DeltaFi = final_B - init_B
b = (t0 * DeltaFi) / tg
if b > 0:
try:
kw = exp(logkw)
lnk0 = log(kw*alpha[0]) - S*alpha[1]*(init_B/100.)
k0 = exp(lnk0)
tr_pred = log(b*k0*S*t0+1)/(b*S*alpha) + t0 + td
return tr_pred
except:
return 9999
else:
return 9999
else:
return 9999
def iterfun(self, lss):
res = 0.
for i in range(len(self.tr)):
tr_pred = self.rtpred(lss[0], lss[1], self.tg[i], self.init_B[i], self.final_B[i], self.alpha[i%len(self.alpha)], self.t0[i%len(self.alpha)], self.td[i%len(self.alpha)])
res += square(self.tr[i]-tr_pred)
rmsd = sqrt(res)
return rmsd
def iterfunalpha(self, alpha):
""" Return the logKw and S parameters """
self.alpha = alpha
rmsd = 0.
for i in range(len(self.trtab)):
self.tr = self.trtab[i]
lssinit = [0.1, 0.1]
#simplex optimization
logkw, s = fmin(self.iterfun, lssinit, side=[0.1, 0.1], tol=1e-10)
#calculate retention time of all compounds with this alpha
sz_grad = len(self.flow)
for j in range(len(self.trtab[i])):
trpred = self.rtpred(logkw, s, self.tg[j % sz_grad], self.init_B[j % sz_grad], self.final_B[j % sz_grad], self.alpha[j % sz_grad], self.t0[j % sz_grad], self.td[j % sz_grad])
rmsd += square(self.trtab[i][j] - trpred)
print("%.2f %.2f [%f %f]") % (self.trtab[i][j], trpred, self.t0[j % sz_grad], self.td[j % sz_grad])
#print alpha
print ("-"*20)
sleep(1)
rmsd /= float(len(self.trtab))
rmsd = sqrt(rmsd)
return rmsd
def getlssparameters(self, trtab, tg, init_B, final_B, alpha):
self.trtab = trtab
self.tg = tg
self.init_B = init_B
self.final_B = final_B
alphainit = []
asides = []
for i in range(len(alpha)):
alphainit.append(1.0)
asides.append(0.1)
self.alpha = fmin(self.iterfunalpha, alphainit, side=asides, tol=1e-10)
for i in range(len(self.trtab)):
self.tr = trtab[i]
lssinit = [0.1, 0.1]
logkw, s = fmin(self.iterfun, lssinit, side=[0.1, 0.1], tol=1e-3)
self.logkw.append(logkw)
self.s.append(s)
return self.logkw, self.s, self.alpha
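# ---------------------------------------------------------------------------
# Editor's note: an illustrative sketch (not part of the original module) of
# fitting LSS parameters from two gradient runs on one column setup. All
# numbers are placeholders and the import assumes the pylss package is
# available with its optimizer module.
#
#   col = LSSColTrans(c_length=150, c_diameter=4.6, c_porosity=0.7,
#                     t0=None, v_d=[1.0, 1.0], flow=[1.0, 1.0])
#   logkw, s, alpha = col.getlssparameters(
#       trtab=[[12.3, 18.9], [8.7, 13.1]],     # retention times per compound
#       tg=[15.0, 30.0], init_B=[5.0, 5.0], final_B=[95.0, 95.0],
#       alpha=[1.0, 1.0])
# ---------------------------------------------------------------------------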
|
lgpl-3.0
| 8,786,438,165,989,413,000
| 30.005587
| 190
| 0.546126
| false
| 3.251318
| false
| false
| false
|
LACMTA/loader
|
ott/loader/otp/graph/run.py
|
1
|
2773
|
""" Run
"""
import sys
import time
import logging
log = logging.getLogger(__file__)
from ott.utils import otp_utils
from ott.utils import web_utils
from ott.utils.cache_base import CacheBase
class Run(CacheBase):
""" run OTP graph
"""
graphs = None
def __init__(self):
super(Run, self).__init__('otp')
self.graphs = otp_utils.get_graphs(self)
@classmethod
def get_args(cls):
''' run the OTP server
examples:
bin/otp_run -s call (run the call server)
bin/otp_run -v test (run the visualizer with the test graph)
'''
parser = otp_utils.get_initial_arg_parser()
parser.add_argument('--server', '-s', required=False, action='store_true', help="run 'named' graph in server mode")
parser.add_argument('--all', '-a', required=False, action='store_true', help="run all graphs in server mode")
parser.add_argument('--viz', '-v', required=False, action='store_true', help="run 'named' graph with the visualizer client")
parser.add_argument('--mem', '-lm', required=False, action='store_true', help="set the jvm heap memory for the graph")
args = parser.parse_args()
return args, parser
@classmethod
def run(cls):
#import pdb; pdb.set_trace()
success = False
r = Run()
args, parser = r.get_args()
graph = otp_utils.find_graph(r.graphs, args.name)
java_mem = "-Xmx1236m" if args.mem else None
if args.all or 'all' == args.name or 'a' == args.name:
success = True
for z in r.graphs:
print "running {}".format(z)
time.sleep(2)
s = otp_utils.run_otp_server(java_mem=java_mem, **z)
if s == False:
success = False
elif args.server:
success = otp_utils.run_otp_server(java_mem=java_mem, **graph)
elif args.viz:
success = otp_utils.vizualize_graph(graph_dir=graph['dir'], java_mem=java_mem)
else:
print "PLEASE select a option to either serve or vizualize graph {}".format(graph['name'])
parser.print_help()
return success
@classmethod
def static_server_cfg(cls):
r = Run()
port = r.config.get('port', 'web', '50080')
dir = r.config.get('dir', 'web', 'ott/loader/otp/graph')
return port, dir
@classmethod
def static_server(cls):
''' start a static web server serving the configured directory on the configured port
'''
success = False
port, dir = Run.static_server_cfg()
success = web_utils.background_web_server(dir, port)
return success
def main(argv=sys.argv):
Run.run()
if __name__ == '__main__':
main()
|
mpl-2.0
| 7,585,301,656,605,814,000
| 31.244186
| 136
| 0.567616
| false
| 3.610677
| false
| false
| false
|
Juan-Mateos/coll_int_ai_case
|
notebooks/sankey.py
|
1
|
7090
|
# -*- coding: utf-8 -*-
"""
Produces simple Sankey Diagrams with matplotlib.
@author: Anneya Golob & marcomanz & pierre-sassoulas
.-.
.--.( ).--.
<-. .-.-.(.-> )_ .--.
`-`( )-' `) )
(o o ) `)`-'
( ) ,)
( () ) )
`---"\ , , ,/`
`--' `--' `--'
| | | |
| | | |
' | ' |
"""
from collections import defaultdict
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns
def sankey(left, right, leftWeight=None, rightWeight=None, colorDict=None,
leftLabels=None, rightLabels=None, aspect=4, rightColor=False,
fontsize=14, figure_name=None, closePlot=False):
'''
Make Sankey Diagram showing flow from left-->right
Inputs:
left = NumPy array of object labels on the left of the diagram
right = NumPy array of corresponding labels on the right of the diagram
len(right) == len(left)
leftWeight = NumPy array of weights for each strip starting from the
left of the diagram, if not specified 1 is assigned
rightWeight = NumPy array of weights for each strip starting from the
right of the diagram, if not specified the corresponding leftWeight
is assigned
colorDict = Dictionary of colors to use for each label
{'label':'color'}
leftLabels = order of the left labels in the diagram
rightLabels = order of the right labels in the diagram
aspect = vertical extent of the diagram in units of horizontal extent
rightColor = If true, each strip in the diagram will be colored
according to its left label
Output:
None
'''
if leftWeight is None:
leftWeight = []
if rightWeight is None:
rightWeight = []
if leftLabels is None:
leftLabels = []
if rightLabels is None:
rightLabels = []
# Check weights
if len(leftWeight) == 0:
leftWeight = np.ones(len(left))
if len(rightWeight) == 0:
rightWeight = leftWeight
plt.figure()
plt.rc('text', usetex=False)
plt.rc('font', family='serif')
# Create Dataframe
df = pd.DataFrame({'left': left, 'right': right, 'leftWeight': leftWeight,
'rightWeight': rightWeight}, index=range(len(left)))
# Identify all labels that appear 'left' or 'right'
allLabels = pd.Series(np.r_[df.left.unique(), df.right.unique()]).unique()
# Identify left labels
if len(leftLabels) == 0:
leftLabels = pd.Series(df.left.unique()).unique()
# Identify right labels
if len(rightLabels) == 0:
rightLabels = pd.Series(df.right.unique()).unique()
# If no colorDict given, make one
if colorDict is None:
colorDict = {}
pal = "hls"
cls = sns.color_palette(pal, len(allLabels))
for i, l in enumerate(allLabels):
colorDict[l] = cls[i]
# Determine widths of individual strips
ns_l = defaultdict()
ns_r = defaultdict()
for l in leftLabels:
myD_l = {}
myD_r = {}
for l2 in rightLabels:
myD_l[l2] = df[(df.left == l) & (df.right == l2)].leftWeight.sum()
myD_r[l2] = df[(df.left == l) & (df.right == l2)].rightWeight.sum()
ns_l[l] = myD_l
ns_r[l] = myD_r
# Determine positions of left label patches and total widths
widths_left = defaultdict()
for i, l in enumerate(leftLabels):
myD = {}
myD['left'] = df[df.left == l].leftWeight.sum()
if i == 0:
myD['bottom'] = 0
myD['top'] = myD['left']
else:
myD['bottom'] = widths_left[leftLabels[i - 1]]['top'] + 0.02 * df.leftWeight.sum()
myD['top'] = myD['bottom'] + myD['left']
topEdge = myD['top']
widths_left[l] = myD
# Determine positions of right label patches and total widths
widths_right = defaultdict()
for i, l in enumerate(rightLabels):
myD = {}
myD['right'] = df[df.right == l].rightWeight.sum()
if i == 0:
myD['bottom'] = 0
myD['top'] = myD['right']
else:
myD['bottom'] = widths_right[rightLabels[i - 1]]['top'] + 0.02 * df.rightWeight.sum()
myD['top'] = myD['bottom'] + myD['right']
topEdge = myD['top']
widths_right[l] = myD
# Total vertical extent of diagram
xMax = topEdge / aspect
# Draw vertical bars on left and right of each label's section & print label
for l in leftLabels:
plt.fill_between(
[-0.02 * xMax, 0],
2 * [widths_left[l]['bottom']],
2 * [widths_left[l]['bottom'] + widths_left[l]['left']],
color=colorDict[l],
alpha=0.99
)
plt.text(
-0.05 * xMax,
widths_left[l]['bottom'] + 0.5 * widths_left[l]['left'],
l,
{'ha': 'right', 'va': 'center'},
fontsize=fontsize
)
for l in rightLabels:
plt.fill_between(
[xMax, 1.02 * xMax], 2 * [widths_right[l]['bottom']],
2 * [widths_right[l]['bottom'] + widths_right[l]['right']],
color=colorDict[l],
alpha=0.99
)
plt.text(
1.05 * xMax, widths_right[l]['bottom'] + 0.5 * widths_right[l]['right'],
l,
{'ha': 'left', 'va': 'center'},
fontsize=fontsize
)
# Plot strips
for l in leftLabels:
for l2 in rightLabels:
lc = l
if rightColor:
lc = l2
if len(df[(df.left == l) & (df.right == l2)]) > 0:
# Create array of y values for each strip, half at left value, half at right, convolve
ys_d = np.array(50 * [widths_left[l]['bottom']] + 50 * [widths_right[l2]['bottom']])
ys_d = np.convolve(ys_d, 0.05 * np.ones(20), mode='valid')
ys_d = np.convolve(ys_d, 0.05 * np.ones(20), mode='valid')
ys_u = np.array(50 * [widths_left[l]['bottom'] + ns_l[l][l2]] + 50 * [widths_right[l2]['bottom'] + ns_r[l][l2]])
ys_u = np.convolve(ys_u, 0.05 * np.ones(20), mode='valid')
ys_u = np.convolve(ys_u, 0.05 * np.ones(20), mode='valid')
# Update bottom edges at each label so next strip starts at the right place
widths_left[l]['bottom'] += ns_l[l][l2]
widths_right[l2]['bottom'] += ns_r[l][l2]
plt.fill_between(
np.linspace(0, xMax, len(ys_d)), ys_d, ys_u, alpha=0.65,
color=colorDict[lc]
)
plt.gca().axis('off')
plt.gcf().set_size_inches(6, 6)
if figure_name!=None:
plt.savefig("{}.png".format(figure_name), bbox_inches='tight', dpi=150)
if closePlot:
plt.close()
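# ---------------------------------------------------------------------------
# Editor's note: minimal usage example (illustrative only, not part of the
# original module); labels and weights are made up.
#
#   import numpy as np
#   left = np.array(['A', 'A', 'B'])
#   right = np.array(['X', 'Y', 'Y'])
#   sankey(left, right, leftWeight=np.array([3.0, 1.0, 2.0]),
#          aspect=20, fontsize=12, figure_name='demo_sankey')
# ---------------------------------------------------------------------------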
|
mit
| 4,818,465,458,635,935,000
| 35.546392
| 128
| 0.517772
| false
| 3.568193
| false
| false
| false
|
wooga/airflow
|
airflow/example_dags/example_external_task_marker_dag.py
|
1
|
3327
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
Example DAG demonstrating setting up inter-DAG dependencies using ExternalTaskSensor and
ExternalTaskMarker
In this example, child_task1 in example_external_task_marker_child depends on parent_task in
example_external_task_marker_parent. When parent_task is cleared with "Recursive" selected,
the presence of ExternalTaskMarker tells Airflow to clear child_task1 and its
downstream tasks.
ExternalTaskSensor will keep poking for the status of the remote ExternalTaskMarker task at a regular
interval until one of the following happens:
1. ExternalTaskMarker reaches the states mentioned in the allowed_states list
In this case, ExternalTaskSensor will exit with a success status code
2. ExternalTaskMarker reaches the states mentioned in the failed_states list
In this case, ExternalTaskSensor will raise an AirflowException and the user needs to handle this
with multiple downstream tasks
3. ExternalTaskSensor times out
In this case, ExternalTaskSensor will raise AirflowSkipException or AirflowSensorTimeout
exception
"""
import datetime
from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.sensors.external_task_sensor import ExternalTaskMarker, ExternalTaskSensor
start_date = datetime.datetime(2015, 1, 1)
with DAG(
dag_id="example_external_task_marker_parent",
start_date=start_date,
schedule_interval=None,
tags=['example'],
) as parent_dag:
# [START howto_operator_external_task_marker]
parent_task = ExternalTaskMarker(task_id="parent_task",
external_dag_id="example_external_task_marker_child",
external_task_id="child_task1")
# [END howto_operator_external_task_marker]
with DAG(
dag_id="example_external_task_marker_child",
start_date=start_date,
schedule_interval=None,
tags=['example'],
) as child_dag:
# [START howto_operator_external_task_sensor]
child_task1 = ExternalTaskSensor(task_id="child_task1",
external_dag_id=parent_dag.dag_id,
external_task_id=parent_task.task_id,
timeout=600,
allowed_states=['success'],
failed_states=['failed', 'skipped'],
mode="reschedule")
# [END howto_operator_external_task_sensor]
child_task2 = DummyOperator(task_id="child_task2")
child_task1 >> child_task2
|
apache-2.0
| -6,449,447,133,075,926,000
| 42.776316
| 97
| 0.704839
| false
| 4.287371
| false
| false
| false
|
gonicus/gosa
|
client/src/gosa/client/mqtt_service.py
|
1
|
10323
|
# This file is part of the GOsa project.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
"""
The *MQTTClientService* is responsible for connecting the *client* to the MQTT
bus, registers the required queues, listens for commands on that queues
and dispatches incoming commands to the :class:`clacks.client.command.CommandRegistry`.
**Queues**
Every client has an individual queue. It is constructed of these components::
{domain}.client.{uuid}
The client can publish and subscribe to this queue.
There is another broadcasting queue for all clients::
{domain}.client.broadcast
The client can subscribe to this queue, but only the server can publish to that queue.
**Events**
The gosa client produces a **ClientAnnounce** event on startup which tells
the backend about the client capabilities (commands it provides) and
some hardware information.
This information is re-sent when the client receives a **ClientPoll** event,
which is generated by the backend.
On client shutdown, a **ClientLeave** is emitted to tell the backend that
the client has passed away.
"""
import sys
import netifaces #@UnresolvedImport
import traceback
import logging
import random
import time
import zope.event
import datetime
from lxml import objectify, etree
from threading import Timer
from netaddr import IPNetwork
from zope.interface import implementer
from gosa.common.gjson import loads, dumps
from gosa.common.components.jsonrpc_utils import BadServiceRequest
from gosa.common.handler import IInterfaceHandler
from gosa.common.components.registry import PluginRegistry
from gosa.common.event import EventMaker
from gosa.common import Environment
from gosa.client.event import Resume
@implementer(IInterfaceHandler)
class MQTTClientService(object):
"""
Internal class to serve all available queues and commands to
the MQTT broker.
"""
_priority_ = 10
# Time instance that helps us preventing re-announce-event flooding
time_obj = None
time_int = 3
client = None
__last_announce = None
_ping_job = None
def __init__(self):
env = Environment.getInstance()
self.log = logging.getLogger(__name__)
self.log.debug("initializing MQTT service provider")
self.env = env
self.__cr = None
e = EventMaker()
self.goodbye = e.Event(e.ClientLeave(
e.Id(Environment.getInstance().uuid)
))
def _handle_message(self, topic, message):
if message[0:1] == "{":
# RPC command
self.commandReceived(topic, message)
else:
# event received
try:
xml = objectify.fromstring(message)
if hasattr(xml, "ClientPoll"):
self.__handleClientPoll()
else:
self.log.debug("unhandled event received '%s'" % xml.getchildren()[0].tag)
except etree.XMLSyntaxError as e:
self.log.error("Message parsing error: %s" % e)
def serve(self):
""" Start MQTT service for this gosa service provider. """
# Load MQTT and Command registry instances
self.client = PluginRegistry.getInstance('MQTTClientHandler')
self.client.get_client().add_connection_listener(self._on_connection_change)
self.__cr = PluginRegistry.getInstance('ClientCommandRegistry')
self.client.set_subscription_callback(self._handle_message)
def _on_connection_change(self, connected):
if connected is True:
if self.__last_announce is None or self.__last_announce < (datetime.datetime.now() - datetime.timedelta(minutes=5)):
self.__announce(send_client_announce=True, send_user_session=True)
# Send a ping on a regular base
if self._ping_job is None:
timeout = float(self.env.config.get('client.ping-interval', default=600))
sched = PluginRegistry.getInstance("SchedulerService").getScheduler()
self._ping_job = sched.add_interval_job(self.__ping, seconds=timeout, start_date=datetime.datetime.now() + datetime.timedelta(seconds=1))
else:
if self._ping_job is not None:
sched = PluginRegistry.getInstance("SchedulerService").getScheduler()
sched.unschedule_job(self._ping_job)
self._ping_job = None
def stop(self):
self.client.send_event(self.goodbye, qos=1)
self.client.close()
def __ping(self):
e = EventMaker()
info = e.Event(e.ClientPing(e.Id(self.env.uuid)))
self.client.send_event(info)
def reAnnounce(self):
"""
Re-announce signature changes to the agent.
This method waits a given amount of time and then sends re-sends
the client method-signatures.
"""
if self.__cr:
# Cancel running jobs
if self.time_obj:
self.time_obj.cancel()
self.time_obj = Timer(self.time_int, self._reAnnounce)
self.time_obj.start()
def _reAnnounce(self):
"""
Re-announces the client signatures
"""
self.__announce(send_client_announce=False, send_user_session=False)
def commandReceived(self, topic, message):
"""
Process incoming commands, coming in with session and message
information.
================= ==========================
Parameter Description
================= ==========================
message Received MQTT message
================= ==========================
Incoming messages are coming from an
:class:`gosa.common.components.mqtt_proxy.MQTTServiceProxy`. The command
result is written to the '<domain>.client.<client-uuid>' queue.
"""
err = None
res = None
name = None
args = None
kwargs = None
id_ = ''
response_topic = "%s/response" % "/".join(topic.split("/")[0:4])
try:
req = loads(message)
except Exception as e:
err = str(e)
self.log.error("ServiceRequestNotTranslatable: %s" % err)
req = {'id': topic.split("/")[-2]}
if err is None:
try:
id_ = req['id']
name = req['method']
args = req['params']
kwargs = req['kwparams']
except KeyError as e:
self.log.error("KeyError: %s" % e)
err = str(BadServiceRequest(message))
self.log.debug("received call [%s] for %s: %s(%s,%s)" % (id_, topic, name, args, kwargs))
# Try to execute
if err is None:
try:
res = self.__cr.dispatch(name, *args, **kwargs)
except Exception as e:
err = str(e)
# Write exception to log
exc_type, exc_value, exc_traceback = sys.exc_info()
self.log.error(traceback.format_exception(exc_type, exc_value, exc_traceback))
self.log.debug("returning call [%s]: %s / %s" % (id_, res, err))
response = dumps({"result": res, "id": id_})
# Get rid of it...
self.client.send_message(response, topic=response_topic)
def __handleClientPoll(self):
delay = random.randint(0, 30)
self.log.debug("received client poll - will answer in %d seconds" % delay)
time.sleep(delay)
self.__announce(send_client_announce=True, send_user_session=True)
# Send a resume to all registered plugins
zope.event.notify(Resume())
def __announce(self, send_client_announce=False, send_user_session=True):
e = EventMaker()
# Assemble network information
more = []
netinfo = []
self.__last_announce = datetime.datetime.now()
for interface in netifaces.interfaces():
i_info = netifaces.ifaddresses(interface)
# Skip lo interfaces
if not netifaces.AF_INET in i_info:
continue
# Skip lo interfaces
if not netifaces.AF_LINK in i_info:
continue
if i_info[netifaces.AF_LINK][0]['addr'] == '00:00:00:00:00:00':
continue
# Assemble ipv6 information
ip6 = ""
if netifaces.AF_INET6 in i_info:
ip = IPNetwork("%s/%s" % (i_info[netifaces.AF_INET6][0]['addr'].split("%", 1)[0],
i_info[netifaces.AF_INET6][0]['netmask']))
ip6 = str(ip)
netinfo.append(
e.NetworkDevice(
e.Name(interface),
e.IPAddress(i_info[netifaces.AF_INET][0]['addr']),
e.IPv6Address(ip6),
e.MAC(i_info[netifaces.AF_LINK][0]['addr']),
e.Netmask(i_info[netifaces.AF_INET][0]['netmask']),
e.Broadcast(i_info[netifaces.AF_INET][0]['broadcast'])))
more.append(e.NetworkInformation(*netinfo))
# Build event
if send_client_announce is True:
info = e.Event(
e.ClientAnnounce(
e.Id(self.env.uuid),
e.Name(self.env.id),
*more))
self.client.send_event(info, qos=1)
# Assemble capabilities
more = []
caps = []
for command, dsc in self.__cr.commands.items():
caps.append(
e.ClientMethod(
e.Name(command),
e.Path(dsc['path']),
e.Signature(','.join(dsc['sig'])),
e.Documentation(dsc['doc'])))
more.append(e.ClientCapabilities(*caps))
info = e.Event(
e.ClientSignature(
e.Id(self.env.uuid),
e.Name(self.env.id),
*more))
self.client.send_event(info, qos=1)
if send_user_session is True:
try:
sk = PluginRegistry.getInstance('SessionKeeper')
sk.sendSessionNotification()
except: # pragma: nocover
pass
|
lgpl-2.1
| -6,830,260,586,721,705,000
| 33.182119
| 153
| 0.576577
| false
| 4.130852
| false
| false
| false
|
YacineKhamis/Polymerge
|
polymerge.py
|
1
|
2413
|
import sys
import os
import binascii
import zipfile
import argparse
def createZipFile(fileToArchive):
path = fileToArchive + '.zip'
with zipfile.ZipFile(path, mode='w') as myZip:
myZip.write(fileToArchive, arcname=fileToArchive.split('/')[-1])
def stats(fileToAnalyze):
return os.stat(fileToAnalyze)
def appendTo(fileCombined, fileToAppend, pathToOutputFile):
f1 = open(fileCombined, 'rb')
fileData = f1.read()
f1.close()
f2 = open(fileToAppend, 'rb')
toAppendData = f2.read()
f2.close()
output = open(pathToOutputFile, 'wb')
output.write(fileData)
output.write(toAppendData)
output.close()
def printHexa(fileToRead):
"""
    Print the content of the file passed as parameter in a user-friendly way.
    View inspired by modern hex editors: numbered lines, each containing 16 bytes.
"""
with open(fileToRead, 'rb') as binFile:
binFile.seek(0, 2)
numberBytes = binFile.tell()
j = 0
print('')
print('_____________________________________________________________')
for i in range(numberBytes):
if i % 16 == 0:
print('')
j += 1
print(format(j, '02X') + " : ", end='')
binFile.seek(i, 0)
data = binFile.read(1)
text = binascii.hexlify(data)
print(text.decode('utf-8'), end=' ')
print('')
print('__________________________________________________________________')
def MergingProcess(frontFile, toHideFile, outputFilename):
createZipFile(toHideFile)
appendTo(frontFile, toHideFile, outputFilename)
os.remove(toHideFile + '.zip')
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Polymerge script. Output a file that preserves its properties and \
    embeds another file as a Zip Archive.')
parser.add_argument('facadeFile')
parser.add_argument('hiddenFile')
parser.add_argument('-o', '--output')
parser.add_argument('-p', '--printFile', action="store_true")
args = parser.parse_args()
if args.printFile:
printHexa(args.facadeFile)
#printHexa(args.hiddenFile)
if args.output:
MergingProcess(args.facadeFile, args.hiddenFile, args.output.split('/')[-1])
else:
MergingProcess(args.facadeFile, args.hiddenFile, 'Polymerged_' + args.facadeFile.split('/')[-1])
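# Example invocations (file names are illustrative, based on the arguments defined above):
#   python polymerge.py photo.jpg secret.txt -o combined.jpg   -> writes combined.jpg
#   python polymerge.py photo.jpg secret.txt                   -> writes Polymerged_photo.jpg
#   python polymerge.py photo.jpg secret.txt -p                 -> also prints photo.jpg as hex before merging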
|
gpl-3.0
| 6,495,256,162,737,528,000
| 29.935897
| 116
| 0.593038
| false
| 3.776213
| false
| false
| false
|
koakumaping/simple-blog
|
mysite.py
|
1
|
3305
|
#coding=utf-8
import web
import markdown
import model
from settings import *
from admin import check_login
########################################################################
class redirect:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
#----------------------------------------------------------------------
def GET(self):
""""""
web.seeother('/blog/1')
########################################################################
class index:
"""SHow Home Page"""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
pass
#----------------------------------------------------------------------
def GET(self):
""""""
active = 1
context = "Welcom to my Blog."
return model.render_template('main.html', context = context, active = active)
########################################################################
class show_scrap_all:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
#----------------------------------------------------------------------
def GET(self, id):
""""""
active = 2
NavNum = 7
id = int(id)
if id is None:
id = 1
results = db.query("SELECT COUNT(*) AS numbers FROM scrap WHERE is_deleted = 0")
pages_all = results[0].numbers
if pages_all % NavNum == 0:
pages = pages_all / NavNum
else:
pages = pages_all / NavNum + 1
offset = (id - 1) * NavNum
scrap = db.select('scrap', where = 'is_deleted = 0', limit=NavNum, offset = offset, order = 'id desc')
if len(scrap) == 0:
return 'No scrap!'
return model.render_template('blog/index.html', scrap = scrap, pages = pages, active = active, id = id)
########################################################################
class show_scrap:
""""""
#----------------------------------------------------------------------
def __init__(self):
"""Constructor"""
pass
#----------------------------------------------------------------------
def GET(self, title):
""""""
active = 2
#if web.ctx.ip != '127.0.0.1':
try:
results = db.select('scrap', what = 'file_type,counter,content', where = 'title = $title and is_deleted = 0', vars = locals())
results = results[0]
path = results.file_type
counter = results.counter
content = results.content
if 'md' in path:
scrap = markdown.markdown(content, extensions=['markdown.extensions.extra'])
#scrap = model.md2html(path)
else:
scrap = content
db.query('UPDATE scrap SET scrap.counter=scrap.counter+1 WHERE title=$title', vars = locals())
except Exception as e:
print str(e)
return "Markdown file not found!"
return model.render_template('blog/show_scrap.html', scrap = scrap, active = active, counter = counter)
|
gpl-2.0
| 1,463,311,195,832,444,700
| 27.747826
| 138
| 0.378215
| false
| 5.156006
| false
| false
| false
|
simonkrogmann/planets
|
gui/scrollbar.py
|
1
|
3472
|
# -*- coding: cp1252 -*-
import Tkinter
class Scrollbar:
"""stellt eine Scrollbar für ein Canvas-Objekt bereit,
Parent muss dieses Canvas-Objekt als Attribut besitzen."""
def __init__(self, Parent, X):
self.Parent = Parent
self.X = X
self.Bar = self.Parent.Canvas.create_rectangle(0, 0, 2, 2, state = Tkinter.HIDDEN,
fill = "#444444", outline = "")
self.Parent.Canvas.bind("<Configure>", self.UpdateRegion)
self.Parent.Canvas.bind("<MouseWheel>", self.Wheel)
self.Parent.Canvas.tag_bind(self.Bar, "<Button-1>", self.ScrollBegin)
self.Parent.Canvas.tag_bind(self.Bar, "<B1-Motion>", self.ScrollMotion)
self.Scrolling = False
def Wheel(self, e):
"""scrollt die Ansicht entsprechend der Mausradbewegung"""
if self.Scrollable:
self.Parent.Canvas.yview_scroll(-e.delta/120, "units")
if self.Parent.Active:
self.Parent.Active[0].Motion(e)
self.UpdateBar()
def Offset(self):
"""gibt die Höhe des Bereiches zurück, der nach oben aus der Ansicht herausgescrollt ist"""
return self.Parent.Canvas.yview()[0] * self.Region
def UpdateRegion(self, e = None):
"""aktualisiert den scrollbaren Bereich"""
# Die Zahlen, die in dieser Methode addiert,
# werden gleichen Ungenauigkeiten im Canvas-Objekt aus.
# ein vorhandenes e weist auf Aufruf durch "configure"-event hin
# und eine Höhenveränderung des Canvas hin
if e:
self.Height = e.height + 8
        # determine the required height of the list
self.Region = self.Parent.Height() + 1
        # check whether a scrollbar is needed
if self.Region + 3 <= self.Height:
self.Parent.Canvas.config(scrollregion = (0, 0, 0, self.Height - 8))
self.Scrollable = False
self.Show(0)
self.Parent.Canvas.itemconfig(self.Bar, state = Tkinter.HIDDEN)
else:
self.Scrollable = True
self.Parent.Canvas.itemconfig(self.Bar, state = Tkinter.NORMAL)
self.Parent.Canvas.config(scrollregion = (0, 0, 0, self.Region))
self.UpdateBar()
def UpdateBar(self):
"""zeichnet die Scrollbar neu"""
Position = self.Parent.Canvas.yview()
Begin = self.Height * Position[0] + self.Offset()
End = self.Height * Position[1] + self.Offset()
self.Parent.Canvas.coords(self.Bar, self.X - 11, Begin, self.X - 3, End)
self.Parent.Canvas.tag_raise(self.Bar)
def ScrollBegin(self, e):
"""speichert die Position des Mausklicks beim Beginnen des Scrollens"""
if self.Scrollable:
self.DragHeight = float(e.y) / self.Height - self.Parent.Canvas.yview()[0]
def ScrollMotion(self, e):
"""zieht die neue Mausposition von der gepeicherten ab und
legt danach die Scrollrichtung und -weite fest"""
if self.Scrollable:
self.Parent.Canvas.yview_moveto(float(e.y) / self.Height - self.DragHeight)
self.UpdateBar()
def Show(self, Position):
"""scrollt zum Listenelement mit dem Index Position"""
if self.Scrollable:
self.Parent.Canvas.yview_moveto(Position / float(self.Region))
self.UpdateBar()
def Raise(self):
"""zeigt die Scrollbar im Vordergrund an"""
self.Parent.Canvas.tag_raise(self.Bar)
|
mit
| -3,531,107,996,626,620,400
| 40.831325
| 99
| 0.613767
| false
| 3.37415
| true
| false
| false
|
globocom/database-as-a-service
|
dbaas/dashboard/views.py
|
1
|
2317
|
import logging
from django.template import RequestContext
from django.shortcuts import render_to_response
from django.contrib.auth.decorators import login_required
from django.core.paginator import Paginator, EmptyPage, InvalidPage
from physical.models import DatabaseInfra
from logical.models import Database
from util import get_credentials_for
from dbaas_credentials.models import CredentialType
from physical.models import Environment
LOG = logging.getLogger(__name__)
@login_required
def dashboard(request):
env_id = request.GET.get('env_id')
engine_type = request.GET.get('engine_type')
dbinfra_list = DatabaseInfra.objects.all().order_by('name')
url_par = "?"
if env_id or engine_type:
if env_id:
url_par += "env_id=" + str(env_id) + "&"
dbinfra_list = dbinfra_list.filter(environment__id=env_id)
if engine_type:
url_par += "engine_type=" + str(engine_type) + "&"
dbinfra_list = dbinfra_list.filter(engine__engine_type__name=engine_type)
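    # e.g. url_par ends up as '?env_id=1&engine_type=mysql&' (illustrative values)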
paginator = Paginator(dbinfra_list,100)
try:
page = int(request.GET.get('page','1'))
except:
page = 1
try:
dbinfra = paginator.page(page)
except(EmptyPage, InvalidPage):
dbinfra = paginator.page(paginator.num_pages)
return render_to_response("dashboard/dashboard.html", {'dbinfra': dbinfra, 'url_par': url_par}, context_instance=RequestContext(request))
@login_required
def databaseinfra(request, infra_id):
dbinfra = DatabaseInfra.objects.get(pk=infra_id)
databases = Database.objects.filter(databaseinfra=dbinfra)
return render_to_response("dashboard/databaseinfra.html", {'infra': dbinfra, 'databases': databases}, context_instance=RequestContext(request))
@login_required
def sofia_dashboard(request):
credential = get_credentials_for(
environment=Environment.objects.first(),
credential_type=CredentialType.GRAFANA
)
sofia_dashboard = "{}/{}?var-datasource={}".format(
credential.endpoint,
credential.get_parameter_by_name('sofia_dbaas_dashboard'),
credential.get_parameter_by_name('datasource')
)
return render_to_response("dashboard/sofia_dashboard.html", {'sofia_dashboard':sofia_dashboard}, context_instance=RequestContext(request))
|
bsd-3-clause
| -7,126,134,115,814,491,000
| 34.646154
| 147
| 0.702201
| false
| 3.677778
| false
| false
| false
|
mpfeppat/mpfeppat
|
tools/simplegraph.py
|
1
|
1860
|
# -*- coding: utf-8 -*-
import random
import numpy
def createCGraph(n):
Edges = []
Vertices = []
counter = n
while counter > 0:
if Vertices == []:
v0 = vertex('0')
v1 = vertex('1')
e0 = edge(v0,v1)
Vertices.append(v0)
Vertices.append(v1)
Edges.append(e0)
else :
vs = random.choice(Vertices)
ve = random.choice(Vertices)
while ve == vertex('0') :
ve = random.choice(Vertices)
e = edge(vs,ve)
prob = random.randint(0,100)
if vs == ve or e in Edges or prob > 75 :
l = len(Vertices)
name = str(l)
nv = vertex(name)
ne = edge(vs,nv)
Vertices.append(nv)
Edges.append(ne)
else :
Edges.append(e)
counter = counter - 1
k = len(Vertices)
M = numpy.zeros((k,k),dtype = object)
for ed in Edges:
vs = int(ed.startingvertex.name)
ve = int(ed.endingvertex.name)
M[vs,ve] = 1
return Vertices, Edges, M
class vertex:
def __init__(self,name):
self.name = name
def __eq__(self,other):
return self.name == other.name
def __str__(self):
return self.name
def __repr__(self):
return self.name
class edge:
def __init__(self,startingvertex,endingvertex):
self.startingvertex = startingvertex
self.endingvertex = endingvertex
def __eq__(self,other):
return self.startingvertex == other.startingvertex and self.endingvertex == other.endingvertex
def __str__(self):
return self.startingvertex.name+'-->'+self.endingvertex.name
def __repr__(self):
return self.startingvertex.name+'-->'+self.endingvertex.name
|
apache-2.0
| 7,364,874,894,004,989,000
| 25.571429
| 102
| 0.52043
| false
| 3.803681
| false
| false
| false
|
anselmobd/fo2
|
src/manutencao/migrations/0005_maquina.py
|
1
|
1069
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2019-07-12 18:49
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('manutencao', '0004_fo2_man_unidade_tempo_loaddata'),
]
operations = [
migrations.CreateModel(
name='Maquina',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('nome', models.CharField(db_index=True, max_length=50)),
('slug', models.SlugField()),
('descricao', models.CharField(max_length=250, verbose_name='Descrição')),
('tipo_maquina', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='manutencao.TipoMaquina', verbose_name='Tipo de máquina')),
],
options={
'verbose_name': 'Máquina',
'db_table': 'fo2_man_maquina',
},
),
]
|
mit
| 3,103,723,040,811,429,400
| 34.5
| 158
| 0.578404
| false
| 3.659794
| false
| false
| false
|
RedHatQE/cfme_tests
|
cfme/tests/containers/test_reports.py
|
1
|
15145
|
# -*- coding: utf-8 -*-
import re
from traceback import format_exc
import pytest
from wrapanapi.utils import eval_strings
from cfme.containers.provider import ContainersProvider
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.utils.wait import TimedOutError
pytestmark = [
pytest.mark.usefixtures('setup_provider'),
pytest.mark.meta(
server_roles='+ems_metrics_coordinator +ems_metrics_collector +ems_metrics_processor'),
pytest.mark.tier(1),
pytest.mark.long_running_env,
pytest.mark.provider([ContainersProvider], scope='function')
]
@pytest.fixture(scope='module')
def node_hardwares_db_data(appliance):
"""Grabbing hardwares table data for nodes"""
db = appliance.db.client
hardwares_table = db['hardwares']
container_nodes = db['container_nodes']
out = {}
for node in db.session.query(container_nodes).all():
out[node.name] = hardwares_table.__table__.select().where(
hardwares_table.id == node.id
).execute().fetchone()
return out
def get_vpor_data_by_name(vporizer_, name):
return [vals for vals in vporizer_ if vals.resource_name == name]
def get_report(appliance, menu_name, candu=False):
"""Queue a report by menu name , wait for finish and return it"""
try:
saved_report = appliance.collections.reports.instantiate(
type='Configuration Management',
subtype='Containers',
menu_name=menu_name,
is_candu=candu
).queue(wait_for_finish=True)
except TimedOutError:
pytest.skip('Could not find report "{}" in containers.\nTraceback:\n{}'
.format(menu_name, format_exc()))
return saved_report
def test_container_reports_base_on_options(soft_assert, appliance):
"""This test verifies that all containers options are available in the report 'based on'
Dropdown in the report creation
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
view = navigate_to(appliance.collections.reports, 'Add')
for base_on in (
'Chargeback for Images',
'Container Images',
'Container Services',
'Container Templates',
'Containers',
re.compile(r'Performance - Container\s*Nodes'),
re.compile(r'Performance - Container\s*Projects'),
'Performance - Containers'
):
compare = (base_on.match if hasattr(base_on, 'match') else base_on.__eq__)
option = [opt for opt in view.base_report_on.all_options
if compare(str(opt.text))]
soft_assert(option, 'Could not find option "{}" for base report on.'.format(base_on))
def test_report_pods_per_ready_status(appliance, soft_assert, provider):
"""Testing 'Pods per Ready Status' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
pods_per_ready_status = provider.pods_per_ready_status()
report = get_report(appliance, 'Pods per Ready Status')
for row in report.data.rows:
name = row['# Pods per Ready Status']
readiness_ui = bool(eval_strings([row['Ready Condition Status']]).pop())
if soft_assert(name in pods_per_ready_status, # this check based on BZ#1435958
'Could not find pod "{}" in openshift.'
.format(name)):
expected_readiness = bool(all(pod for pod in pods_per_ready_status.get(name, False)))
soft_assert(expected_readiness == readiness_ui,
'For pod "{}" expected readiness is "{}" Found "{}"'
.format(name, expected_readiness, readiness_ui))
def test_report_nodes_by_capacity(appliance, soft_assert, node_hardwares_db_data):
"""Testing 'Nodes By Capacity' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By Capacity')
for row in report.data.rows:
hw = node_hardwares_db_data[row['Name']]
soft_assert(hw.cpu_total_cores == int(row['CPU Cores']),
'Number of CPU cores is wrong: expected {}'
' got {}'.format(hw.cpu_total_cores, row['CPU Cores']))
# The following block is to convert whatever we have to MB
memory_ui = float(re.sub(r'[a-zA-Z,]', '', row['Memory']))
if 'gb' in row['Memory'].lower():
memory_mb_ui = memory_ui * 1024
# Shift hw.memory_mb to GB, round to the number of decimals of memory_mb_db
# and shift back to MB:
memory_mb_db = round(hw.memory_mb / 1024.0,
len(str(memory_mb_ui).split('.')[1])) * 1024
else: # Assume it's MB
memory_mb_ui = memory_ui
memory_mb_db = hw.memory_mb
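        # Worked example (illustrative numbers): a UI value of '15.63 GB' gives
        # memory_mb_ui = 15.63 * 1024 = 16005.12; with hw.memory_mb = 16008 the DB side
        # becomes round(16008 / 1024.0, 2) * 1024 = 15.63 * 1024 = 16005.12, so both match.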
soft_assert(memory_mb_ui == memory_mb_db,
'Memory (MB) is wrong for node "{}": expected {} got {}'
.format(row['Name'], memory_mb_ui, memory_mb_db))
def test_report_nodes_by_cpu_usage(appliance, soft_assert, vporizer):
"""Testing 'Nodes By CPU Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By CPU Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_cpu_usage_rate_average, 2)
usage_report = round(float(row['CPU Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for node "{}": expected {} got {}'
.format(row['Name'], usage_db, usage_report))
def test_report_nodes_by_memory_usage(appliance, soft_assert, vporizer):
"""Testing 'Nodes By Memory Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes By Memory Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_mem_usage_absolute_average, 2)
usage_report = round(float(row['Memory Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for node "{}": expected {} got {}.'
.format(row['Name'], usage_db, usage_report))
def test_report_number_of_nodes_per_cpu_cores(appliance, soft_assert, node_hardwares_db_data):
"""Testing 'Number of Nodes per CPU Cores' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Nodes by Number of CPU Cores')
for row in report.data.rows:
hw = node_hardwares_db_data[row['Name']]
soft_assert(hw.cpu_total_cores == int(row['Hardware Number of CPU Cores']),
'Hardware Number of CPU Cores is wrong for node "{}": expected {} got {}.'
.format(row['Name'], hw.cpu_total_cores, row['Hardware Number of CPU Cores']))
def test_report_projects_by_number_of_pods(appliance, soft_assert):
"""Testing 'Projects by Number of Pods' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
container_projects = appliance.db.client['container_projects']
container_pods = appliance.db.client['container_groups']
report = get_report(appliance, 'Projects by Number of Pods')
for row in report.data.rows:
pods_count = len(container_pods.__table__.select().where(
container_pods.container_project_id ==
container_projects.__table__.select().where(
container_projects.name == row['Project Name']).execute().fetchone().id
).execute().fetchall())
soft_assert(pods_count == int(row['Number of Pods']),
'Number of pods is wrong for project "{}". expected {} got {}.'
.format(row['Project Name'], pods_count, row['Number of Pods']))
def test_report_projects_by_cpu_usage(appliance, soft_assert, vporizer):
"""Testing 'Projects By CPU Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects By CPU Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_cpu_usage_rate_average, 2)
usage_report = round(float(row['CPU Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for project "{}": expected {} got {}'
.format(row['Name'], usage_db, usage_report))
def test_report_projects_by_memory_usage(appliance, soft_assert, vporizer):
"""Testing 'Projects By Memory Usage' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects By Memory Usage')
for row in report.data.rows:
vpor_values = get_vpor_data_by_name(vporizer, row["Name"])[0]
usage_db = round(vpor_values.max_mem_usage_absolute_average, 2)
usage_report = round(float(row['Memory Usage (%)']), 2)
soft_assert(usage_db == usage_report,
'CPU usage is wrong for project "{}": expected {} got {}.'
.format(row['Name'], usage_db, usage_report))
def test_report_pod_counts_for_container_images_by_project(appliance, provider, soft_assert):
"""Testing 'Pod counts For Container Images by Project' report,\
see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Pod counts For Container Images by Project', candu=True)
pods_api = provider.mgmt.list_pods()
pods_per_project = {}
for project in provider.mgmt.list_project_names():
pods_per_project[project] = [
pd for pd in pods_api if pd.metadata.namespace == project]
rows = list(report.data.rows)
for row in rows:
project_name, pod_name = row['Project Name'], row['Pod Name']
pod = filter(lambda pd: pd.metadata.name == pod_name,
pods_per_project[project_name])
soft_assert(pod, 'Could not find pod "{}" of project "{}" in the report.'
.format(pod_name, project_name))
pod = pod.pop()
for pd in pods_per_project[project_name]:
expected_image = pd.spec.containers[0].image
pod_images = [r['Image Name'] for r in rows if r['Pod Name'] == pod_name]
# Use 'in' since the image name in the API may include also registry and tag
soft_assert(filter(lambda img_nm: img_nm in expected_image, pod_images),
'Could not find image "{}" in pod "{}". Pod images in report: {}'
.format(expected_image, pod_name, pod_images))
def test_report_recently_discovered_pods(appliance, provider, soft_assert):
"""Testing 'Recently Discovered Pods' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Recently Discovered Pods')
pods_in_report = [row['Name'] for row in report.data.rows]
pods_per_ready_status = provider.pods_per_ready_status()
for pod in pods_per_ready_status.keys():
soft_assert(pod in pods_in_report,
'Could not find pod "{}" in report.'.format(pod))
def test_report_number_of_images_per_node(appliance, provider, soft_assert):
"""Testing 'Number of Images per Node' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
pods_api = provider.mgmt.list_pods()
report = get_report(appliance, 'Number of Images per Node', candu=True)
report_data = list(report.data.rows)
for pod in pods_api:
expected_image = pod.spec.containers[0].image
node = pod.spec.node_name
pod_name = pod.metadata.name
pod_images = [row['Image Name'] for row in report_data
if row['Pod Name'] == pod_name and
row['Node Name'] == node]
# Use 'in' since the image name in the API may include also registry and tag
is_image = filter(lambda img_nm: img_nm in expected_image, pod_images)
soft_assert(is_image,
'Expected image for pod "{0}" in node {1} is "{2}". found images: {3}'
.format(pod_name, node, expected_image, pod_images))
def test_report_projects_by_number_of_containers(appliance, provider, soft_assert):
"""Testing 'Projects by Number of Containers' report, see polarion case for more info
Polarion:
assignee: juwatts
caseimportance: medium
casecomponent: Containers
initialEstimate: 1/6h
"""
report = get_report(appliance, 'Projects by Number of Containers')
pods_api = provider.mgmt.list_pods()
# Since there is no provider column, in case of more than 1 provider we get some projects
# multiple times in the report. Because of that for each project name we are collecting
# all the 'Containers Count' columns and then checking that the containers count that we
# fetched from the API is found _in_ the counts under this project name
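    # For example (illustrative values): with two providers the report may yield
    # projects_containers_count == {'default': [3, 5]}; an API count of 3 for
    # 'default' then satisfies the 'in' check below.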
projects_containers_count = {}
for row in report.data.rows:
if row['Project Name'] not in projects_containers_count:
projects_containers_count[row['Project Name']] = []
projects_containers_count[row['Project Name']].append(int(row['Containers Count']))
for project_name, containers_counts in projects_containers_count.items():
containers_counts_api = sum(
[len(pod.spec.containers) for pod in pods_api
if pod.metadata.namespace == project_name]
)
soft_assert(containers_counts_api in containers_counts,
'Expected containers count for project {} should be {}. Found {} instead.'
                    .format(project_name, containers_counts_api, containers_counts))
|
gpl-2.0
| -4,475,754,717,884,814,300
| 38.440104
| 98
| 0.624497
| false
| 3.766476
| true
| false
| false
|
glehmann/uptodate
|
uptodate/plugins/copy.py
|
1
|
1817
|
#!/usr/bin/env python
#coding: iso-8859-15
#
# Copyright (C) 2005 Gaëtan Lehmann <gaetan.lehmann@jouy.inra.fr>
#
# this file is part of uptodate
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
from uptodate import *
usage = _("uptodate [options] copy name name")
summary = _("Copy a module")
description = _("""Copy is used in order to copy a module.
Example:
uptodate copy itk-app InsightToolkit-Applications""")
names = ['copy', 'cp']
options = []
def runCommand(opts, args, conf, out) :
if len(args) != 2 :
raise InvalidNbOfArgsException(usage)
module, new = args
if module not in conf.sections() :
raise ModuleNotFoundException(module)
if not opts.force and conf.has_section(new) :
if opts.batch or not yes(_("Do you want to remove the module %s?") % new, False) :
raise ModuleExistsException(new)
else :
opts.force = True
# remove new section if it already exist and --force is used
if opts.force and conf.has_section(new) :
conf.remove_section(new)
conf.add_section(new)
for prop in conf.options(module) :
conf.set(new, prop, conf.get(module, prop))
if opts.verbose :
printModule(conf, new, sys.stderr, True)
|
gpl-2.0
| 1,046,055,466,410,766,600
| 29.283333
| 84
| 0.72262
| false
| 3.346225
| false
| false
| false
|
leihaha/Django-By-Example_demo
|
bookmarks/images/forms.py
|
1
|
1308
|
from django import forms
from .models import Image
from urllib import request
from django.core.files.base import ContentFile
from django.utils.text import slugify
class ImageCreateForm(forms.ModelForm):
class Meta:
model = Image
fields = ('title', 'url', 'description')
widgets = {
'url': forms.HiddenInput,
}
def clean_url(self):
url = self.cleaned_data['url']
valid_extensions = ['jpg', 'jpeg']
extension = url.rsplit('.', 1)[1].lower()
if extension not in valid_extensions:
raise forms.ValidationError('The given URL does not match valid image extensions.')
return url
def save(self, force_insert=False,
force_update=False,
commit=True):
image = super(ImageCreateForm, self).save(commit=False)
image_url = self.cleaned_data['url']
image_name = '{}.{}'.format(slugify(image.title),
image_url.rsplit('.', 1)[1].lower())
        # Download the image from the given URL
response = request.urlopen(image_url)
image.image.save(image_name,
ContentFile(response.read()),
save=False)
if commit:
image.save()
return image
|
gpl-2.0
| 4,444,864,217,305,913,000
| 31.948718
| 95
| 0.570872
| false
| 4.168831
| false
| false
| false
|
rbarrois/xelpaste
|
libpaste/conf.py
|
1
|
1199
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
"""Default settings for libpaste."""
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
import appconf
from . import enums
class LibPasteConf(appconf.AppConf):
class Meta:
prefix = 'libpaste'
BASE_URL = 'https://example.org'
SITENAME = 'example.org'
# Expiry
EXPIRE_CHOICES = (
(enums.EXPIRE_ONETIME, _(u'One Time Snippet')),
(enums.EXPIRE_ONE_HOUR, _(u'In one hour')),
(enums.EXPIRE_ONE_WEEK, _(u'In one week')),
(enums.EXPIRE_ONE_MONTH, _(u'In one month')),
# ('never', _(u'Never')),
)
EXPIRE_DEFAULT = enums.EXPIRE_ONE_MONTH
# Lexer
LEXER_DEFAULT = 'python'
LEXER_LIST = enums.DEFAULT_LEXER_LIST
LEXER_WORDWRAP = ('freetext', 'text', 'rst')
# Snippets
SLUG_LENGTH = 4
SLUG_CHOICES = 'abcdefghijkmnopqrstuvwxyzABCDEFGHJKLMNOPQRSTUVWXYZ1234567890'
MAX_CONTENT_LENGTH = 250 * 1024 * 1024
BADWORD_TRIGGERS = {
'http': 5,
}
MAX_FILE_LENGTH = 10 * 1024 * 1024 # 10MB
UPLOAD_TO = 'snippets'
# Users
MAX_SNIPPETS_PER_USER = 15
ONETIME_LIMIT = 2
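# Note (assumption about django-appconf behaviour, not stated in this file): with
# Meta.prefix = 'libpaste', each attribute above is resolved as a Django setting named
# LIBPASTE_<ATTRIBUTE>, e.g.:
#
#   from django.conf import settings
#   settings.LIBPASTE_SLUG_LENGTH   # 4 unless overridden in the project settings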
|
mit
| -8,430,553,452,395,966,000
| 23.469388
| 81
| 0.619683
| false
| 3.163588
| false
| false
| false
|
tarunbod/dotfiles
|
scripts/todo.py
|
1
|
1774
|
#!/usr/bin/env python3
import os.path, sys, json
from collections import OrderedDict
todo_list = OrderedDict()
file_path = os.path.expanduser('~/.todo_list.json')
if os.path.isfile(file_path):
with open(file_path, 'r') as todo_list_file:
todo_list.update(json.load(todo_list_file))
args = sys.argv[1:]
def usage():
usage = """Usage:
todo.py add <task>
todo.py list
todo.py del <task number>
todo.py done <task number>"""
print(usage)
sys.exit(0)
if len(args) < 1:
args = ["list"]
task_count = len(todo_list)
if args[0] == "add":
if len(args) != 2:
usage()
name = args[1]
todo_list[str(task_count + 1)] = {
"name": name,
"completed": False
}
print("Added " + args[1] + " to todo list")
elif args[0] == "list":
if task_count == 0:
print("Woohoo, nothing to do!")
else:
for i in range(1, task_count + 1):
task = todo_list[str(i)]
print("%d) %s (%s)" % (i, task["name"], "✔" if task["completed"] else "╳"))
elif args[0] == "del":
if len(args) != 2:
usage()
idx = args[1]
if idx in todo_list:
del todo_list[idx]
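        # Renumber the remaining tasks so keys stay contiguous, e.g. deleting task 2 of
        # {'1': a, '2': b, '3': c} leaves {'1': a, '2': c} (illustrative values).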
keys = sorted(todo_list)
for i in range(0, task_count - 1):
key = keys[i]
todo_list[str(i + 1)] = todo_list[key]
if int(key) >= task_count:
del todo_list[key]
else:
print("Task #%s does not exist" % idx)
elif args[0] == "done":
if len(args) != 2:
usage()
idx = args[1]
if idx in todo_list:
todo_list[idx]["completed"] = True
else:
print("Task #%s does not exist" % idx)
else:
usage()
with open(file_path, 'w') as todo_list_file:
json.dump(todo_list, todo_list_file)
|
mit
| 8,956,292,231,730,313,000
| 22.918919
| 88
| 0.537853
| false
| 3.015332
| false
| false
| false
|
att-comdev/armada
|
armada/exceptions/tiller_exceptions.py
|
1
|
5060
|
# Copyright 2017 The Armada Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from armada.exceptions.base_exception import ArmadaBaseException as ex
class TillerException(ex):
'''Base class for Tiller exceptions and error handling.'''
message = 'An unknown Tiller error occurred.'
class TillerServicesUnavailableException(TillerException):
'''
Exception for tiller service being unavailable.
**Troubleshoot:**
*Coming Soon*
'''
message = 'Tiller services unavailable.'
class ChartCleanupException(TillerException):
'''Exception that occurs during chart cleanup.'''
def __init__(self, chart_name):
message = 'An error occurred during cleanup while removing {}'.format(
chart_name)
super(ChartCleanupException, self).__init__(message)
class ListChartsException(TillerException):
'''Exception that occurs when listing charts'''
message = 'There was an error listing the Helm chart releases.'
class PostUpdateJobDeleteException(TillerException):
    '''Exception that occurs when a job deletion fails.'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobDeleteException, self).__init__(message)
class PostUpdateJobCreateException(TillerException):
'''
Exception that occurs when a job creation fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PostUpdateJobCreateException, self).__init__(message)
class PreUpdateJobDeleteException(TillerException):
'''
    Exception that occurs when a job deletion fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, namespace):
message = 'Failed to delete k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobDeleteException, self).__init__(message)
class PreUpdateJobCreateException(TillerException):
'''Exception that occurs when a job creation fails.'''
def __init__(self, name, namespace):
message = 'Failed to create k8s job {} in {}'.format(
name, namespace)
super(PreUpdateJobCreateException, self).__init__(message)
class ReleaseException(TillerException):
'''
Exception that occurs when a release fails to install, upgrade, delete,
or test.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, name, status, action):
til_msg = getattr(status.info, 'Description').encode()
message = 'Failed to {} release: {} - Tiller Message: {}'.format(
action, name, til_msg)
super(ReleaseException, self).__init__(message)
class ChannelException(TillerException):
'''
Exception that occurs during a failed gRPC channel creation
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to create gRPC channel.'
class GetReleaseStatusException(TillerException):
'''
    Exception that occurs when getting the release status fails.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, release, version):
message = 'Failed to get {} status {} version'.format(
release, version)
super(GetReleaseStatusException, self).__init__(message)
class GetReleaseContentException(TillerException):
    '''Exception that occurs when getting the release content fails.'''
def __init__(self, release, version):
        message = 'Failed to get {} content {} version'.format(
release, version)
super(GetReleaseContentException, self).__init__(message)
class TillerPodNotFoundException(TillerException):
'''
Exception that occurs when a tiller pod cannot be found using the labels
specified in the Armada config.
**Troubleshoot:**
*Coming Soon*
'''
def __init__(self, labels):
message = 'Could not find Tiller pod with labels "{}"'.format(labels)
super(TillerPodNotFoundException, self).__init__(message)
class TillerPodNotRunningException(TillerException):
'''
Exception that occurs when no tiller pod is found in a running state.
**Troubleshoot:**
*Coming Soon*
'''
message = 'No Tiller pods found in running state'
class TillerVersionException(TillerException):
'''
    Exception that occurs when getting the Tiller version fails.
**Troubleshoot:**
*Coming Soon*
'''
message = 'Failed to get Tiller Version'
|
apache-2.0
| -5,653,126,201,895,981,000
| 25.492147
| 78
| 0.669763
| false
| 4.248531
| false
| false
| false
|
caogecym/muer
|
muer/settings_stage.py
|
1
|
6764
|
# Django settings for forum project.
import os.path
import forum
DEBUG = False
TEMPLATE_DEBUG = False
# for OpenID auth
ugettext = lambda s: s
LOGIN_URL = '/%s' % (ugettext('login/'))
ADMINS = (
('Yuming Cao', 'caogecym@gmail.com'),
)
SEND_BROKEN_LINK_EMAILS = True
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'muer_db',
'USER': 'caogecym',
'PASSWORD': '',
'HOST': 'muer-stage.herokuapp.com',
'PORT': '5432',
'OPTIONS': {
'autocommit': True,
}
},
}
# Parse database configuration from $DATABASE_URL
import dj_database_url
DATABASES['default'] = dj_database_url.config()
DATABASES['default']['OPTIONS'] = {
'autocommit': True,
}
# Registration regulation
MIN_USERNAME_LENGTH = 4
EMAIL_UNIQUE = True
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'caogecym@gmail.com'
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
# Allow all host headers
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'zh'
PROJECT_PATH = os.path.abspath(os.path.dirname(__name__))
LOCALE_PATHS = (
'%s/locale' % PROJECT_PATH,
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# /home/my_site/forum
FORUM_ROOT = os.path.abspath(forum.__path__[0])
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
# Additional locations of static files
STATICFILES_DIRS = (
os.path.join(PROJECT_PATH, 'public'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
# AMAZON S3 config
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_STORAGE_BUCKET_NAME = 'muer-stage'
# fix manage.py collectstatic command to only upload changed files instead of all files
AWS_PRELOAD_METADATA = True
STATIC_URL = 'https://muer-stage.s3.amazonaws.com/'
ADMIN_MEDIA_PREFIX = 'https://muer-stage.s3.amazonaws.com/static/admin/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = os.environ['SECRET_KEY']
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.csrf.CsrfViewMiddleware',
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'muer.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'muer.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), 'templates').replace('\\', '/'),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.admindocs',
'forum',
'south',
'storages',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
'simple': {
'format': '%(levelname)s %(asctime)s %(module)s %(message)s'
},
},
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'simple'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
'muer': {
'handlers': ['console'],
'level': 'INFO',
'propagate': False,
},
}
}
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
|
gpl-2.0
| -5,950,150,926,862,549,000
| 29.062222
| 95
| 0.672235
| false
| 3.530271
| true
| false
| false
|
jcu-eresearch/TDH-rich-data-capture
|
jcudc24provisioning/controllers/method_schema_scripts.py
|
1
|
7697
|
"""
Converts MethodSchemas (data configurations) into Deform schemas. There is also a helper function
(get_method_schema_preview) for turning the created schema into HTML for preview purposes on the methods page.
"""
from collections import namedtuple
from datetime import date
from beaker.cache import cache_region
from deform.form import Form
import random
import string
import colander
import deform
from jcudc24provisioning.models import DBSession
from jcudc24provisioning.models.ca_model import CAModel
from jcudc24provisioning.models.project import MethodSchema, field_types, Project
from jcudc24provisioning.models.file_upload import upload_widget
__author__ = 'xjc01266'
# Indexes of custom_field's, field_types.
INTEGER_INDEX = 0
DECIMAL_INDEX = 1
TEXT_INPUT_INDEX = 2
TEXT_AREA_INDEX = 3
CHECKBOX_INDEX = 4
SELECT_INDEX = 5
RADIO_INDEX = 6
FILE_INDEX = 7
WEBSITE_INDEX = 8
EMAIL_INDEX = 9
PHONE_INDEX = 10
DATE_INDEX = 11
HIDDEN_INDEX = 12
class DummySession(object):
"""
Pretend/dummy session that allows file upload widgets to work in the HTML output from get_method_schema_preview.
"""
def setdefault(self, arg1, arg2):
pass
def changed(self):
pass
#@cache_region('long_term')
def get_method_schema_preview(method_schema_id):
"""
Create and render the method schema identified by method_schema_id as HTML.
:param method_schema_id: ID of the MethodSchema to preview.
:return: Deform rendered HTML form for the identified MethodSchema (Note: The <form> element is removed).
"""
method_schema = DBSession.query(MethodSchema).filter_by(id=method_schema_id).first()
if method_schema is None:
return "<p>Please create your data mapping using the standardised and custom fields.</p>"
model_schema = DataTypeSchema(method_schema)
# Create a dummy request object to make file upload widgets display properly for preview purposes.
settings = {'workflows.files': "./"}
Registry = namedtuple('registry', 'settings')
Request = namedtuple('registry', ['registry', 'session'])
dummy_request = Request(registry=Registry(settings=settings), session=DummySession())
model_schema._bind({'request': dummy_request}) # Use _bind instead of bind so the schema isn't cloned
form = Form(model_schema, action="")
display = form.render({})
display = display[display.index(">")+1:].replace("</form>", "").strip()
return display
class DataTypeModel(CAModel):
def __init__(self, schema=None, appstruct=None):
self.id = None
if schema is not None:
attrs = {child.name: None for child in schema.children}
self.__dict__.update(attrs)
model_class = type(schema.name, (DataTypeModel,), attrs)
test2 = model_class()
self._model_class = model_class
super(DataTypeModel, self).__init__(schema=schema, appstruct=appstruct)
class DataTypeSchema(colander.SchemaNode):
"""
Base Deform schema that dynamically adds all elements of a MethodSchema (including parent schema elements).
"""
def __init__(self, method_schema):
params = {}
self.__dict__['params'] = params
super(DataTypeSchema, self).__init__(colander.Mapping('ignore'), **params)
if isinstance(method_schema, MethodSchema):
fields = get_schema_fields(method_schema)
for field in fields:
self.add(field)
#def method_schema_to_model(method_schema):
# """
# This is another way of generating the schema from MethodSchema models.
# """
# fields = get_schema_fields(method_schema)
# model_schema = colander._SchemaMeta(str(method_schema.name), (colander._SchemaNode,), fields)
# return model_schema
#@cache_region('long_term')
def get_schema_fields(method_schema):
"""
Create all fields/elements of the MethodSchema, this includes:
- Hierarchically add all elements of parent schemas.
- Add Deform element display attributes such as description and placeholder.
- Dynamically create the correct widget with associated settings such as select values and mask regex.
:param method_schema: MethodSchema to generate a Deform schema from.
:return: Deform schema (that can be rendered to HTML as a form).
"""
fields = []
for parent in method_schema.parents:
fields.extend(get_schema_fields(parent))
for field in method_schema.custom_fields:
field_type = field.type == field_types[CHECKBOX_INDEX][0] and colander.Boolean() or \
field.type == field_types[DATE_INDEX][0] and colander.DateTime() or \
colander.String()
python_type = field.type == field_types[INTEGER_INDEX][0] and int or\
field.type == field_types[DECIMAL_INDEX][0] and float or\
field.type == field_types[FILE_INDEX][0] and file or\
field.type == field_types[DATE_INDEX][0] and date or\
str
if field.values is not None:
value_items = field.values.split(",")
values = ()
for value in value_items:
values = values + ((value.strip(", ").lower().replace(" ", "_"), value),)
        # The website regex is basic but should catch blatant mistakes such as the user mistaking the field for email
widget = field.type == field_types[INTEGER_INDEX][0] and deform.widget.TextInputWidget(regex_mask="^\\\\d*$", strip=False) or\
field.type == field_types[DECIMAL_INDEX][0] and deform.widget.TextInputWidget(regex_mask="^(((\\\\.\\\\d*)?)|(\\\\d+(\\\\.\\\\d*)?))$", strip=False) or\
field.type == field_types[TEXT_AREA_INDEX][0] and deform.widget.TextAreaWidget() or\
field.type == field_types[CHECKBOX_INDEX][0] and deform.widget.CheckboxWidget() or\
field.type == field_types[SELECT_INDEX][0] and deform.widget.SelectWidget(values=values) or\
field.type == field_types[RADIO_INDEX][0] and deform.widget.RadioChoiceWidget(values=values) or\
field.type == field_types[FILE_INDEX][0] and upload_widget or\
field.type == field_types[WEBSITE_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="(http://)?(www\.)?([^@. ]+)(\.[^.@ ]+)(\.[^@. ]+)?(\.[^@. ]+)?(\.[^@. ]+)?", strip=False) or \
field.type == field_types[EMAIL_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="[^@ ]+@[^@ ]+\.[^@ ]+", strip=False) or\
field.type == field_types[PHONE_INDEX][0] and deform.widget.TextInputWidget(
regex_mask="(\d{3}[-\.\s]??\d{3}[-\.\s]??\d{4}|\(\d{3}\)\s*\d{3}[-\.\s]??\d{4}|\d{3}[-\.\s]??\d{4})", strip=False) or\
field.type == field_types[DATE_INDEX][0] and deform.widget.DateInputWidget() or\
field.type == field_types[HIDDEN_INDEX][0] and deform.widget.HiddenWidget() or\
deform.widget.TextInputWidget()
children = []
params = {
'name': field.internal_name,
'title': "%s%s" % (field.name, field.units and " (%s)" % field.units or ""),
'widget': widget,
'description': field.description,
'placeholder': field.placeholder,
'default': field.default,
'python_type': python_type,
}
fields.append(colander.SchemaNode(field_type, *children, **params))
return fields
|
bsd-3-clause
| -6,555,447,298,200,760,000
| 41.732955
| 169
| 0.621021
| false
| 3.825547
| false
| false
| false
|
cretaceous-creature/jsk_mbzirc_task3
|
jsk_network_tools/scripts/silverhammer_lowspeed_receiver.py
|
1
|
4109
|
#!/usr/bin/env python
from jsk_network_tools.msg import FC2OCS, OCS2FC
from jsk_network_tools.silverhammer_util import *
from threading import Lock, Thread
from socket import *
from struct import Struct
import os
import rospy
import signal
import sys
import roslib
from roslib.message import get_message_class
from std_msgs.msg import Time
import diagnostic_updater
from diagnostic_msgs.msg import DiagnosticStatus
class SilverHammerUDPListener():
def __init__(self, server, buffer_size, format, message, pub):
self.server = server
self.format = format
self.pub = pub
self.message = message
self.buffer_size = buffer_size
def run(self):
recv_data, addr = self.server.recvfrom(self.buffer_size)
msg = unpackMessage(recv_data, self.format, self.message)
self.pub.publish(msg)
print "received:", msg
class SilverHammerLowspeedReceiver():
def __init__(self):
message_class_str = rospy.get_param("~message",
"jsk_network_tools/FC2OCS")
try:
self.receive_message = get_message_class(message_class_str)
except:
raise Exception("invalid topic type: %s"%message_class_str)
self.lock = Lock()
self.launched_time = rospy.Time.now()
self.diagnostic_updater = diagnostic_updater.Updater()
self.diagnostic_updater.setHardwareID("none")
self.diagnostic_updater.add("LowspeedReceiver", self.diagnosticCallback)
self.received_num = 0
self.receive_port = rospy.get_param("~receive_port", 1024)
self.receive_ip = rospy.get_param("~receive_ip", "192.168.8.101")
self.receive_buffer = rospy.get_param("~receive_buffer_size", 250)
self.socket_server = socket(AF_INET, SOCK_DGRAM)
self.socket_server.settimeout(None)
self.socket_server.bind((self.receive_ip, self.receive_port))
self.receive_format = msgToStructFormat(self.receive_message())
self.pub = rospy.Publisher("~output", self.receive_message)
self.last_received_time = rospy.Time(0)
self.last_received_time_pub = rospy.Publisher(
"~last_received_time", Time)
self.last_publish_output_time = rospy.Time(0)
self.last_publish_output_time_pub = rospy.Publisher(
"~last_publish_output_time", Time)
self.diagnostic_timer = rospy.Timer(rospy.Duration(1.0 / 10),
self.diagnosticTimerCallback)
def diagnosticTimerCallback(self, event):
self.diagnostic_updater.update()
# and publish time
with self.lock:
self.last_publish_output_time_pub.publish(self.last_publish_output_time)
self.last_received_time_pub.publish(self.last_received_time)
def diagnosticCallback(self, stat):
# always OK
stat.summary(DiagnosticStatus.OK, "OK")
with self.lock:
now = rospy.Time.now()
stat.add("Uptime [sec]",
(now - self.launched_time).to_sec())
stat.add("Time from the last reception [sec]",
(now - self.last_received_time).to_sec())
stat.add("Time from the last publish ~output [sec]",
(now - self.last_publish_output_time).to_sec())
stat.add("UDP address", self.receive_ip)
stat.add("UDP port", self.receive_port)
return stat
def run(self):
while not rospy.is_shutdown():
recv_data, addr = self.socket_server.recvfrom(self.receive_buffer)
msg = unpackMessage(recv_data, self.receive_format,
self.receive_message)
with self.lock:
self.last_received_time = rospy.Time.now()
self.pub.publish(msg)
with self.lock:
self.last_publish_output_time = rospy.Time.now()
            rospy.logdebug("received: %s", msg)
if __name__ == "__main__":
rospy.init_node("silverhammer_lowspeed_receiver")
rc = SilverHammerLowspeedReceiver()
rc.run()
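# Illustrative invocation (shell command shown as a comment; the parameter values are
# the defaults read via rospy.get_param above):
#   rosrun jsk_network_tools silverhammer_lowspeed_receiver.py \
#       _receive_ip:=192.168.8.101 _receive_port:=1024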
|
apache-2.0
| -3,182,837,485,987,124,000
| 41.802083
| 84
| 0.619615
| false
| 3.762821
| false
| false
| false
|
openqt/algorithms
|
leetcode/python/ac/lc872-leaf-similar-trees.py
|
1
|
1464
|
# coding=utf-8
import unittest
"""872. Leaf-Similar Trees
https://leetcode.com/problems/leaf-similar-trees/description/
Consider all the leaves of a binary tree. From left to right order, the
values of those leaves form a _leaf value sequence._

For example, in the given tree above, the leaf value sequence is `(6, 7, 4, 9,
8)`.
Two binary trees are considered _leaf-similar_ if their leaf value sequence
is the same.
Return `true` if and only if the two given trees with head nodes `root1` and
`root2` are leaf-similar.
**Note:**
* Both of the given trees will have between `1` and `100` nodes.
Similar Questions:
"""
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def leafSimilar(self, root1, root2):
"""
:type root1: TreeNode
:type root2: TreeNode
:rtype: bool
"""
l, r = [], []
self._similar(root1, l)
self._similar(root2, r)
return l == r
def _similar(self, node, leaf: list):
if node:
if not (node.left or node.right):
leaf.append(node.val)
else:
self._similar(node.left, leaf)
self._similar(node.right, leaf)
if __name__ == "__main__":
unittest.main()
|
gpl-3.0
| -7,882,174,381,876,940,000
| 22.612903
| 78
| 0.603825
| false
| 3.396752
| false
| false
| false
|
uname/bleproxy
|
PC/BleProxyDesk/view/BleListItem.py
|
1
|
1110
|
#-*- coding: utf-8 -*-
from PyQt4 import QtGui
from ui.AppIcons import *
class BleListItem(QtGui.QListWidgetItem):
def __init__(self, name, address, rssi):
QtGui.QListWidgetItem.__init__(self)
self.name, self.address, self.rssi = name, address, rssi
self.setBleInfo(rssi)
self.conflag = False
def setConnected(self, flag):
self.conflag = flag
self.setBackgroundColor(QtGui.QColor(flag and 0x00ff00 or 0xffffff))
def isConnected(self):
return self.conflag
def setBleInfo(self, rssi):
iconPath = ":app/icons/app/sig_1.png"
if rssi > -45:
iconPath = ":app/icons/app/sig_4.png"
elif rssi > -60:
iconPath = ":app/icons/app/sig_3.png"
elif rssi > -80:
iconPath = ":app/icons/app/sig_2.png"
self.setIcon(QtGui.QIcon(iconPath))
self.setText("%s\n%s %ddb\n" % (self.name, self.address, rssi))
def updateRssi(self, rssi):
self.setBleInfo(rssi)
def getAddress(self):
return self.address
|
apache-2.0
| 3,593,046,291,192,624,600
| 29.861111
| 76
| 0.579279
| false
| 3.353474
| false
| false
| false
|
Tintri/tintri-python-sdk
|
examples/set_qos_tgc_service_groups.py
|
1
|
5668
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Tintri, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from tintri.common import TintriServerError
from tintri.v310 import Tintri
from tintri.v310 import VirtualMachineQoSConfig
"""
This Python script sets the QoS of the VMs in the first TGC service group with
at least 2 VMs.
Command usage:
set_qos_tgc_service_groups.py server_name user_name password min_value max_value
Where:
server_name - name of a TGC server
user_name - user name used to login into the TGC server
password - password for the user
min_value - the QoS minimum value for the VM
max_value - the QoS maximum value for the VM
"""
# For exhaustive messages on the console, set this to True; otherwise keep it False
debug_mode = False
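# Example invocation (illustrative; the host name and credentials below are placeholders):
#   python set_qos_tgc_service_groups.py tgc.example.com admin secret 100 1000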
def print_with_prefix(prefix, out):
print(prefix + out)
return
def print_debug(out):
if debug_mode:
print_with_prefix("[DEBUG] : ", out)
return
def print_info(out):
print_with_prefix("[INFO] : ", out)
return
def print_error(out):
print_with_prefix("[ERROR] : ", out)
return
# Sets the Minimum and maximum QoS values on a TGC service group.
def set_qos(tintri, sg_uuid, new_min_value, new_max_value):
# Create new QoS object with the fields to be changed
modify_qos_info = VirtualMachineQoSConfig()
modify_qos_info.minNormalizedIops = int(new_min_value)
modify_qos_info.maxNormalizedIops = int(new_max_value)
print_debug("IOPS: " + str(modify_qos_info.minNormalizedIops) + ", " + str(modify_qos_info.maxNormalizedIops))
# Set the QoS in the service group.
tintri.update_service_group_qos_config(modify_qos_info, sg_uuid)
# Apply the QoS values that were set for the service group above.
tintri.apply_service_group_qos_config(sg_uuid)
# main
if len(sys.argv) < 6:
    print("\nSets the QoS of the VMs in the first TGC service group with at least 2 VMs.\n")
print("Usage: " + sys.argv[0] + " server_name user_name password min_value max_value\n")
print("Where:")
print(" server_name - name of a TGC server")
print(" user_name - user name used to login into the TGC server")
print(" password - password for the TGC and VMstore users")
print(" min_value - the QoS minimum value for the VM")
print(" max_value - the QoS maximum value for the VM")
sys.exit(-1)
server_name = sys.argv[1]
user_name = sys.argv[2]
password = sys.argv[3]
new_min_value = sys.argv[4]
new_max_value = sys.argv[5]
try:
# instantiate the Tintri server.
tintri = Tintri(server_name)
# Get version and product
version_info = tintri.version
if (not tintri.is_tgc()):
        raise TintriServerError(0, cause="Tintri server needs to be Tintri Global Center")
preferred_version = version_info.preferredVersion
print("API Version: " + preferred_version)
versions = preferred_version.split(".")
major_version = versions[0]
minor_version = int(versions[1])
if major_version != "v310":
raise TintriServerError(0, cause="Incorrect major version: " + major_version + ". Should be v310.")
if minor_version < 31:
        raise TintriServerError(0, cause="Incorrect minor version: " + str(minor_version) + ". Should be 31 or greater")
# Login to TGC
tintri.login(user_name, password)
except TintriServerError as tse:
print_error(tse.__str__())
sys.exit(2)
try:
# Get a list of service groups
service_groups = tintri.get_service_groups()
num_service_groups = service_groups.absoluteTotal
if num_service_groups == 0:
raise TintriServerError(0, cause="No Service Groups present")
print_info(str(num_service_groups) + " Service Groups present")
    # Initialize the member list
sg_uuid = ""
found = False
# Look for a qualifying service group
count = 1
for sg in service_groups:
sg_name = sg.name
sg_uuid = sg.uuid.uuid
sg_member_count = sg.memberCount
print_info(str(count) + ": " + sg_name + "(" + str(sg_member_count) + "): " + sg_uuid)
if sg_member_count >= 2:
found = True
break
count += 1
if not found:
        raise TintriServerError(0, cause="No service groups matching the criteria, member count >= 2.")
# Set the QoS on the service group.
set_qos(tintri, sg_uuid, new_min_value, new_max_value)
except TintriServerError as tse:
print_error(tse.__str__())
tintri.logout()
sys.exit(3)
# All pau, log out
tintri.logout()
|
bsd-3-clause
| -6,672,245,908,238,499,000
| 32.341176
| 116
| 0.678899
| false
| 3.509598
| false
| false
| false
|
exobrain-wisekb/wisekb-management-platform
|
wisekb-uima-ducc/bin/properties.py
|
2
|
8155
|
#!/usr/bin/python
import os
import re
import platform
import string
import zipfile
# -----------------------------------------------------------------------
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# -----------------------------------------------------------------------
class PropertiesException(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return repr(self.msg)
class Property:
def __init__(self, k, v, c):
self.k = k # key
self.v = v # value
self.c = c # comments
self.orig_v = v
def reset(self):
self.v = self.orig_v
def __str__(self):
return str(self.k) + '=' + str(self.v)
class Properties:
def __init__(self):
self.props = {}
self.builtin = {}
self.keys = []
self.comments = []
#
# Create builtins corresponding to some of the java properties.
#
# We allow expansion on java system properties. It's obviously not possible to
# do most of them but these guys may have a use e.g. to put ducc_ling into
# architecture-specific places.
#
(system, node, release, version, machine, processor) = platform.uname()
if ( system == 'Darwin' ):
self.builtin['os.arch'] = 'x86_64'
self.builtin['os.name'] = 'Mac OS X'
elif ( system == 'Linux' ):
if ( machine == 'ppc64' ):
self.builtin['os.arch'] = 'ppc64'
self.builtin['os.name'] = 'Linux'
elif ( machine == 'x86_64' ):
self.builtin['os.arch'] = 'amd64'
self.builtin['os.name'] = 'Linux'
elif ( machine == 'ppc64le' ):
self.builtin['os.arch'] = 'ppc64le'
self.builtin['os.name'] = 'Linux'
#
# Expand all ${} values. The search order is:
# 1 look in this properties file
# 2 look in the environment
# 3 look in a subset of the Java system properties (os.name & os.arch)
#
def do_subst(self, st):
key = None
p = re.compile("\\$\\{[a-zA-Z0-9_\\.\\-]+\\}")
ndx = 0
response = st.strip()
m = p.search(response, ndx)
while ( m != None ):
key = m.group()[2:-1]
val = None
if ( self.has_key(key) ):
val = self.get(key)
elif ( os.environ.has_key(key) ):
val = os.environ[key]
elif (self.builtin.has_key(key) ):
val = self.builtin[key]
if ( val != None ):
response = string.replace(response, m.group() , val)
ndx = m.start()+1
m = p.search(response, ndx)
return response
def mkitem(self, line):
#
# First deal with line comments so we can preserve them on write
#
if ( line.startswith('#') ):
self.comments.append(line)
return False
if ( line.startswith('//') ):
self.comments.append(line)
return False
if ( line == '' ):
return False
#
# Now strip off embedded comments, these are lost, but they're not valid
# for java props anyway.
#
ndx = line.find('#') # remove comments - like the java DuccProperties
if ( ndx >= 0 ):
line = line[0:ndx] # strip the comment
ndx = line.find('//') # remove comments - like the java DuccProperties
if ( ndx >= 0 ):
line = line[0:ndx] # strip the comment
line = line.strip() # clear leading and trailing whitespace
if ( line == '' ):
return
mobj = re.search('[ =:]+', line)
if ( mobj ):
key = line[:mobj.start()].strip()
val = line[mobj.end():].strip()
# print 'NEXT', mobj.start(), 'END', mobj.end(), 'KEY', key, 'VAL', val
# val = self.do_subst(val) # we'll do lazy subst on get instead
self.props[key] = Property(key, val, self.comments)
if ( key in self.keys ):
self.keys.remove(key)
self.keys.append(key)
self.comments = []
else:
self.props[line] = Property(line, '', self.comments)
self.keys.append(line)
self.comments = []
#
# Load reads a properties file and adds it contents to the
# hash. It may be called several times; each call updates
# the internal has, thus building it up. The input file is
# in the form of a java-like properties file.
#
def load(self, propsfile):
if ( not os.path.exists(propsfile) ):
raise PropertiesException(propsfile + ' does not exist and cannot be loaded.')
f = open(propsfile);
for line in f:
self.mkitem(line.strip())
f.close()
# read a jar manifest into a properties entity
def load_from_manifest(self, jarfile):
z = zipfile.ZipFile(jarfile)
items = z.read('META-INF/MANIFEST.MF').split('\n')
for item in items:
self.mkitem(item)
#
# Try to load a properties file. Just be silent if it doesn't exist.
#
def load_if_exists(self, propsfile):
if ( os.path.exists(propsfile) ):
return self.load(propsfile)
#
# Put something into the hash with an optional comment
#
def put(self, key, value, comment=[]):
self.props[key] = Property(key, value, comment)
self.keys.append(key)
#
# Put a Property object into the map
#
def put_property(self, p):
self.props[p.k] = p
self.keys.append(p.k)
#
# Get a value from the hash
#
def get(self, key):
if ( self.props.has_key(key) ):
return self.do_subst(self.props[key].v) # we'll do lazy subst on get instead
return None
#
# Get a Property object for manipulation (k, v, comment)
#
def get_property(self, key):
if ( self.props.has_key(key) ):
return self.props[key] # note no expansion.
return None
#
# Remove an item if it exists
#
def delete(self, key):
if ( self.props.has_key(key) ):
del self.props[key]
self.keys.remove(key)
#
# Write the has as a Java-like properties file
#
def write(self, propsfile):
f = open(propsfile, 'w')
for k in self.keys:
p = self.props[k]
v = p.v
c = p.c
for cc in c:
f.write(cc + '\n')
f.write(k + ' = ' + str(v) + '\n\n')
f.close()
#
# return a shallow copy of the dictionary
#
def copy_dictionary(self):
return self.props.copy()
#
# Return the entries (Property list) in the dictionary
#
def items(self):
return self.props.items()
#
# The keys, in the order as defined in the input file
#
def get_keys(self):
return self.keys
#
# check to see if the key exists in the dictionary
#
def has_key(self, key):
return self.props.has_key(key)
#
# Return the length of the dictionary
#
def __len__(self):
return len(self.props)
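# Illustrative usage sketch (not part of the original module; the file and key names are hypothetical):
#   props = Properties()
#   props.load('ducc.properties')
#   home = props.get('ducc.head')                  # ${...} references are expanded lazily on get()
#   props.put('ducc.broker.port', '61617', comment=['# overridden for a local test'])
#   props.write('local.properties')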
|
apache-2.0
| 5,952,035,096,709,450,000
| 29.657895
| 91
| 0.526548
| false
| 3.947241
| false
| false
| false
|
mxgnene01/itester
|
itester/common/termlogcolor.py
|
1
|
5853
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Meng xiangguo <mxgnene01@gmail.com>
#
# H A P P Y H A C K I N G !
# _____ ______
# ____==== ]OO|_n_n__][. | |]
# [________]_|__|________)< |MENG|
# oo oo 'oo OOOO-| oo\_ ~o~~~o~'
# +--+--+--+--+--+--+--+--+--+--+--+--+--+
# 17/5/27 7:54 PM
from __future__ import print_function
import os
import sys
import datetime
'''
Reference: https://pypi.python.org/pypi/termcolor
'''
__ALL__ = [ 'colored', 'cprint' ]
VERSION = (1, 1, 0)
ATTRIBUTES = dict(
list(zip([
'bold',
'dark',
'',
'underline',
'blink',
'',
'reverse',
'concealed'
],
list(range(1, 9))
))
)
del ATTRIBUTES['']
HIGHLIGHTS = dict(
list(zip([
'on_grey',
'on_red',
'on_green',
'on_yellow',
'on_blue',
'on_magenta',
'on_cyan',
'on_white'
],
list(range(40, 48))
))
)
COLORS = dict(
list(zip(['grey','red','green','yellow'],[47, 41, 42, 43]))
)
END = '\033[0m'
def colored(text, color=None, on_color=None, attrs=None):
"""Colorize text.
Available text colors:
red, green, yellow, grey.
Available text highlights:
on_red, on_green, on_yellow, on_blue, on_magenta, on_cyan, on_white.
Available attributes:
bold, dark, underline, blink, reverse, concealed.
Example:
colored('Hello, World!', 'red', 'on_grey', ['grey', 'blink'])
colored('Hello, World!', 'green')
"""
if os.getenv('ANSI_COLORS_DISABLED') is None:
fmt_str = '\033[%d;30;1m%s'
if color is not None:
text = fmt_str % (COLORS[color], text)
if on_color is not None:
text = fmt_str % (HIGHLIGHTS[on_color], text)
if attrs is not None:
for attr in attrs:
text = fmt_str % (ATTRIBUTES[attr], text)
text += END
return text
def cprint(text, color=None, on_color=None, attrs=None, **kwargs):
print((colored(text, color, on_color, attrs)), **kwargs)
# next bit filched from 1.5.2's inspect.py
def currentframe():
"""Return the frame object for the caller's stack frame."""
try:
raise Exception
except:
return sys.exc_info()[2].tb_frame.f_back
def findCaller():
"""
Find the stack frame of the caller so that we can note the source
file name, line number and function name.
"""
if hasattr(sys, 'frozen'): # support for py2exe
_srcfile = "logging%s__init__%s" % (os.sep, __file__[-4:])
elif __file__[-4:].lower() in ['.pyc', '.pyo']:
_srcfile = __file__[:-4] + '.py'
else:
_srcfile = __file__
_srcfile = os.path.normcase(_srcfile)
f = currentframe()
# On some versions of IronPython, currentframe() returns None if
# IronPython isn't run with -X:Frames.
if f is not None:
f = f.f_back
rv = "(unknown file)", 0, "(unknown function)"
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
f = f.f_back
continue
rv = (co.co_filename, f.f_lineno, co.co_name)
break
return rv
class TermColor():
'''
    Supports colored output, including under Jenkins.
'''
colormap = dict(
concern=dict(color='green', attrs=['bold']),
info=dict(color='grey'),
warn=dict(color='yellow', attrs=['bold']),
warning=dict(color='yellow', attrs=['bold']),
error=dict(color='red'),
critical=dict(color='red', attrs=['bold']),
)
def msg_format(self, mode, msg):
'''
        Get the caller's file name, calling function and line number
'''
fn, lineno, co_name = findCaller()
filename = fn.split('/')[-1]
now_date = str(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'))
msg_simple = ('[-] %s - %s(%s:%s@%s): %s') % (now_date, mode, filename, co_name, str(lineno), msg)
return msg_simple
def info(self, msg):
self._log("info", msg)
def concern(self, msg):
self._log("concern", msg)
def error(self, msg):
self._log("error", msg)
def warn(self, msg):
self._log("warn", msg)
def _log(self, funcname, msg):
print(colored(self.msg_format(funcname, msg), **self.colormap[funcname]))
log = TermColor()
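# Illustrative usage of the module-level logger (each call prints a timestamped, colored
# line that includes the caller's file, function and line number):
#   log.info("starting test run")
#   log.warn("response slower than expected")
#   log.error("assertion failed")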
if __name__ == '__main__':
print('Current terminal type: %s' % os.getenv('TERM'))
print('Test basic colors:')
cprint('Grey color', 'grey')
cprint('Red color', 'red')
cprint('Green color', 'green')
cprint('Yellow color', 'yellow')
print(('-' * 78))
print('Test highlights:')
cprint('On grey color', on_color='on_grey')
cprint('On red color', on_color='on_red')
cprint('On green color', on_color='on_green')
cprint('On yellow color', on_color='on_yellow')
cprint('On blue color', on_color='on_blue')
cprint('On magenta color', on_color='on_magenta')
cprint('On cyan color', on_color='on_cyan')
cprint('On white color', color='grey', on_color='on_white')
print('-' * 78)
print('Test attributes:')
cprint('Bold grey color', 'grey', attrs=['bold'])
cprint('Dark red color', 'red', attrs=['dark'])
cprint('Underline green color', 'green', attrs=['underline'])
cprint('Blink yellow color', 'yellow', attrs=['blink'])
print(('-' * 78))
print('Test mixing:')
cprint('Underline red on grey color', 'red', 'on_grey',
['underline'])
cprint('Reversed green on red color', 'green', 'on_red', ['reverse'])
|
gpl-3.0
| 925,905,783,110,960,900
| 25.686636
| 106
| 0.516491
| false
| 3.312929
| false
| false
| false
|
mckinseyacademy/xblock-diagnosticfeedback
|
diagnostic_feedback/helpers/helper.py
|
1
|
1063
|
from __future__ import absolute_import
from . import Category, Question, Range
class MainHelper(object):
@classmethod
def save_filtered_data(cls, quiz, data):
"""
filter out & save the posted data to match our required schema for each quiz step
"""
step = data['step']
if step == 1:
quiz.title = data['title']
quiz.description = data['description']
if not quiz.quiz_type and data.get('type'):
quiz.quiz_type = data['type']
if step == 2 and quiz.quiz_type == quiz.BUZZFEED_QUIZ_VALUE:
results = Category.filter_results(data)
quiz.results = results
elif step == 2 and quiz.quiz_type == quiz.DIAGNOSTIC_QUIZ_VALUE:
results = Range.filter_results(data)
quiz.results = results
elif step == 3:
questions = Question.filter_question(data, quiz.quiz_type)
quiz.questions = questions
else:
pass
return "step {} data saved".format(step)
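# Illustrative usage sketch (hypothetical quiz instance and posted form data; the keys
# mirror those consumed by save_filtered_data above):
#   MainHelper.save_filtered_data(quiz, {
#       'step': 1,
#       'title': 'Leadership style',
#       'description': 'A short diagnostic quiz',
#       'type': quiz.BUZZFEED_QUIZ_VALUE,
#   })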
|
agpl-3.0
| 775,066,402,183,640,600
| 29.371429
| 93
| 0.571966
| false
| 3.951673
| false
| false
| false
|
ZeitOnline/zeit.objectlog
|
src/zeit/objectlog/source.py
|
1
|
1802
|
from zeit.objectlog.i18n import MessageFactory as _
import zc.sourcefactory.contextual
import zeit.objectlog.interfaces
import zope.app.form.browser.interfaces
import zope.i18n
import zope.interface.common.idatetime
class LogEntrySource(
zc.sourcefactory.contextual.BasicContextualSourceFactory):
def getValues(self, context):
log = zeit.objectlog.interfaces.ILog(context)
return log.get_log()
def createTerm(self, context, source, value, title, token, request):
        # We have to create the title here as we haven't got the request in
# `getTitle` :(
if value.principal is None:
principal = _('System')
else:
p_source = zeit.objectlog.interfaces.ILogEntry['principal'].source
principal_terms = zope.component.getMultiAdapter(
(p_source, request), zope.app.form.browser.interfaces.ITerms)
try:
principal = principal_terms.getTerm(value.principal).title
except LookupError:
principal = value.principal
formatter = request.locale.dates.getFormatter('dateTime', 'medium')
tzinfo = zope.interface.common.idatetime.ITZInfo(request, None)
time = value.time
if tzinfo is not None:
time = time.astimezone(tzinfo)
time = formatter.format(time)
message = zope.i18n.translate(value.message, context=request)
title = _("${time} [${principal}]: ${message}",
mapping=dict(
time=time,
principal_id=value.principal,
principal=principal,
message=message))
return super(LogEntrySource, self).createTerm(
context, source, value, title, token, request)
|
bsd-3-clause
| -7,002,345,452,534,313,000
| 36.541667
| 78
| 0.624861
| false
| 4.260047
| false
| false
| false
|
dungvtdev/upsbayescpm
|
bayespy/inference/vmp/nodes/wishart.py
|
1
|
9440
|
################################################################################
# Copyright (C) 2011-2012,2014 Jaakko Luttinen
#
# This file is licensed under the MIT License.
################################################################################
import numpy as np
import scipy.special as special
from bayespy.utils import misc, linalg
from .expfamily import ExponentialFamily
from .expfamily import ExponentialFamilyDistribution
from .expfamily import useconstructor
from .constant import Constant
from .deterministic import Deterministic
from .gamma import GammaMoments
from .node import Moments, Node
class WishartPriorMoments(Moments):
def __init__(self, k):
self.k = k
self.dims = ( (), () )
return
def compute_fixed_moments(self, n):
""" Compute moments for fixed x. """
u0 = np.asanyarray(n)
u1 = special.multigammaln(0.5*u0, self.k)
return [u0, u1]
@classmethod
def from_values(cls, x, d):
""" Compute the dimensions of phi or u. """
return cls(d)
class WishartMoments(Moments):
def __init__(self, shape):
self.shape = shape
self.ndim = len(shape)
self.dims = ( 2 * shape, () )
return
def compute_fixed_moments(self, Lambda, gradient=None):
""" Compute moments for fixed x. """
L = linalg.chol(Lambda, ndim=self.ndim)
ldet = linalg.chol_logdet(L, ndim=self.ndim)
u = [Lambda,
ldet]
if gradient is None:
return u
du0 = gradient[0]
du1 = (
misc.add_trailing_axes(gradient[1], 2*self.ndim)
* linalg.chol_inv(L, ndim=self.ndim)
)
du = du0 + du1
return (u, du)
def plates_from_shape(self, shape):
if self.ndim == 0:
return shape
else:
return shape[:-2*self.ndim]
def shape_from_plates(self, plates):
return plates + self.shape + self.shape
def get_instance_conversion_kwargs(self):
return dict(ndim=self.ndim)
def get_instance_converter(self, ndim):
if ndim != self.ndim:
raise NotImplementedError(
"No conversion between different ndim implemented for "
"WishartMoments yet"
)
return None
@classmethod
def from_values(cls, x, ndim):
""" Compute the dimensions of phi and u. """
if np.ndim(x) < 2 * ndim:
raise ValueError("Values for Wishart distribution must be at least "
"2-D arrays.")
if np.shape(x)[-ndim:] != np.shape(x)[-2*ndim:-ndim]:
raise ValueError("Values for Wishart distribution must be square "
"matrices, thus the two last axes must have equal "
"length.")
shape = np.shape(x)[-ndim:]
return cls(shape)
class WishartDistribution(ExponentialFamilyDistribution):
    r"""
Sub-classes implement distribution specific computations.
Distribution for :math:`k \times k` symmetric positive definite matrix.
.. math::
\Lambda \sim \mathcal{W}(n, V)
Note: :math:`V` is inverse scale matrix.
.. math::
p(\Lambda | n, V) = ..
"""
def compute_message_to_parent(self, parent, index, u_self, u_n, u_V):
if index == 0:
raise NotImplementedError("Message from Wishart to degrees of "
"freedom parameter (first parent) "
"not yet implemented")
elif index == 1:
Lambda = u_self[0]
n = u_n[0]
return [-0.5 * Lambda,
0.5 * n]
else:
raise ValueError("Invalid parent index {0}".format(index))
def compute_phi_from_parents(self, u_n, u_V, mask=True):
r"""
Compute natural parameters
.. math::
\phi(n, V) =
\begin{bmatrix}
-\frac{1}{2} V
\\
\frac{1}{2} n
\end{bmatrix}
"""
return [-0.5 * u_V[0],
0.5 * u_n[0]]
def compute_moments_and_cgf(self, phi, mask=True):
r"""
Return moments and cgf for given natural parameters
.. math::
\langle u \rangle =
\begin{bmatrix}
\phi_2 (-\phi_1)^{-1}
\\
-\log|-\phi_1| + \psi_k(\phi_2)
\end{bmatrix}
\\
g(\phi) = \phi_2 \log|-\phi_1| - \log \Gamma_k(\phi_2)
"""
U = linalg.chol(-phi[0])
k = np.shape(phi[0])[-1]
#k = self.dims[0][0]
logdet_phi0 = linalg.chol_logdet(U)
u0 = phi[1][...,np.newaxis,np.newaxis] * linalg.chol_inv(U)
u1 = -logdet_phi0 + misc.multidigamma(phi[1], k)
u = [u0, u1]
g = phi[1] * logdet_phi0 - special.multigammaln(phi[1], k)
return (u, g)
def compute_cgf_from_parents(self, u_n, u_V):
r"""
CGF from parents
.. math::
g(n, V) = \frac{n}{2} \log|V| - \frac{nk}{2} \log 2 -
\log \Gamma_k(\frac{n}{2})
"""
n = u_n[0]
gammaln_n = u_n[1]
V = u_V[0]
logdet_V = u_V[1]
k = np.shape(V)[-1]
g = 0.5*n*logdet_V - 0.5*k*n*np.log(2) - gammaln_n
return g
def compute_fixed_moments_and_f(self, Lambda, mask=True):
r"""
Compute u(x) and f(x) for given x.
.. math:
u(\Lambda) =
\begin{bmatrix}
\Lambda
\\
\log |\Lambda|
\end{bmatrix}
"""
k = np.shape(Lambda)[-1]
ldet = linalg.chol_logdet(linalg.chol(Lambda))
u = [Lambda,
ldet]
f = -(k+1)/2 * ldet
return (u, f)
class Wishart(ExponentialFamily):
r"""
Node for Wishart random variables.
The random variable :math:`\mathbf{\Lambda}` is a :math:`D\times{}D`
positive-definite symmetric matrix.
.. math::
p(\mathbf{\Lambda}) = \mathrm{Wishart}(\mathbf{\Lambda} | N,
\mathbf{V})
Parameters
----------
n : scalar or array
:math:`N`, degrees of freedom, :math:`N>D-1`.
V : Wishart-like node or (...,D,D)-array
:math:`\mathbf{V}`, scale matrix.
"""
_distribution = WishartDistribution()
def __init__(self, n, V, **kwargs):
"""
Create Wishart node.
"""
super().__init__(n, V, **kwargs)
@classmethod
def _constructor(cls, n, V, **kwargs):
"""
Constructs distribution and moments objects.
"""
# Make V a proper parent node and get the dimensionality of the matrix
V = cls._ensure_moments(V, WishartMoments, ndim=1)
D = V.dims[0][-1]
n = cls._ensure_moments(n, WishartPriorMoments, d=D)
moments = WishartMoments((D,))
# Parent node message types
parent_moments = (n._moments, V._moments)
parents = [n, V]
return (parents,
kwargs,
moments.dims,
cls._total_plates(kwargs.get('plates'),
cls._distribution.plates_from_parent(0, n.plates),
cls._distribution.plates_from_parent(1, V.plates)),
cls._distribution,
moments,
parent_moments)
def scale(self, scalar, **kwargs):
return _ScaledWishart(self, scalar, **kwargs)
def __str__(self):
n = 2*self.phi[1]
A = 0.5 * self.u[0] / self.phi[1][...,np.newaxis,np.newaxis]
return ("%s ~ Wishart(n, A)\n"
" n =\n"
"%s\n"
" A =\n"
"%s\n"
% (self.name, n, A))
class _ScaledWishart(Deterministic):
def __init__(self, Lambda, alpha, ndim=None, **kwargs):
if ndim is None:
try:
ndim = Lambda._moments.ndim
except AttributeError:
raise ValueError("Give explicit ndim argument. (ndim=1 for normal matrix)")
Lambda = self._ensure_moments(Lambda, WishartMoments, ndim=ndim)
alpha = self._ensure_moments(alpha, GammaMoments)
dims = Lambda.dims
self._moments = Lambda._moments
self._parent_moments = (Lambda._moments, alpha._moments)
return super().__init__(Lambda, alpha, dims=dims, **kwargs)
def _compute_moments(self, u_Lambda, u_alpha):
Lambda = u_Lambda[0]
logdet_Lambda = u_Lambda[1]
alpha = misc.add_trailing_axes(u_alpha[0], 2*self._moments.ndim)
logalpha = u_alpha[1]
u0 = Lambda * alpha
u1 = logdet_Lambda + np.prod(self._moments.shape) * logalpha
return [u0, u1]
def _compute_message_to_parent(self, index, m, u_Lambda, u_alpha):
if index == 0:
alpha = misc.add_trailing_axes(u_alpha[0], 2*self._moments.ndim)
logalpha = u_alpha[1]
m0 = m[0] * alpha
m1 = m[1]
return [m0, m1]
if index == 1:
Lambda = u_Lambda[0]
logdet_Lambda = u_Lambda[1]
m0 = linalg.inner(m[0], Lambda, ndim=2*self._moments.ndim)
m1 = m[1] * np.prod(self._moments.shape)
return [m0, m1]
raise IndexError()
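# Illustrative usage sketch (not part of the original module; the values are arbitrary):
#   Lambda = Wishart(5, np.identity(3), name='Lambda')   # 3x3 precision-matrix prior, n=5 degrees of freedom
#   scaled = Lambda.scale(2.0)                            # deterministic node representing 2 * Lambda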
|
mit
| 6,343,873,845,033,821,000
| 25.591549
| 91
| 0.504661
| false
| 3.608563
| false
| false
| false
|
LePtitLilou/vcsmp
|
Lib/utils.py
|
1
|
16904
|
# Adapted for numpy/ma/cdms2 by convertcdms.py
import numpy
import cdtime
class VCSUtilsError (Exception):
def __init__ (self, args=None):
"""Create an exception"""
self.args = args
def __str__(self):
"""Calculate the string representation"""
return str(self.args)
__repr__ = __str__
def minmax(*data) :
'''
Function : minmax
Description of Function
    Return the minimum and maximum of a series of arrays/lists/tuples (or any combination of these)
    Values whose absolute value is greater than 1.E20 are masked
    You can combine lists/tuples/... pretty much any combination is allowed
Examples of Use
>>> s=range(7)
>>> vcs.minmax(s)
(0.0, 6.0)
>>> vcs.minmax([s,s])
(0.0, 6.0)
>>> vcs.minmax([[s,s*2],4.,[6.,7.,s]],[5.,-7.,8,(6.,1.)])
(-7.0, 8.0)
'''
mx=-1.E77
mn=1.E77
if len(data)==1 : data=data[0]
global myfunction
def myfunction(d,mx,mn):
if d is None:
return mx,mn
from numpy.ma import maximum,minimum,masked_where,absolute,greater,count
try:
d=masked_where(greater(absolute(d),9.9E19),d)
if count(d)==0 : return mx,mn
mx=float(maximum(mx,float(maximum(d))))
mn=float(minimum(mn,float(minimum(d))))
except:
for i in d:
mx,mn=myfunction(i,mx,mn)
return mx,mn
mx,mn=myfunction(data,mx,mn)
if mn==1.E77 and mx==-1.E77 :mn,mx=1.E20,1.E20
return mn,mx
def mkevenlevels(n1,n2,nlev=10):
'''
Function : mkevenlevels
Description of Function:
     Return a series of evenly spaced levels going from n1 to n2
by default 10 intervals will be produced
Examples of use:
>>> vcs.mkevenlevels(0,100)
[0.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
>>> vcs.mkevenlevels(0,100,nlev=5)
[0.0, 20.0, 40.0, 60.0, 80.0, 100.0]
>>> vcs.mkevenlevels(100,0,nlev=5)
[100.0, 80.0, 60.0, 40.0, 20.0, 0.0]
'''
import numpy.ma
lev=numpy.ma.arange(nlev+1,dtype=numpy.float)
factor=float(n2-n1)/nlev
lev=factor*lev
lev=lev+n1
return list(lev)
def mkscale(n1,n2,nc=12,zero=1):
'''
Function: mkscale
Description of function:
    This function returns a nice scale given a min and a max
option:
nc # Maximum number of intervals (default=12)
zero # Not all implemented yet so set to 1 but values will be:
-1: zero MUST NOT be a contour
0: let the function decide # NOT IMPLEMENTED
1: zero CAN be a contour (default)
2: zero MUST be a contour
Examples of Use:
>>> vcs.mkscale(0,100)
[0.0, 10.0, 20.0, 30.0, 40.0, 50.0, 60.0, 70.0, 80.0, 90.0, 100.0]
>>> vcs.mkscale(0,100,nc=5)
[0.0, 20.0, 40.0, 60.0, 80.0, 100.0]
>>> vcs.mkscale(-10,100,nc=5)
[-25.0, 0.0, 25.0, 50.0, 75.0, 100.0]
>>> vcs.mkscale(-10,100,nc=5,zero=-1)
[-20.0, 20.0, 60.0, 100.0]
>>> vcs.mkscale(2,20)
[2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
>>> vcs.mkscale(2,20,zero=2)
[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
'''
if n1==n2 : return [n1]
import numpy
nc=int(nc)
cscale=0 # ???? May be later
min, max=minmax(n1,n2)
if zero>1.:
if min>0. : min=0.
if max<0. : max=0.
rg=float(max-min) # range
delta=rg/nc # basic delta
# scale delta to be >10 and <= 100
lg=-numpy.log10(delta)+2.
il=numpy.floor(lg)
delta=delta*(10.**il)
max=max*(10.**il)
min=min*(10.**il)
if zero>-0.5:
if delta<=20.:
delta=20
elif delta<=25. :
delta=25
elif delta<=40. :
delta=40
elif delta<=50. :
delta=50
elif delta<=101. :
delta=100
first = numpy.floor(min/delta)-1.
else:
if delta<=20.:
delta=20
elif delta<=40. :
delta=40
elif delta<=60. :
delta=60
elif delta<=101. :
delta=100
first=numpy.floor(min/delta)-1.5
scvals=delta*(numpy.arange(2*nc)+first)
a=0
for j in range(len(scvals)):
if scvals[j]>min :
a=j-1
break
b=0
for j in range(len(scvals)):
if scvals[j]>=max :
b=j+1
break
if cscale==0:
cnt=scvals[a:b]/10.**il
else:
#not done yet...
raise VCSUtilsError,'ERROR scale not implemented in this function'
return list(cnt)
def __split2contiguous(levels):
""" Function __split2contiguous(levels)
takes list of split intervals and make it contiguous if possible
"""
tmplevs=[]
for il in range(len(levels)):
lv=levels[il]
if not (isinstance(lv,list) or isinstance(lv,tuple)):
raise VCSUtilsError,"Error levels must be a set of intervals"
if not len(lv)==2: raise VCSUtilsError,"Error intervals can only have 2 elements"
if il!=0:
lv2=levels[il-1]
if lv2[1]!=lv[0]:
raise VCSUtilsError,"Error intervals are NOT contiguous from "+str(lv2[1])+" to "+str(lv[0])
tmplevs.append(lv[0])
tmplevs.append(levels[-1][1])
return tmplevs
def mklabels(vals,output='dict'):
'''
Function : mklabels
Description of Function:
    This function takes levels and produces nicely formatted label strings for the level values; it returns a dictionary unless output="list" is specified
Examples of use:
>>> a=vcs.mkscale(2,20,zero=2)
>>> vcs.mklabels (a)
{20.0: '20', 18.0: '18', 16.0: '16', 14.0: '14', 12.0: '12', 10.0: '10', 8.0: '8', 6.0: '6', 4.0: '4', 2.0: '2', 0.0: '0'}
>>> vcs.mklabels ( [5,.005])
{0.0050000000000000001: '0.005', 5.0: '5.000'}
>>> vcs.mklabels ( [.00002,.00005])
{2.0000000000000002e-05: '2E-5', 5.0000000000000002e-05: '5E-5'}
>>> vcs.mklabels ( [.00002,.00005],output='list')
['2E-5', '5E-5']
'''
import string,numpy.ma
if isinstance(vals[0],list) or isinstance(vals[0],tuple):
vals=__split2contiguous(vals)
vals=numpy.ma.asarray(vals)
nvals=len(vals)
ineg=0
ext1=0
ext2=0
# Finds maximum number to write
amax=float(numpy.ma.maximum(numpy.ma.absolute(vals)))
if amax==0 :
if string.lower(output[:3])=='dic' :
return {0:'0'}
else:
return ['0']
amin,amax=minmax(numpy.ma.masked_equal(numpy.ma.absolute(vals),0))
ratio=amax/amin
if int(numpy.ma.floor(numpy.ma.log10(ratio)))+1>6:
lbls=[]
for i in range(nvals):
if vals[i]!=0:
lbls.append(mklabels([vals[i]],output='list')[0])
else:
lbls.append('0')
if string.lower(output[:3])=='dic':
dic={}
for i in range(len(vals)):
dic[float(vals[i])]=lbls[i]
return dic
else:
return lbls
tmax=float(numpy.ma.maximum(vals))
if tmax<0. :
ineg=1
vals=-vals
amax=float(numpy.ma.maximum(vals))
# Number of digit on the left of decimal point
idigleft=int(numpy.ma.floor(numpy.ma.log10(amax)))+1
# Now determine the number of significant figures
idig=0
for i in range(nvals):
aa=numpy.ma.power(10.,-idigleft)
while abs(round(aa*vals[i])-aa*vals[i])>.000001 : aa=aa*10.
idig=numpy.ma.maximum(idig,numpy.ma.floor(numpy.ma.log10(aa*numpy.ma.power(10.,idigleft))))
idig=int(idig)
# Now does the writing part
lbls=[]
# First if we need an E format
if idigleft>5 or idigleft<-2:
if idig==1:
for i in range(nvals):
aa=int(round(vals[i]/numpy.ma.power(10.,idigleft-1)))
lbls.append(str(aa)+'E'+str(idigleft-1))
else:
for i in range(nvals):
aa=str(vals[i]/numpy.ma.power(10.,idigleft-1))
ii=1
if vals[i]<0. : ii=2
aa=string.ljust(aa,idig+ii)
aa=string.replace(aa,' ','0')
lbls.append(aa+'E'+str(idigleft-1))
elif idigleft>0 and idigleft>=idig: #F format
for i in range(nvals):
lbls.append(str(int(round(vals[i]))))
else:
for i in range(nvals):
ii=1
if vals[i]<0.: ii=2
ndig=idig+ii
rdig=idig-idigleft
if idigleft<0 : ndig=idig-idigleft+1+ii
aa='%'+str(ndig)+'.'+str(rdig)+'f'
aa=aa % vals[i]
lbls.append(aa)
if ineg:
vals=-vals
for i in range(len(lbls)):
lbls[i]='-'+lbls[i]
if string.lower(output[:3])=='dic':
dic={}
for i in range(len(vals)):
dic[float(vals[i])]=str(lbls[i])
return dic
else:
return lbls
def getcolors(levs,colors=range(16,240),split=1,white=240):
'''
Function : getcolors(levs,colors=range(16,240),split=1,white=240)
Description of Function:
For isofill/boxfill purposes
      Given a list of levels this function returns the colors that would best spread a list of "user-defined" colors (default is 16 to 239, i.e. 224 colors), always using the first and last color. Optionally the color range can be split into 2 equal domains to represent <0 and >0 values.
If the colors are split an interval goes from <0 to >0 then this is assigned the "white" color
Usage:
levs : levels defining the color ranges
colors (default= range(16,240) ) : A list/tuple of the of colors you wish to use
split # parameter to split the colors between 2 equal domain:
one for positive values and one for negative values
0 : no split
1 : split if the levels go from <0 to >0
2 : split even if all the values are positive or negative
white (=240) # If split is on and an interval goes from <0 to >0 this color number will be used within this interval (240 is white in the default VCS palette color)
Examples of Use:
>>> a=[0.0, 2.0, 4.0, 6.0, 8.0, 10.0, 12.0, 14.0, 16.0, 18.0, 20.0]
>>> vcs.getcolors (a)
[16, 41, 66, 90, 115, 140, 165, 189, 214, 239]
>>> vcs.getcolors (a,colors=range(16,200))
[16, 36, 57, 77, 97, 118, 138, 158, 179, 199]
>>> vcs.getcolors(a,colors=[16,25,15,56,35,234,12,11,19,32,132,17])
[16, 25, 15, 35, 234, 12, 11, 32, 132, 17]
>>> a=[-6.0, -2.0, 2.0, 6.0, 10.0, 14.0, 18.0, 22.0, 26.0]
>>> vcs.getcolors (a,white=241)
[72, 241, 128, 150, 172, 195, 217, 239]
>>> vcs.getcolors (a,white=241,split=0)
[16, 48, 80, 112, 143, 175, 207, 239]
'''
import string
if len(levs)==1: return [colors[0]]
if isinstance(levs[0],list) or isinstance(levs[0],tuple):
tmplevs=[levs[0][0]]
for i in range(len(levs)):
if i!=0:
if levs[i-1][1]*levs[i][0]<0.:
tmplevs[-1]=0.
tmplevs.append(levs[i][1])
levs=tmplevs
# Take care of the input argument split
if isinstance(split,str):
if split.lower()=='no' :
split=0
elif split.lower()=='force' :
split=2
else :
split=1
# Take care of argument white
if isinstance(white,str): white=string.atoi(white)
# Gets first and last value, and adjust if extensions
mn=levs[0]
mx=levs[-1]
# If first level is < -1.E20 then use 2nd level for mn
if levs[0]<=-9.E19 and levs[1]>0. : mn=levs[1]
# If last level is > 1.E20 then use 2nd to last level for mx
if levs[-1]>=9.E19 and levs[-2]<0. : mx=levs[-2]
# Do we need to split the palette in 2 ?
sep=0
if mx*mn<0. and split==1 : sep=1
if split==2 : sep=1
# Determine the number of colors to use
nc=len(levs)-1
## In case only 2 levels, i.e only one color to return
if nc==1:
if split>0 and levs[0]*levs[1]<=0: # Change of sign
return white
else:
return colors[0]
# Number of colors passed
ncols=len(colors)
k=0 #???
col=[]
# Counts the number of negative colors
nn=0 # initialize
#if (mn<=0.) and (levs[0]<=-9.E19) : nn=nn+1 # Ext is one more <0 box
zr=0 # Counter to know if you stop by zero or it is included in a level
for i in range(nc):
if levs[i]<0.: nn=nn+1 # Count nb of <0 box
if levs[i]==0.: zr=1 # Do we stop at zero ?
np=nc-nn # Nb of >0 box is tot - neg -1 for the blank box
if mx*mn<0. and zr==0 :nn=nn-1 # we have a split cell bet + and - so remove a -
# Determine the interval (in colors) between each level
cinc=(ncols-1.)/float(nc-1.)
# Determine the interval (in colors) between each level (neg)
cincn=0.
if nn!=0 and nn!=1 : cincn=(ncols/2.-1.)/float(nn-1.)
# Determine the interval (in colors) between each level (pos)
cincp=0
isplit=0
if np!=0 and np!=1 : cincp=(ncols/2.-1.)/float(np-1.)
if sep!=1:
for i in xrange(nc):
cv=i*cinc
col.append(colors[int(round(cv))])
else:
colp=[]
coln=[]
col=[]
for i in xrange(nc):
if levs[i] < 0 :
cv=i*cincn
# if nn==1 : cv=len(colors)/4. # if only 1 neg then use the middle of the neg colors
if (levs[i])*(levs[i+1])<0 :
col.append(white)
isplit=1
else:
col.append(colors[int(round(cv))])
else:
if np==1 : cv=3*len(colors)/4. # if only 1 pos then use the middle of the pos colors
cv=ncols/2.+(i-nn-isplit)*cincp
col.append(colors[int(round(cv))])
if col[0]==white and levs[0]<-9.E19: col[0]=colors[0]
return col
def generate_time_labels(d1,d2,units,calendar=cdtime.DefaultCalendar):
""" generate_time_labels(self,d1,d2,units,calendar=cdtime.DefaultCalendar)
returns a dictionary of time labels for an interval of time, in a user defined units system
  d1 and d2 must be cdtime objects; if not, they will be assumed to be in "units"
Example:
lbls = generate_time_labels(cdtime.reltime(0,'months since 2000'),
cdtime.reltime(12,'months since 2000'),
'days since 1800',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'days since 1800'
lbls = generate_time_labels(cdtime.reltime(0,'months since 2000'),
cdtime.comptime(2001),
'days since 1800',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'days since 1800'
lbls = generate_time_labels(0,
12,
'months since 2000',
)
This generated a dictionary of nice time labels for the year 2000 in units of 'months since 2000'
"""
if isinstance(d1,(int,long,float)):
d1=cdtime.reltime(d1,units)
if isinstance(d2,(int,long,float)):
d2=cdtime.reltime(d2,units)
d1r=d1.torel(units,calendar)
d2r=d2.torel(units,calendar)
d1,d2=minmax(d1r.value,d2r.value)
u=units.split('since')[0].strip().lower()
dic={}
if u in ['month','months']:
delta=(d2-d1)*30
elif u in ['year','years']:
delta=(d2-d1)*365
elif u in ['hours','hour']:
delta=(d2-d1)/24.
elif u in ['minute','minutes']:
delta=(d2-d1)/24./60.
elif u in ['second','seconds']:
    delta=(d2-d1)/24./60./60. # convert seconds to days
else:
delta=d2-d1
if delta<.042: # less than 1 hour
levs=mkscale(d1,d2)
for l in levs:
dic[l]=str(cdtime.reltime(l,units).tocomp(calendar))
elif delta<1: # Less than a day put a label every hours
d1=d1r.torel('hours since 2000').value
d2=d2r.torel('hours since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'hours since 2000').tocomp(calendar)
if t.minute>30:
t=t.add(1,cdtime.Hour)
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split(':')[0]
elif delta<90: # Less than 3 month put label every day
d1=d1r.torel('days since 2000').value
d2=d2r.torel('days since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'days since 2000').tocomp(calendar)
if t.hour>12:
t=t.add(1,cdtime.Day)
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split(' ')[0]
elif delta<800: # ~ Less than 24 month put label every month
d1=d1r.torel('months since 2000').value
d2=d2r.torel('months since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'months since 2000').tocomp(calendar)
if t.day>15:
t=t.add(1,cdtime.Month)
t.day=1
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]='-'.join(str(t).split('-')[:2])
else: # ok lots of years, let auto decide but always puts at Jan first
d1=d1r.torel('years since 2000').value
d2=d2r.torel('years since 2000').value
d1,d2=minmax(d1,d2)
levs=mkscale(d1,d2)
for l in levs:
t=cdtime.reltime(l,'years since 2000').tocomp(calendar)
if t.month>6:
t=t.add(1,cdtime.Year)
t.month=1
t.day=1
t.hour=0
t.minute=0
t.second=0
tr=t.torel(units,calendar)
dic[tr.value]=str(t).split('-')[0]
return dic
|
lgpl-3.0
| 8,316,699,669,609,183,000
| 31.383142
| 284
| 0.585424
| false
| 2.879727
| false
| false
| false
|
sonymoon/algorithm
|
src/main/python/leetcode-python/easy/206.Reverse Linked List.py
|
1
|
1534
|
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
def __str__(self):
        return str(self.val)
class Solution:
def reverseList(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if not head:
return head
pre = None
while head is not None:
temp = head.next
head.next = pre
pre = head
head = temp
return pre
def stringToIntegerList(input):
import json
return json.loads(input)
def stringToListNode(input):
# Generate list from the input
numbers = stringToIntegerList(input)
# Now convert that list into linked list
dummyRoot = ListNode(0)
ptr = dummyRoot
for number in numbers:
ptr.next = ListNode(number)
ptr = ptr.next
ptr = dummyRoot.next
return ptr
def listNodeToString(node):
if not node:
return "[]"
result = ""
while node:
result += str(node.val) + ", "
node = node.next
return "[" + result[:-2] + "]"
def main():
import sys
def readlines():
for line in sys.stdin:
yield line.strip('\n')
lines = readlines()
while True:
try:
line = next(lines)
head = stringToListNode(line);
ret = Solution().reverseList(head)
out = listNodeToString(ret);
print(out)
except StopIteration:
break
if __name__ == '__main__':
main()
|
apache-2.0
| -5,421,362,284,684,240,000
| 18.417722
| 46
| 0.52412
| false
| 4.134771
| false
| false
| false
|
bregmanstudio/BregmanToolkit
|
bregman/examples/6_dissonance.py
|
1
|
2827
|
# MUSIC014/102 - Music, Information, Neuroscience,
# Week 1 Lab
# Using the Plompt and Levelt dissonance function
#
# Professor Michael Casey, 1/7/2015
from pylab import *
from bregman.suite import *
import scipy.signal as signal
import pdb
def ideal_chromatic_dissonance(num_harmonics=7, f0=440):
"""
One octave of chromatic dissonance values
"""
harms = arange(num_harmonics)+1
freqs = [f0*i for i in harms]
amps = [exp(-.5*i) for i in harms]
freqs2 = array([[f0*2**(k/12.)*i for i in harms] for k in range(0,13)])
all_amps = r_[amps,amps]
diss = []
for f in freqs2:
all_freqs = r_[freqs,f]
idx = all_freqs.argsort()
diss.append(dissonance_fun(all_freqs[idx], all_amps[idx]))
return array(diss)
def get_peaks(F):
"""
Extract peaks from linear spectrum in F
Algorithm 1: zero-crossings of derivative of smoothed spectrum
"""
X = F.X.copy()
b,a = signal.butter(10, .25) # lp filter coefficients
# Smoothing
    X = signal.filtfilt(b,a,X,axis=0)  # filtfilt returns the smoothed array; it does not modify X in place
# Derivative
Xd = diff(X,axis=0)
# Zero crossing
thresh=1e-9
peak_idx = []
for i,x in enumerate(Xd.T):
idx = where((x[:-1]>thresh)&(x[1:]<-thresh))[0] + 1
if len(idx):
idx = idx[X[idx,i].argsort()][::-1]
peak_idx.append(idx)
return peak_idx
def audio_chromatic_scale(f0=440, num_harmonics=7):
N = 11025
nH = num_harmonics
H = vstack([harmonics(f0=f0*2**(k/12.),num_harmonics=nH, num_points=N) for k in arange(13)])
return H
def audio_chromatic_dissonance(f0=440, num_harmonics=7, num_peaks=10):
sr = 44100
nfft = 8192
afreq = sr/nfft
H = audio_chromatic_scale(f0=f0, num_harmonics=num_harmonics)
h0 = H[0]
diss = []
for i,h in enumerate(H):
F = LinearFrequencySpectrum((h0+h)/2.,nfft=nfft,wfft=nfft/2,nhop=nfft/4)
P = get_peaks(F)
frame = []
for j,p in enumerate(P):
freqs = afreq*p[:num_peaks] # take middle frame as reference
mags = F.X[p[:num_peaks],j]
idx = freqs.argsort()
frame.append(dissonance_fun(freqs[idx],mags[idx]))
diss.append(array(frame).mean())
return array(diss)
def dissonance_plot(f0=440, num_harmonics=7, num_peaks=10):
figure()
diss_i = ideal_chromatic_dissonance(f0=f0, num_harmonics=num_harmonics)
diss = audio_chromatic_dissonance(f0=f0, num_harmonics=num_harmonics, num_peaks=num_peaks)
plot(diss_i / diss_i.max(), linestyle='--', linewidth=2)
plot(diss / diss.max())
t_str = 'f0=%d, partials=%d, peaks=%d'%(f0,num_harmonics,num_peaks)
title('Dissonance (chromatic): '+t_str,fontsize=16)
legend(['ideal','estimated'])
xlabel('Pitch class (chroma)',fontsize=14)
ylabel('Dissonance',fontsize=14)
grid()
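# Illustrative entry point (not part of the original example; the arguments mirror the defaults above):
#   if __name__ == '__main__':
#       dissonance_plot(f0=440, num_harmonics=7, num_peaks=10)
#       show()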
|
mit
| 2,914,253,153,146,188,300
| 31.494253
| 96
| 0.615493
| false
| 2.715658
| false
| false
| false
|
cherbib/fofix
|
src/Guitar.py
|
1
|
95964
|
#####################################################################
# -*- coding: iso-8859-1 -*- #
# #
# Frets on Fire #
# Copyright (C) 2006 Sami Kyostila #
# 2008 Alarian #
# 2008 myfingershurt #
# 2008 Capo #
# 2008 Glorandwarf #
# 2008 QQStarS #
# 2008 Blazingamer #
# 2008 evilynux <evilynux@gmail.com> #
# #
# This program is free software; you can redistribute it and/or #
# modify it under the terms of the GNU General Public License #
# as published by the Free Software Foundation; either version 2 #
# of the License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, #
# MA 02110-1301, USA. #
#####################################################################
from Song import Note, Tempo
from Mesh import Mesh
from Neck import Neck
import random
from copy import deepcopy
from Shader import shaders
from OpenGL.GL import *
import math
#myfingershurt: needed for multi-OS file fetching
import os
import Log
import Song #need the base song defines as well
from Instrument import *
class Guitar(Instrument):
def __init__(self, engine, playerObj, editorMode = False, player = 0, bass = False):
Instrument.__init__(self, engine, playerObj, player)
self.isDrum = False
self.isBassGuitar = bass
self.isVocal = False
self.debugMode = False
self.gameMode2p = self.engine.world.multiMode
self.matchingNotes = []
self.starSpinFrameIndex = 0
self.starSpinFrames = 16
self.logClassInits = self.engine.config.get("game", "log_class_inits")
if self.logClassInits == 1:
Log.debug("Guitar class init...")
#death_au: fixed neck size
#if self.engine.theme.twoDnote == False or self.engine.theme.twoDkeys == False:
#self.boardWidth = 3.6
#self.boardLength = 9.0
self.lastPlayedNotes = [] #MFH - for reverting when game discovers it implied incorrectly
self.missedNotes = []
self.missedNoteNums = []
self.editorMode = editorMode
#########For Animations
self.Animspeed = 30#Lower value = Faster animations
#For Animated Starnotes
self.indexCount = 0
#Alarian, For animated hitglow
self.HCountAni = False
#myfingershurt:
self.hopoStyle = self.engine.config.get("game", "hopo_system")
self.gh2sloppy = self.engine.config.get("game", "gh2_sloppy")
if self.gh2sloppy == 1:
self.hopoStyle = 4
self.sfxVolume = self.engine.config.get("audio", "SFX_volume")
#blazingamer
self.killfx = self.engine.config.get("performance", "killfx")
self.killCount = 0
self.bigMax = 1
#Get theme
themename = self.engine.data.themeLabel
#now theme determination logic is only in data.py:
self.theme = self.engine.data.theme
self.oFlash = None
#myfingershurt:
self.bassGrooveNeckMode = self.engine.config.get("game", "bass_groove_neck")
self.starspin = self.engine.config.get("performance", "starspin")
if self.twoDnote == True:
#Spinning starnotes or not?
#myfingershurt: allowing any non-Rock Band theme to have spinning starnotes if the SpinNotes.png is available in that theme's folder
if self.starspin == True and self.theme < 2:
#myfingershurt: check for SpinNotes, if not there then no animation
if self.gameMode2p == 6:
if engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotesbattle.png")):
self.starSpinFrames = 8
else:
self.starspin = False
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"spinnotes.png")):
self.starspin = False
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notesbattle.png")):
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
else:
engine.loadImgDrawing(self, "noteButtons", os.path.join("themes",themename,"notes.png"))
#mfh - adding fallback for beta option
else:
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "note.dae")):
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "note.dae")))
else:
engine.resource.load(self, "noteMesh", lambda: Mesh(engine.resource.fileName("note.dae")))
for i in range(5):
if engine.loadImgDrawing(self, "notetex"+chr(97+i), os.path.join("themes", themename, "notetex_"+chr(97+i)+".png")):
self.notetex = True
else:
self.notetex = False
break
if self.engine.fileExists(os.path.join("themes", themename, "star.dae")):
engine.resource.load(self, "starMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "star.dae")))
else:
self.starMesh = None
for i in range(5):
if engine.loadImgDrawing(self, "startex"+chr(97+i), os.path.join("themes", themename, "startex_"+chr(97+i)+".png")):
self.startex = True
else:
self.startex = False
break
for i in range(5):
if engine.loadImgDrawing(self, "staratex"+chr(97+i), os.path.join("themes", themename, "staratex_"+chr(97+i)+".png")):
self.staratex = True
else:
self.staratex = False
break
if self.gameMode2p == 6:
if not engine.loadImgDrawing(self, "battleFrets", os.path.join("themes", themename,"battle_frets.png")):
self.battleFrets = None
if self.twoDkeys == True:
engine.loadImgDrawing(self, "fretButtons", os.path.join("themes",themename,"fretbuttons.png"))
else:
defaultKey = False
#MFH - can't use IOError for fallback logic for a Mesh() call...
if self.engine.fileExists(os.path.join("themes", themename, "key.dae")):
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("themes", themename, "key.dae")))
else:
engine.resource.load(self, "keyMesh", lambda: Mesh(engine.resource.fileName("key.dae")))
defaultKey = True
if defaultKey:
self.keytex = False
else:
for i in range(5):
if engine.loadImgDrawing(self, "keytex"+chr(97+i), os.path.join("themes", themename, "keytex_"+chr(97+i)+".png")):
self.keytex = True
else:
self.keytex = False
break
#inkk: loading theme-dependant tail images
#myfingershurt: must ensure the new tails don't affect the Rock Band mod...
self.simpleTails = False
for i in range(0,7):
if not engine.loadImgDrawing(self, "tail"+str(i), os.path.join("themes",themename,"tails","tail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "taile"+str(i), os.path.join("themes",themename,"tails","taile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btail"+str(i), os.path.join("themes",themename,"tails","btail"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if not engine.loadImgDrawing(self, "btaile"+str(i), os.path.join("themes",themename,"tails","btaile"+str(i)+".png"), textureSize = (128, 128)):
self.simpleTails = True
break
if self.simpleTails:
Log.debug("Simple tails used; complex tail loading error...")
if not engine.loadImgDrawing(self, "tail1", os.path.join("themes",themename,"tail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail1", "tail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "tail2", os.path.join("themes",themename,"tail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "tail2", "tail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail1", os.path.join("themes",themename,"bigtail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail1", "bigtail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "bigTail2", os.path.join("themes",themename,"bigtail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "bigTail2", "bigtail2.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill1", os.path.join("themes", themename, "kill1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill1", "kill1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "kill2", os.path.join("themes", themename, "kill2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "kill2", "kill2.png", textureSize = (128, 128))
#MFH - freestyle tails (for drum fills & BREs)
if not engine.loadImgDrawing(self, "freestyle1", os.path.join("themes", themename, "freestyletail1.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle1", "freestyletail1.png", textureSize = (128, 128))
if not engine.loadImgDrawing(self, "freestyle2", os.path.join("themes", themename, "freestyletail2.png"), textureSize = (128, 128)):
engine.loadImgDrawing(self, "freestyle2", "freestyletail2.png", textureSize = (128, 128))
self.twoChordMax = False
self.rockLevel = 0.0
self.neck = Neck(self.engine, self, playerObj)
def selectPreviousString(self):
self.selectedString = (self.selectedString - 1) % self.strings
def selectString(self, string):
self.selectedString = string % self.strings
def selectNextString(self):
self.selectedString = (self.selectedString + 1) % self.strings
def noteBeingHeld(self):
noteHeld = False
for i in range(0,5):
if self.hit[i] == True:
noteHeld = True
return noteHeld
def isKillswitchPossible(self):
possible = False
for i in range(0,5):
if self.hit[i] == True:
possible = True
return possible
def renderTail(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False, freestyleTail = 0, pos = 0):
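#Renders a note's sustain tail: picks the tail body / end-cap textures by fret, star power, killswitch and freestyle state (falling back to the simple tails when the complex set is missing), then draws them as two 3D textured quads clipped to the board length.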
#volshebnyi - if freestyleTail == 0, act normally.
# if freestyleTail == 1, render a freestyle tail
# if freestyleTail == 2, render a highlighted freestyle tail
if not self.simpleTails:#Tail Colors
tailcol = (1,1,1, color[3])
else:
if big == False and tailOnly == True:
tailcol = (.6, .6, .6, color[3])
else:
tailcol = color
#volshebnyi - tail color when sp is active
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):#8bit
c = self.fretColors[5]
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], color[3])
if flat:
tailscale = (1, .1, 1)
else:
tailscale = None
if sustain:
if length is not None:
size = (.08, length)
if size[1] > self.boardLength:
s = self.boardLength
else:
s = length
# if freestyleTail == 1, render freestyle tail
if freestyleTail == 0: #normal tail rendering
#myfingershurt: so any theme containing appropriate files can use new tails
if not self.simpleTails:
if big == True and tailOnly == True:
if kill and self.killfx == 0:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.17, s - zsize)
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.btail6
tex2 = self.btaile6
else:
if fret == 0:
tex1 = self.btail1
tex2 = self.btaile1
elif fret == 1:
tex1 = self.btail2
tex2 = self.btaile2
elif fret == 2:
tex1 = self.btail3
tex2 = self.btaile3
elif fret == 3:
tex1 = self.btail4
tex2 = self.btaile4
elif fret == 4:
tex1 = self.btail5
tex2 = self.btaile5
else:
zsize = .15
size = (.1, s - zsize)
if tailOnly:#Note let go
tex1 = self.tail0
tex2 = self.taile0
else:
if self.starPowerActive and not color == (0,0,0,1):
tex1 = self.tail6
tex2 = self.taile6
else:
if fret == 0:
tex1 = self.tail1
tex2 = self.taile1
elif fret == 1:
tex1 = self.tail2
tex2 = self.taile2
elif fret == 2:
tex1 = self.tail3
tex2 = self.taile3
elif fret == 3:
tex1 = self.tail4
tex2 = self.taile4
elif fret == 4:
tex1 = self.tail5
tex2 = self.taile5
else:
if big == True and tailOnly == True:
if kill:
zsize = .25
tex1 = self.kill1
tex2 = self.kill2
#volshebnyi - killswitch tail width and color change
kEffect = ( math.sin( pos / 50 ) + 1 ) /2
size = (0.02+kEffect*0.15, s - zsize)
c = [self.killColor[0],self.killColor[1],self.killColor[2]]
if c != [0,0,0]:
for i in range(0,3):
c[i]=c[i]*kEffect+color[i]*(1-kEffect)
tailcol = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
else:
zsize = .25
size = (.11, s - zsize)
tex1 = self.bigTail1
tex2 = self.bigTail2
else:
zsize = .15
size = (.08, s - zsize)
tex1 = self.tail1
tex2 = self.tail2
else: #freestyleTail > 0
# render an inactive freestyle tail (self.freestyle1 & self.freestyle2)
zsize = .25
if self.freestyleActive:
size = (.30, s - zsize) #was .15
else:
size = (.15, s - zsize)
tex1 = self.freestyle1
tex2 = self.freestyle2
if freestyleTail == 1:
#glColor4f(*color)
c1, c2, c3, c4 = color
tailGlow = 1 - (pos - self.freestyleLastFretHitTime[fret] ) / self.freestylePeriod
if tailGlow < 0:
tailGlow = 0
color = (c1 + c1*2.0*tailGlow, c2 + c2*2.0*tailGlow, c3 + c3*2.0*tailGlow, c4*0.6 + c4*0.4*tailGlow) #MFH - this fades inactive tails' color darker
tailcol = color
if self.theme == 2 and freestyleTail == 0 and big and tailOnly and shaders.enable("tail"):
color = (color[0]*1.5,color[1]*1.5,color[2]*1.5,1.0)
shaders.setVar("color",color)
if kill and self.killfx == 0:
h = shaders.getVar("height")
shaders.modVar("height",0.5,0.06/h-0.1)
shaders.setVar("offset",(5.0-size[1],0.0))
size=(size[0]*15,size[1])
self.engine.draw3Dtex(tex1, vertex = (-size[0], 0, size[0], size[1]), texcoord = (0.0, 0.0, 1.0, 1.0),
scale = tailscale, color = tailcol)
self.engine.draw3Dtex(tex2, vertex = (-size[0], size[1], size[0], size[1] + (zsize)),
scale = tailscale, texcoord = (0.0, 0.05, 1.0, 0.95), color = tailcol)
shaders.disable()
#MFH - this block of code renders the tail "beginning" - before the note, for freestyle "lanes" only
#volshebnyi
if freestyleTail > 0 and pos < self.freestyleStart + self.freestyleLength:
self.engine.draw3Dtex(tex2, vertex = (-size[0], 0-(zsize), size[0], 0 + (.05)),
scale = tailscale, texcoord = (0.0, 0.95, 1.0, 0.05), color = tailcol)
if tailOnly:
return
def renderNote(self, length, sustain, kill, color, flat = False, tailOnly = False, isTappable = False, big = False, fret = 0, spNote = False):
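#Renders a single note head: in 2D mode it picks a sprite region from noteButtons (theme-, star-power- and spin-dependent); in 3D mode it renders the note or star mesh, optionally with per-fret textures.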
if flat:
glScalef(1, .1, 1)
if tailOnly:
return
if self.twoDnote == True:
#myfingershurt: this should be retrieved once at init, not repeatedly in-game whenever tails are rendered.
if self.notedisappear == True:#Notes keep on going when missed
notecol = (1,1,1)#capo
else:
if flat:#Notes disappear when missed
notecol = (.1,.1,.1)
else:
notecol = (1,1,1)
tailOnly = True
if self.theme < 2:
if self.starspin:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.150+self.starSpinFrameIndex*0.05, 0.175+self.starSpinFrameIndex*0.05)
else:
texY = (0.125+self.starSpinFrameIndex*0.05, 0.150+self.starSpinFrameIndex*0.05)
else:
if isTappable:
texY = (0.025,0.05)
else:
texY = (0,0.025)
if self.starPowerActive:
texY = (0.10,0.125) #QQstarS
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
else:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (0.6, 0.8)
else:
texY = (0.4,0.6)
else:
if isTappable:
texY = (0.2,0.4)
else:
texY = (0,0.2)
if self.starPowerActive:
texY = (0.8,1)
if isTappable:
texSize = (0.2,0.4)
else:
texSize = (0,0.2)
elif self.theme == 2:
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2)
texSize = (fret/5.0,fret/5.0+0.2)
if spNote == True:
if isTappable:
texY = (3*0.166667, 4*0.166667)
else:
texY = (2*0.166667, 3*0.166667)
else:
if isTappable:
texY = (1*0.166667, 2*0.166667)
else:
texY = (0, 1*0.166667)
#myfingershurt: adding spNote==False conditional so that star notes can appear in overdrive
if self.starPowerActive and spNote == False:
if isTappable:
texY = (5*0.166667, 1)
else:
texY = (4*0.166667, 5*0.166667)
self.engine.draw3Dtex(self.noteButtons, vertex = (-size[0],size[1],size[0],-size[1]), texcoord = (texSize[0],texY[0],texSize[1],texY[1]),
scale = (1,1,0), rot = (30,1,0,0), multiples = True, color = color, vertscale = .27)
else:
shaders.setVar("Material",color,"notes")
#mesh = outer ring (black)
#mesh_001 = main note (key color)
#mesh_002 = top (spot or hopo if no mesh_003)
#mesh_003 = hopo bump (hopo color)
if spNote == True and self.starMesh is not None:
meshObj = self.starMesh
else:
meshObj = self.noteMesh
glPushMatrix()
glEnable(GL_DEPTH_TEST)
glDepthMask(1)
glShadeModel(GL_SMOOTH)
if self.noterotate:
glRotatef(90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
if spNote == True and self.threeDspin == True:
glRotate(90 + self.time/3, 0, 1, 0)
#death_au: fixed 3D note colours
#volshebnyi - note color when sp is active
glColor4f(*color)
if self.starPowerActive and self.theme != 2 and not color == (0,0,0,1):
c = self.fretColors[5]
glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1)
if fret == 0: # green note
glRotate(self.engine.theme.noterot[0], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[0], 0)
elif fret == 1: # red note
glRotate(self.engine.theme.noterot[1], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[1], 0)
elif fret == 2: # yellow
glRotate(self.engine.theme.noterot[2], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[2], 0)
elif fret == 3:# blue note
glRotate(self.engine.theme.noterot[3], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[3], 0)
elif fret == 4:# orange note
glRotate(self.engine.theme.noterot[4], 0, 0, 1), glTranslatef(0, self.engine.theme.notepos[4], 0)
if self.staratex == True and self.starPowerActive and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"staratex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.notetex == True and spNote == False:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"notetex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
elif self.startex == True and spNote == True:
glColor3f(1,1,1)
glEnable(GL_TEXTURE_2D)
getattr(self,"startex"+chr(97+fret)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if isTappable:
mesh = "Mesh_001"
else:
mesh = "Mesh"
meshObj.render(mesh)
if shaders.enable("notes"):
shaders.setVar("isTextured",True)
meshObj.render(mesh)
shaders.disable()
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
if shaders.enable("notes"):
shaders.setVar("isTextured",False)
meshObj.render("Mesh_001")
shaders.disable()
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
if isTappable:
if self.hopoColor[0] == -2:
glColor4f(*color)
else:
glColor3f(self.hopoColor[0], self.hopoColor[1], self.hopoColor[2])
if(meshObj.find("Mesh_003")) == True:
meshObj.render("Mesh_003")
glColor3f(self.spotColor[0], self.spotColor[1], self.spotColor[2])
meshObj.render("Mesh_002")
glColor3f(self.meshColor[0], self.meshColor[1], self.meshColor[2])
meshObj.render("Mesh")
glDepthMask(0)
glPopMatrix()
def renderFreestyleLanes(self, visibility, song, pos):
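#Draws the five freestyle lanes (wide tails) while a Song.freestyleMarkingNote section (drum fill / BRE) is in view, and updates self.freestyleActive.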
if not song:
return
if not song.readyToGo:
return
#boardWindowMin = pos - self.currentPeriod * 2
boardWindowMax = pos + self.currentPeriod * self.beatsPerBoard
track = song.midiEventTrack[self.player]
#MFH - render 5 freestyle tails when Song.freestyleMarkingNote comes up
if self.freestyleEnabled:
freestyleActive = False
#for time, event in track.getEvents(boardWindowMin, boardWindowMax):
for time, event in track.getEvents(pos - self.freestyleOffset , boardWindowMax + self.freestyleOffset):
if isinstance(event, Song.MarkerNote):
if event.number == Song.freestyleMarkingNote:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
w = self.boardWidth / self.strings
self.freestyleLength = event.length #volshebnyi
self.freestyleStart = time # volshebnyi
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#MFH - must extend the tail past the first fretboard section dynamically so we don't have to render the entire length at once
#volshebnyi - allow tail to move under frets
if time - self.freestyleOffset < pos:
freestyleActive = True
if z < -1.5:
length += z +1.5
z = -1.5
#MFH - render 5 freestyle tails
for theFret in range(0,5):
x = (self.strings / 2 - theFret) * w
c = self.fretColors[theFret]
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (theFret + 1), z)
freestyleTailMode = 1
self.renderTail(length, sustain = True, kill = False, color = color, flat = False, tailOnly = True, isTappable = False, big = True, fret = theFret, spNote = False, freestyleTail = freestyleTailMode, pos = pos)
glPopMatrix()
self.freestyleActive = freestyleActive
def renderNotes(self, visibility, song, pos, killswitch):
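#Walks the notes in the render window, applying tempo changes, star-power / battle-object gains and miss colouring, then positions and draws each note head via renderNote.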
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
starEventsInView = False
renderedNotes = reversed(self.getRequiredNotesForRender(song,pos))
for time, event in renderedNotes:
#for time, event in reversed(track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)): #MFH - reverse order of note rendering
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
if self.lastBpmChange > 0 and self.disableVBPM == True:
continue
if (pos - time > self.currentPeriod or self.lastBpmChange < 0) and time > self.lastBpmChange:
self.baseBeat += (time - self.lastBpmChange) / self.currentPeriod
self.targetBpm = event.bpm
self.lastBpmChange = time
self.neck.lastBpmChange = time
self.neck.baseBeat = self.baseBeat
# self.setBPM(self.targetBpm) # glorandwarf: was setDynamicBPM(self.targetBpm)
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue #can't break. Tempo.
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
#volshebnyi - hide notes in BRE zone if BRE enabled
if self.freestyleEnabled and self.freestyleStart > 0:
if time >= self.freestyleStart-self.freestyleOffset and time < self.freestyleStart + self.freestyleLength+self.freestyleOffset:
z = -2.0
if self.twoDnote == True and not self.useFretColors:
color = (1,1,1, 1 * visibility * f)
else:
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if self.starPowerActive:
if self.spRefillMode == 0: #mode 0 = no starpower / overdrive refill notes
self.spEnabled = False
elif self.spRefillMode == 1 and self.theme != 2: #mode 1 = overdrive refill notes in RB themes only
self.spEnabled = False
elif self.spRefillMode == 2 and song.midiStyle != 1: #mode 2 = refill based on MIDI type
self.spEnabled = False
if event.star:
#self.isStarPhrase = True
starEventsInView = True
if event.finalStar:
self.finalStarSeen = True
starEventsInView = True
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
Log.debug("star power added")
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
isTappable = event.tappable >= 2
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
#MFH - filter out this tail whitening when starpower notes have been disabled from a screwup
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if shaders.turnon:
shaders.setVar("note_position",(x, (1.0 - visibility) ** (event.number + 1), z),"notes")
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote)
else:
self.renderNote(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote)
glPopMatrix()
if (not starEventsInView and self.finalStarSeen):
self.spEnabled = True
self.finalStarSeen = False
self.isStarPhrase = False
def renderTails(self, visibility, song, pos, killswitch):
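#Same traversal as renderNotes, but draws only the sustain tails via renderTail; when the killswitch is held and killfx == 1 it also draws the whammy waveform over the played notes.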
if not song:
return
if not song.readyToGo:
return
# Update dynamic period
self.currentPeriod = self.neckSpeed
#self.targetPeriod = self.neckSpeed
self.killPoints = False
w = self.boardWidth / self.strings
track = song.track[self.player]
num = 0
enable = True
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
#for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard):
if isinstance(event, Tempo):
self.tempoBpm = event.bpm
continue
if not isinstance(event, Note):
continue
if (event.noteBpm == 0.0):
event.noteBpm = self.tempoBpm
if self.coOpFailed:
if self.coOpRestart:
if time - self.coOpRescueTime < (self.currentPeriod * self.beatsPerBoard * 2):
continue
elif self.coOpRescueTime + (self.currentPeriod * self.beatsPerBoard * 2) < pos:
self.coOpFailed = False
self.coOpRestart = False
Log.debug("Turning off coOpFailed. Rescue successful.")
else:
continue
c = self.fretColors[event.number]
x = (self.strings / 2 - event.number) * w
z = ((time - pos) / self.currentPeriod) / self.beatsPerUnit
z2 = ((time + event.length - pos) / self.currentPeriod) / self.beatsPerUnit
if z > self.boardLength * .8:
f = (self.boardLength - z) / (self.boardLength * .2)
elif z < 0:
f = min(1, max(0, 1 + z2))
else:
f = 1.0
color = (.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], 1 * visibility * f)
if event.length > 120:
length = (event.length - 50) / self.currentPeriod / self.beatsPerUnit
else:
length = 0
flat = False
tailOnly = False
spNote = False
#myfingershurt: user setting for starpower refill / replenish notes
if event.star and self.spEnabled:
spNote = True
if event.finalStar and self.spEnabled:
spNote = True
if event.played or event.hopod:
if event.flameCount < 1 and not self.starPowerGained:
if self.gameMode2p == 6:
if self.battleSuddenDeath:
self.battleObjects = [1] + self.battleObjects[:2]
else:
self.battleObjects = [self.battleObjectsEnabled[random.randint(0,len(self.battleObjectsEnabled)-1)]] + self.battleObjects[:2]
self.battleGetTime = pos
self.battleObjectGained = True
Log.debug("Battle Object Gained, Objects %s" % str(self.battleObjects))
else:
if self.starPower < 100:
self.starPower += 25
if self.starPower > 100:
self.starPower = 100
self.neck.overdriveFlashCount = 0 #MFH - this triggers the oFlash strings & timer
self.starPowerGained = True
self.neck.ocount = 0
isTappable = event.tappable >= 2
# Clip the played notes to the origin
#myfingershurt: this should be loaded once at init, not every render...
if self.notedisappear == True:#Notes keep on going when missed
###Capo###
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
if z < 0 and not (event.played or event.hopod):
color = (.6, .6, .6, .5 * visibility * f)
flat = True
###endCapo###
else:#Notes disappear when missed
if z < 0:
if event.played or event.hopod:
tailOnly = True
length += z
z = 0
if length <= 0:
continue
else:
color = (.6, .6, .6, .5 * visibility * f)
flat = True
big = False
self.bigMax = 0
for i in range(0,5):
if self.hit[i]:
big = True
self.bigMax += 1
if self.spEnabled and killswitch:
if event.star or event.finalStar:
if big == True and tailOnly == True:
self.killPoints = True
color = (1,1,1,1)
if z + length < -1.0:
continue
if event.length <= 120:
length = None
sustain = False
if event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
sustain = True
glPushMatrix()
glTranslatef(x, (1.0 - visibility) ** (event.number + 1), z)
if self.battleStatus[8]:
renderNote = random.randint(0,2)
else:
renderNote = 0
if renderNote == 0:
if big == True and num < self.bigMax:
num += 1
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, big = True, fret = event.number, spNote = spNote, pos = pos)
else:
self.renderTail(length, sustain = sustain, kill = killswitch, color = color, flat = flat, tailOnly = tailOnly, isTappable = isTappable, fret = event.number, spNote = spNote, pos = pos)
glPopMatrix()
if killswitch and self.killfx == 1:
glBlendFunc(GL_SRC_ALPHA, GL_ONE)
for time, event in self.playedNotes:
step = self.currentPeriod / 16
t = time + event.length
x = (self.strings / 2 - event.number) * w
c = self.fretColors[event.number]
s = t
proj = 1.0 / self.currentPeriod / self.beatsPerUnit
zStep = step * proj
def waveForm(t):
u = ((t - time) * -.1 + pos - time) / 64.0 + .0001
return (math.sin(event.number + self.time * -.01 + t * .03) + math.cos(event.number + self.time * .01 + t * .02)) * .1 + .1 + math.sin(u) / (5 * u)
glBegin(GL_TRIANGLE_STRIP)
f1 = 0
while t > time:
if ((t-pos)*proj) < self.boardLength:
z = (t - pos) * proj
else:
z = self.boardLength
if z < 0:
break
f2 = min((s - t) / (6 * step), 1.0)
a1 = waveForm(t) * f1
a2 = waveForm(t - step) * f2
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x - a1, 0, z)
glVertex3f(x - a2, 0, z - zStep)
glColor4f(1, 1, 1, .75)
glVertex3f(x, 0, z)
glVertex3f(x, 0, z - zStep)
if self.starPowerActive and self.theme != 2:#8bit
glColor4f(self.spColor[0],self.spColor[1],self.spColor[2],1) #(.3,.7,.9,1)
else:
glColor4f(c[0], c[1], c[2], .5)
glVertex3f(x + a1, 0, z)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x + a2, 0, z - zStep)
glVertex3f(x - a2, 0, z - zStep)
t -= step
f1 = f2
glEnd()
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
def renderFrets(self, visibility, song, controls):
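#Draws the five fret buttons (2D sprite sheet or 3D key mesh), the fret-activity glow and the hit glow, honouring battle effects such as whammy and string break.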
w = self.boardWidth / self.strings
size = (.22, .22)
v = 1.0 - visibility
glEnable(GL_DEPTH_TEST)
#Hitglow color option - myfingershurt sez this should be a Guitar class global, not retrieved every fret render in-game...
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.fretPress:
y = v + f / 6
else:
y = v / 6
x = (self.strings / 2 - n) * w
if self.twoDkeys == True:
if self.battleStatus[4]:
fretWhamOffset = self.battleWhammyNow * .15
fretColor = (1,1,1,.5)
else:
fretWhamOffset = 0
fretColor = (1,1,1,1)
size = (self.boardWidth/self.strings/2, self.boardWidth/self.strings/2.4)
if self.battleStatus[3] and self.battleFrets != None and self.battleBreakString == n:
texSize = (n/5.0+.042,n/5.0+0.158)
size = (.30, .40)
fretPos = 8 - round((self.battleBreakNow/self.battleBreakLimit) * 8)
texY = (fretPos/8.0,(fretPos + 1.0)/8)
self.engine.draw3Dtex(self.battleFrets, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + .08 + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
texSize = (n/5.0,n/5.0+0.2)
texY = (0.0,1.0/3.0)
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]):
texY = (1.0/3.0,2.0/3.0)
if self.hit[n] or (self.battleStatus[3] and self.battleBreakString == n):
texY = (2.0/3.0,1.0)
self.engine.draw3Dtex(self.fretButtons, vertex = (size[0],size[1],-size[0],-size[1]), texcoord = (texSize[0], texY[0], texSize[1], texY[1]),
coord = (x,v + fretWhamOffset,0), multiples = True,color = fretColor, depth = True)
else:
if self.keyMesh:
glPushMatrix()
glDepthMask(1)
glEnable(GL_LIGHTING)
glEnable(GL_LIGHT0)
glShadeModel(GL_SMOOTH)
glRotatef(90, 0, 1, 0)
glLightfv(GL_LIGHT0, GL_POSITION, (5.0, 10.0, -10.0, 0.0))
glLightfv(GL_LIGHT0, GL_AMBIENT, (.2, .2, .2, 0.0))
glLightfv(GL_LIGHT0, GL_DIFFUSE, (1.0, 1.0, 1.0, 0.0))
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
if n == 0: #green fret button
glRotate(self.engine.theme.keyrot[0], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[0])
elif n == 1: #red fret button
glRotate(self.engine.theme.keyrot[1], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[1])
elif n == 2: #yellow fret button
glRotate(self.engine.theme.keyrot[2], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[2])
elif n == 3: #blue fret button
glRotate(self.engine.theme.keyrot[3], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[3])
elif n == 4: #orange fret button
glRotate(self.engine.theme.keyrot[4], 0, 1, 0), glTranslatef(0, 0, self.engine.theme.keypos[4])
#Mesh - Main fret
#Key_001 - Top of fret (key_color)
#Key_002 - Bottom of fret (key2_color)
#Glow_001 - Only rendered when a note is hit along with the glow.svg
#if self.complexkey == True:
# glColor4f(.1 + .8 * c[0], .1 + .8 * c[1], .1 + .8 * c[2], visibility)
# if self.battleStatus[4]:
# glTranslatef(x, y + self.battleWhammyNow * .15, 0)
# else:
# glTranslatef(x, y, 0)
if self.keytex == True:
glColor4f(1,1,1,visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glEnable(GL_TEXTURE_2D)
getattr(self,"keytex"+chr(97+n)).texture.bind()
glMatrixMode(GL_TEXTURE)
glScalef(1, -1, 1)
glMatrixMode(GL_MODELVIEW)
glScalef(self.boardScaleX, self.boardScaleY, 1)
if f and not self.hit[n]:
self.keyMesh.render("Mesh_001")
elif self.hit[n]:
self.keyMesh.render("Mesh_002")
else:
self.keyMesh.render("Mesh")
glMatrixMode(GL_TEXTURE)
glLoadIdentity()
glMatrixMode(GL_MODELVIEW)
glDisable(GL_TEXTURE_2D)
else:
glColor4f(.1 + .8 * c[0] + f, .1 + .8 * c[1] + f, .1 + .8 * c[2] + f, visibility)
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15 + v * 6, 0)
else:
glTranslatef(x, y + v * 6, 0)
key = self.keyMesh
if(key.find("Glow_001")) == True:
key.render("Mesh")
if(key.find("Key_001")) == True:
glColor3f(self.keyColor[0], self.keyColor[1], self.keyColor[2])
key.render("Key_001")
if(key.find("Key_002")) == True:
glColor3f(self.key2Color[0], self.key2Color[1], self.key2Color[2])
key.render("Key_002")
else:
key.render()
glDisable(GL_LIGHTING)
glDisable(GL_LIGHT0)
glDepthMask(0)
glPopMatrix()
######################
f = self.fretActivity[n]
if f and self.disableFretSFX != True:
if self.glowColor[0] == -1:
s = 1.0
else:
s = 0.0
while s < 1:
ms = s * (math.sin(self.time) * .25 + 1)
if self.glowColor[0] == -2:
glColor3f(c[0] * (1 - ms), c[1] * (1 - ms), c[2] * (1 - ms))
else:
glColor3f(self.glowColor[0] * (1 - ms), self.glowColor[1] * (1 - ms), self.glowColor[2] * (1 - ms))
glPushMatrix()
if self.battleStatus[4]:
glTranslatef(x, y + self.battleWhammyNow * .15, 0)
else:
glTranslatef(x, y, 0)
glScalef(.1 + .02 * ms * f, .1 + .02 * ms * f, .1 + .02 * ms * f)
glRotatef( 90, 0, 1, 0)
glRotatef(-90, 1, 0, 0)
glRotatef(-90, 0, 0, 1)
if self.twoDkeys == False and self.keytex == False:
if(self.keyMesh.find("Glow_001")) == True:
key.render("Glow_001")
else:
key.render()
glPopMatrix()
s += 0.2
#Hitglow color
if self.hitglow_color == 0:
glowcol = (c[0], c[1], c[2])#Same as fret
elif self.hitglow_color == 1:
glowcol = (1, 1, 1)#Actual color in .svg-file
f += 2
if self.battleStatus[4]:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y + self.battleWhammyNow * .15, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
else:
self.engine.draw3Dtex(self.glowDrawing, coord = (x, y, 0.01), rot = (f * 90 + self.time, 0, 1, 0),
texcoord = (0.0, 0.0, 1.0, 1.0), vertex = (-size[0] * f, -size[1] * f, size[0] * f, size[1] * f),
multiples = True, alpha = True, color = glowcol)
#self.hit[n] = False #MFH -- why? This prevents frets from being rendered under / before the notes...
glDisable(GL_DEPTH_TEST)
def renderFreestyleFlames(self, visibility, controls):
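#Draws layered hit flames above any fret held during freestyle sections; each fret keeps its own animation counter in self.freestyleHitFlameCounts.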
if self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
#track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
for fretNum in range(self.strings):
if controls.getState(self.keys[fretNum]) or controls.getState(self.keys[fretNum+5]):
if self.freestyleHitFlameCounts[fretNum] < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - fretNum) * w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
#flameSize = self.flameSizes[self.scoreMultiplier - 1][fretNum]
flameSize = self.flameSizes[self.cappedScoreMult - 1][fretNum]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else: #MFH - fixing crash!
#try:
# flameColor = self.flameColors[self.scoreMultiplier - 1][fretNum]
#except IndexError:
flameColor = self.fretColors[fretNum]
if flameColor[0] == -2:
flameColor = self.fretColors[fretNum]
ff += 1.5 #ff first time is 2.75 after this
if self.freestyleHitFlameCounts[fretNum] < flameLimitHalf:
flamecol = tuple([flameColor[ifc] for ifc in range(3)])
rbStarColor = (.1, .1, .2, .3)
xOffset = (.0, - .005, .005, .0)
yOffset = (.20, .255, .255, .255)
scaleMod = .6 * ms * ff
scaleFix = (6.0, 5.5, 5.0, 4.7)
for step in range(4):
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (rbStarColor[step],)*3
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - self.freestyleHitFlameCounts[fretNum])
flamecol = tuple([flameColor[ifc]*flameColorMod for ifc in range(3)])
xOffset = (.0, - .005, .005, .005)
yOffset = (.35, .405, .355, .355)
scaleMod = .6 * ms * ff
scaleFix = (3.0, 2.5, 2.0, 1.7)
for step in range(4):
hfCount = self.freestyleHitFlameCounts[fretNum]
if step == 0:
hfCount += 1
else:
if self.starPowerActive and self.theme < 2:
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.4+.1*step,)*3
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x+xOffset[step], y+yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + .05 * step + scaleMod, hfCount/scaleFix[step] + scaleMod, hfCount/scaleFix[step] + scaleMod),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
self.freestyleHitFlameCounts[fretNum] += 1
else: #MFH - flame count is done - reset it!
self.freestyleHitFlameCounts[fretNum] = 0 #MFH
def renderFlames(self, visibility, song, pos, controls):
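#Draws the hit glow on active frets and the hit flame animations for notes that were just played, using either the static flame drawings or the animated sprite sheets.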
if not song or self.flameColors[0][0][0] == -1:
return
w = self.boardWidth / self.strings
track = song.track[self.player]
size = (.22, .22)
v = 1.0 - visibility
if self.disableFlameSFX != True and (self.HCountAni == True and self.HCount2 > 12):
for n in range(self.strings):
f = self.fretWeight[n]
c = self.fretColors[n]
if f and (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
f += 0.25
y = v + f / 6
x = (self.strings / 2 - n) * w
f = self.fretActivity[n]
if f:
ms = math.sin(self.time) * .25 + 1
ff = f
ff += 1.2
#myfingershurt: need to cap flameSizes use of scoreMultiplier to 4x, the 5x and 6x bass groove mults cause crash:
self.cappedScoreMult = min(self.scoreMultiplier,4)
flameSize = self.flameSizes[self.cappedScoreMult - 1][n]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][n]
flameColorMod = (1.19, 1.97, 10.59)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.9,.9,.9)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglowDrawing, coord = (x, y + .125, 0), rot = (90, 1, 0, 0),
scale = (0.5 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
#Alarian: Animated hitflames
else:
self.HCount = self.HCount + 1
if self.HCount > self.Animspeed-1:
self.HCount = 0
HIndex = (self.HCount * 16 - (self.HCount * 16) % self.Animspeed) / self.Animspeed
if HIndex > 15:
HIndex = 0
texX = (HIndex*(1/16.0), HIndex*(1/16.0)+(1/16.0))
self.engine.draw3Dtex(self.hitglowAnim, coord = (x, y + .225, 0), rot = (90, 1, 0, 0), scale = (2.4, 1, 3.3),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
ff += .3
flameColorMod = (1.19, 1.78, 12.22)
flamecol = tuple([flameColor[ifc]*flameColorMod[ifc] for ifc in range(3)])
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
flamecol = self.spColor
else: #Default starcolor (Rockband)
flamecol = (.8,.8,.8)
if self.Hitanim != True:
self.engine.draw3Dtex(self.hitglow2Drawing, coord = (x, y + .25, .05), rot = (90, 1, 0, 0),
scale = (.40 + .6 * ms * ff, 1.5 + .6 * ms * ff, 1 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = flamecol)
if self.disableFlameSFX != True:
flameLimit = 10.0
flameLimitHalf = round(flameLimit/2.0)
renderedNotes = self.getRequiredNotesForRender(song,pos)
for time, event in renderedNotes:
if isinstance(event, Tempo):
continue
if not isinstance(event, Note):
continue
if (event.played or event.hopod) and event.flameCount < flameLimit:
ms = math.sin(self.time) * .25 + 1
x = (self.strings / 2 - event.number) * w
xlightning = (self.strings / 2 - event.number)*2.2*w
ff = 1 + 0.25
y = v + ff / 6
if self.theme == 2:
y -= 0.5
flameSize = self.flameSizes[self.cappedScoreMult - 1][event.number]
if self.theme == 0 or self.theme == 1: #THIS SETS UP GH3 COLOR, ELSE ROCKBAND(which is DEFAULT in Theme.py)
flameColor = self.gh3flameColor
else:
flameColor = self.flameColors[self.cappedScoreMult - 1][event.number]
if flameColor[0] == -2:
flameColor = self.fretColors[event.number]
ff += 1.5 #ff first time is 2.75 after this
if self.Hitanim2 == True:
self.HCount2 = self.HCount2 + 1
self.HCountAni = False
if self.HCount2 > 12:
if not event.length > (1.4 * (60000.0 / event.noteBpm) / 4):
self.HCount2 = 0
else:
self.HCountAni = True
if event.flameCount < flameLimitHalf:
HIndex = (self.HCount2 * 13 - (self.HCount2 * 13) % 13) / 13
if HIndex > 12 and self.HCountAni != True:
HIndex = 0
texX = (HIndex*(1/13.0), HIndex*(1/13.0)+(1/13.0))
self.engine.draw3Dtex(self.hitflamesAnim, coord = (x, y + .665, 0), rot = (90, 1, 0, 0), scale = (1.6, 1.6, 4.9),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff),
texcoord = (texX[0],0.0,texX[1],1.0), multiples = True, alpha = True, color = (1,1,1))
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
if self.hitFlamesPresent == True:
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
elif self.hitFlamesPresent == True and self.Hitanim2 == False:
self.HCount2 = 13
self.HCountAni = True
if event.flameCount < flameLimitHalf:
flamecol = flameColor
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = .3
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (.1,.1,.1)
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x, y + .20, 0), rot = (90, 1, 0, 0),
scale = (.25 + .6 * ms * ff, event.flameCount/6.0 + .6 * ms * ff, event.flameCount / 6.0 + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
for i in range(3):
if self.starPowerActive:
if self.theme == 0 or self.theme == 1: #GH3 starcolor
spcolmod = 0.4+i*0.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else: #Default starcolor (Rockband)
flamecol = (0.1+i*0.1,)*3
self.engine.draw3Dtex(self.hitflames2Drawing, coord = (x-.005, y + .255, 0), rot = (90, 1, 0, 0),
scale = (.30 + i*0.05 + .6 * ms * ff, event.flameCount/(5.5 - i*0.4) + .6 * ms * ff, event.flameCount / (5.5 - i*0.4) + .6 * ms * ff),
vertex = (-flameSize * ff,-flameSize * ff,flameSize * ff,flameSize * ff), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
else:
flameColorMod = 0.1 * (flameLimit - event.flameCount)
flamecol = tuple([ifc*flameColorMod for ifc in flameColor])
scaleChange = (3.0,2.5,2.0,1.7)
yOffset = (.35, .405, .355, .355)
vtx = flameSize * ff
scaleMod = .6 * ms * ff
for step in range(4):
#draw lightning in GH themes on SP gain
if step == 0 and self.theme != 2 and event.finalStar and self.spEnabled:
self.engine.draw3Dtex(self.hitlightning, coord = (xlightning, y, 3.3), rot = (90, 1, 0, 0),
scale = (.15 + .5 * ms * ff, event.flameCount / 3.0 + .6 * ms * ff, 2), vertex = (.4,-2,-.4,2),
texcoord = (0.0,0.0,1.0,1.0), multiples = True, alpha = True, color = (1,1,1))
continue
if step == 0:
yzscaleMod = event.flameCount/ scaleChange[step]
else:
yzscaleMod = (event.flameCount + 1)/ scaleChange[step]
if self.starPowerActive:
if self.theme == 0 or self.theme == 1:
spcolmod = .7+step*.1
flamecol = tuple([isp*spcolmod for isp in self.spColor])
else:
flamecol = (.4+step*.1,)*3#Default starcolor (Rockband)
self.engine.draw3Dtex(self.hitflames1Drawing, coord = (x - .005, y + yOffset[step], 0), rot = (90, 1, 0, 0),
scale = (.25 + step*.05 + scaleMod, yzscaleMod + scaleMod, yzscaleMod + scaleMod),
vertex = (-vtx,-vtx,vtx,vtx), texcoord = (0.0,0.0,1.0,1.0),
multiples = True, alpha = True, color = flamecol)
event.flameCount += 1
def render(self, visibility, song, pos, controls, killswitch):
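#Top-level per-frame draw: lazily marks star-power phrases on the track the first time through, then draws tails, notes, freestyle lanes, frets and flames in the order required by the current state (freestyle active, frets under notes, lefty flip).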
if shaders.turnon:
shaders.globals["dfActive"] = self.drumFillsActive
shaders.globals["breActive"] = self.freestyleActive
shaders.globals["rockLevel"] = self.rockLevel
if shaders.globals["killswitch"] != killswitch:
shaders.globals["killswitchPos"] = pos
shaders.globals["killswitch"] = killswitch
shaders.modVar("height",0.2,0.2,1.0,"tail")
if not self.starNotesSet:
self.totalNotes = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
self.totalNotes += 1
stars = []
maxStars = []
maxPhrase = self.totalNotes/120
for q in range(0,maxPhrase):
for n in range(0,10):
stars.append(self.totalNotes/maxPhrase*(q)+n+maxPhrase/4)
maxStars.append(self.totalNotes/maxPhrase*(q)+10+maxPhrase/4)
i = 0
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for a in stars:
if i == a:
self.starNotes.append(time)
event.star = True
for a in maxStars:
if i == a:
self.maxStars.append(time)
event.finalStar = True
i += 1
for time, event in song.track[self.player].getAllEvents():
if not isinstance(event, Note):
continue
for q in self.starNotes:
if time == q:
event.star = True
for q in self.maxStars:
#if time == q and not event.finalStar:
# event.star = True
if time == q: #MFH - no need to mark only the final SP phrase note as the finalStar as in drums, they will be hit simultaneously here.
event.finalStar = True
self.starNotesSet = True
if not (self.coOpFailed and not self.coOpRestart):
glEnable(GL_BLEND)
glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)
glEnable(GL_COLOR_MATERIAL)
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
if self.freestyleActive:
self.renderTails(visibility, song, pos, killswitch)
self.renderNotes(visibility, song, pos, killswitch)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
self.renderFrets(visibility, song, controls)
if self.hitFlamesPresent: #MFH - only if present!
self.renderFreestyleFlames(visibility, controls) #MFH - freestyle hit flames
else:
self.renderTails(visibility, song, pos, killswitch)
if self.fretsUnderNotes: #MFH
if self.twoDnote == True:
self.renderFrets(visibility, song, controls)
self.renderNotes(visibility, song, pos, killswitch)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
else:
self.renderNotes(visibility, song, pos, killswitch)
self.renderFrets(visibility, song, controls)
self.renderFreestyleLanes(visibility, song, pos) #MFH - render the lanes on top of the notes.
if self.hitFlamesPresent: #MFH - only if present!
self.renderFlames(visibility, song, pos, controls) #MFH - only when freestyle inactive!
if self.leftyMode:
if not self.battleStatus[6]:
glScalef(-1, 1, 1)
elif self.battleStatus[6]:
glScalef(-1, 1, 1)
#return notes
#MFH - corrected and optimized:
#def getRequiredNotesMFH(self, song, pos):
def getRequiredNotesMFH(self, song, pos, hopoTroubleCheck = False):
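#Returns the unplayed notes inside the hit window (lateMargin behind to earlyMargin ahead of pos), sorted by time; with hopoTroubleCheck it instead looks ahead two earlyMargins and drops the note at pos itself.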
if self.battleStatus[2] and self.difficulty != 0:
if pos < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or pos > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
else:
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track = song.track[self.player]
if hopoTroubleCheck:
notes = [(time, event) for time, event in track.getEvents(pos, pos + (self.earlyMargin*2)) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not time==pos] #MFH - filter out the problem note that caused this check!
else:
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + self.earlyMargin) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
notes = [(time, event) for time, event in notes if (time >= (pos - self.lateMargin)) and (time <= (pos + self.earlyMargin))]
notes = sorted(notes, key=lambda x: x[0])
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def getDoubleNotes(self, notes):
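#Battle "double notes" effect: for each chord of fewer than three notes in view it adds one extra note on an adjacent fret (inside the battle effect window only), returning the augmented, time-sorted list.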
if self.battleStatus[7] and notes != []:
notes = sorted(notes, key=lambda x: x[0])
curTime = 0
tempnotes = []
tempnumbers = []
tempnote = None
curNumbers = []
noteCount = 0
for time, note in notes:
noteCount += 1
if not isinstance(note, Note):
if noteCount == len(notes) and len(curNumbers) < 3 and len(curNumbers) > 0:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
continue
if time != curTime:
if curTime != 0 and len(curNumbers) < 3:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif (curTime != 0 or noteCount == len(notes)) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
tempnotes.append((time,deepcopy(note)))
curTime = time
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
else:
curNumbers.append(note.number)
if noteCount == len(notes) and len(curNumbers) < 3:
maxNote = curNumbers[0]
minNote = curNumbers[0]
for i in range(0, len(curNumbers)):
if curNumbers[i] > maxNote:
maxNote = curNumbers[i]
if curNumbers[i] < minNote:
minNote = curNumbers[i]
curNumbers = []
if maxNote < 4:
tempnumbers.append(maxNote + 1)
elif minNote > 0:
tempnumbers.append(minNote - 1)
else:
tempnumbers.append(2)
elif noteCount == len(notes) and len(curNumbers) > 2:
tempnumbers.append(-1)
curNumbers = []
noteCount = 0
for time, note in tempnotes:
if tempnumbers[noteCount] != -1:
note.number = tempnumbers[noteCount]
noteCount += 1
if time > self.battleStartTimes[7] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[7] - self.currentPeriod * self.beatsPerBoard + self.battleDoubleLength:
notes.append((time,note))
else:
noteCount += 1
return sorted(notes, key=lambda x: x[0])
def getRequiredNotesForRender(self, song, pos):
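#Returns all events in the render window (two periods behind to beatsPerBoard ahead of pos); during the battle difficulty-up effect it merges notes from the two difficulty levels so the chart switches inside the effect window.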
if self.battleStatus[2] and self.difficulty != 0:
Log.debug(self.battleDiffUpValue)
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue]
track0 = song.track[self.player]
notes0 = [(time, event) for time, event in track0.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
song.difficulty[self.player] = Song.difficulties[self.battleDiffUpValue - 1]
track1 = song.track[self.player]
notes1 = [(time, event) for time, event in track1.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
notes = []
for time,note in notes0:
if time < self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard or time > self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
for time,note in notes1:
if time > self.battleStartTimes[2] + self.currentPeriod * self.beatsPerBoard and time < self.battleStartTimes[2] - self.currentPeriod * self.beatsPerBoard + self.battleDiffUpLength:
notes.append((time,note))
notes0 = None
notes1 = None
track0 = None
track1 = None
notes = sorted(notes, key=lambda x: x[0])
#Log.debug(notes)
else:
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.currentPeriod * 2, pos + self.currentPeriod * self.beatsPerBoard)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return notes
#MFH - corrected and optimized:
def getRequiredNotesForJurgenOnTime(self, song, pos):
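#Variant of the hit-window query used by the auto-play (Jurgen): returns unplayed notes from lateMargin behind pos up to a short window (pos + 30) ahead, time-sorted.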
track = song.track[self.player]
notes = [(time, event) for time, event in track.getEvents(pos - self.lateMargin, pos + 30) if isinstance(event, Note)]
notes = [(time, event) for time, event in notes if not (event.hopod or event.played or event.skipped)]
if self.battleStatus[7]:
notes = self.getDoubleNotes(notes)
return sorted(notes, key=lambda x: x[0]) #MFH - what the hell, this should be sorted by TIME not note number....
def controlsMatchNotes(self, controls, notes):
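#Checks the held frets against each required chord in the hit window; with two-chord-max enabled, larger chords are accepted when just their lowest and highest frets are held, the middle notes being marked skipped.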
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for k in self.keys:
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if n > max(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
def controlsMatchNotes2(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
for n in range(self.strings):
if n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
return False
if not n in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5])):
# The lower frets can be held down
if hopo == False and n >= min(requiredKeys):
return False
if twochord != 0:
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
if twochord == 2:
self.twoChord += skipped
return True
def controlsMatchNotes3(self, controls, notes, hopo = False):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if note.hopod == True and (controls.getState(self.keys[note.number]) or controls.getState(self.keys[note.number + 5])):
#if hopo == True and controls.getState(self.keys[note.number]):
self.playedNotes = []
return True
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
#chordlist.sort(lambda a, b: cmp(a[0][0], b[0][0]))
chordlist.sort(key=lambda a: a[0][0])
self.missedNotes = []
self.missedNoteNums = []
twochord = 0
for chord in chordlist:
# matching keys?
requiredKeys = [note.number for time, note in chord]
requiredKeys = self.uniqify(requiredKeys)
if len(requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(requiredKeys) - 2
requiredKeys = [min(requiredKeys), max(requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, requiredKeys, hopo)):
if twochord != 2:
for time, note in chord:
note.played = True
else:
self.twoChordApply = True
for time, note in chord:
note.skipped = True
chord[0][1].skipped = False
chord[-1][1].skipped = False
chord[0][1].played = True
chord[-1][1].played = True
break
if hopo == True:
break
self.missedNotes.append(chord)
else:
self.missedNotes = []
self.missedNoteNums = []
for chord in self.missedNotes:
for time, note in chord:
if self.debugMode:
self.missedNoteNums.append(note.number)
note.skipped = True
note.played = False
if twochord == 2:
self.twoChord += skipped
return True
#MFH - special function for HOPO intentions checking
def controlsMatchNextChord(self, controls, notes):
# no notes?
if not notes:
return False
# check each valid chord
chords = {}
for time, note in notes:
if not time in chords:
chords[time] = []
chords[time].append((time, note))
#Make sure the notes are in the right time order
chordlist = chords.values()
chordlist.sort(key=lambda a: a[0][0])
twochord = 0
for chord in chordlist:
# matching keys?
self.requiredKeys = [note.number for time, note in chord]
self.requiredKeys = self.uniqify(self.requiredKeys)
if len(self.requiredKeys) > 2 and self.twoChordMax == True:
twochord = 0
self.twoChordApply = True
for n, k in enumerate(self.keys):
if controls.getState(k):
twochord += 1
if twochord == 2:
skipped = len(self.requiredKeys) - 2
self.requiredKeys = [min(self.requiredKeys), max(self.requiredKeys)]
else:
twochord = 0
if (self.controlsMatchNote3(controls, chord, self.requiredKeys, False)):
return True
else:
return False
def uniqify(self, seq, idfun=None):
# order preserving
if idfun is None:
def idfun(x): return x
seen = {}
result = []
for item in seq:
marker = idfun(item)
# in old Python versions:
# if seen.has_key(marker)
# but in new ones:
if marker in seen: continue
seen[marker] = 1
result.append(item)
return result
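  #Illustrative note, not in the original code: uniqify() keeps the first occurrence of each
  #item and preserves order, e.g. self.uniqify([2, 0, 2, 3, 0]) -> [2, 0, 3], which is why
  #requiredKeys above keeps the frets in the order the chord listed them.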
def controlsMatchNote3(self, controls, chordTuple, requiredKeys, hopo):
if len(chordTuple) > 1:
#Chords must match exactly
for n in range(self.strings):
if (n in requiredKeys and not (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))) or (n not in requiredKeys and (controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]))):
return False
else:
#Single Note must match that note
requiredKey = requiredKeys[0]
if not controls.getState(self.keys[requiredKey]) and not controls.getState(self.keys[requiredKey+5]):
return False
#myfingershurt: this is where to filter out higher frets held when HOPOing:
if hopo == False or self.hopoStyle == 2 or self.hopoStyle == 3:
#Check for higher numbered frets if not a HOPO or if GH2 strict mode
for n, k in enumerate(self.keys):
if (n > requiredKey and n < 5) or (n > 4 and n > requiredKey + 5):
#higher numbered frets cannot be held
if controls.getState(k):
return False
return True
def areNotesTappable(self, notes):
if not notes:
return
for time, note in notes:
if note.tappable > 1:
return True
return False
def startPick(self, song, pos, controls, hopo = False):
if hopo == True:
      res = self.startPick2(song, pos, controls, hopo)   #startPick2 is a method of this class
return res
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes(song, pos)
if self.controlsMatchNotes(controls, self.matchingNotes):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
note.played = True
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
return True
return False
def startPick2(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.playedNotes = []
self.matchingNotes = self.getRequiredNotes2(song, pos, hopo)
if self.controlsMatchNotes2(controls, self.matchingNotes, hopo):
self.pickStartPos = pos
for time, note in self.matchingNotes:
if note.skipped == True:
continue
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
self.hopoLast = note.number
return True
return False
def startPick3(self, song, pos, controls, hopo = False):
if not song:
return False
if not song.readyToGo:
return False
self.lastPlayedNotes = self.playedNotes
self.playedNotes = []
self.matchingNotes = self.getRequiredNotesMFH(song, pos)
self.controlsMatchNotes3(controls, self.matchingNotes, hopo)
#myfingershurt
for time, note in self.matchingNotes:
if note.played != True:
continue
if shaders.turnon:
shaders.var["fret"][self.player][note.number]=shaders.time()
shaders.var["fretpos"][self.player][note.number]=pos
self.pickStartPos = pos
self.pickStartPos = max(self.pickStartPos, time)
if hopo:
note.hopod = True
else:
note.played = True
#self.wasLastNoteHopod = False
if note.tappable == 1 or note.tappable == 2:
self.hopoActive = time
self.wasLastNoteHopod = True
elif note.tappable == 3:
self.hopoActive = -time
self.wasLastNoteHopod = True
if hopo: #MFH - you just tapped a 3 - make a note of it. (har har)
self.hopoProblemNoteNum = note.number
self.sameNoteHopoString = True
else:
self.hopoActive = 0
self.wasLastNoteHopod = False
self.hopoLast = note.number
self.playedNotes.append([time, note])
if self.guitarSolo:
self.currentGuitarSoloHitNotes += 1
#myfingershurt: be sure to catch when a chord is played
if len(self.playedNotes) > 1:
lastPlayedNote = None
for time, note in self.playedNotes:
if isinstance(lastPlayedNote, Note):
if note.tappable == 1 and lastPlayedNote.tappable == 1:
self.LastStrumWasChord = True
#self.sameNoteHopoString = False
else:
self.LastStrumWasChord = False
lastPlayedNote = note
elif len(self.playedNotes) > 0: #ensure at least that a note was played here
self.LastStrumWasChord = False
if len(self.playedNotes) != 0:
return True
return False
def soloFreestylePick(self, song, pos, controls):
numHits = 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret+5])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
#MFH - TODO - handle freestyle picks here
def freestylePick(self, song, pos, controls):
numHits = 0
#if not song:
# return numHits
if not controls.getState(self.actions[0]) and not controls.getState(self.actions[1]):
return 0
for theFret in range(5):
self.freestyleHit[theFret] = controls.getState(self.keys[theFret])
if self.freestyleHit[theFret]:
if shaders.turnon:
shaders.var["fret"][self.player][theFret]=shaders.time()
shaders.var["fretpos"][self.player][theFret]=pos
numHits += 1
return numHits
def endPick(self, pos):
for time, note in self.playedNotes:
if time + note.length > pos + self.noteReleaseMargin:
self.playedNotes = []
return False
self.playedNotes = []
return True
def getPickLength(self, pos):
if not self.playedNotes:
return 0.0
# The pick length is limited by the played notes
pickLength = pos - self.pickStartPos
for time, note in self.playedNotes:
pickLength = min(pickLength, note.length)
return pickLength
def coOpRescue(self, pos):
self.coOpRestart = True #initializes Restart Timer
self.coOpRescueTime = pos
self.starPower = 0
Log.debug("Rescued at " + str(pos))
def run(self, ticks, pos, controls):
if not self.paused:
self.time += ticks
#MFH - Determine which frame to display for starpower notes
if self.starspin:
self.indexCount = self.indexCount + 1
if self.indexCount > self.Animspeed-1:
self.indexCount = 0
self.starSpinFrameIndex = (self.indexCount * self.starSpinFrames - (self.indexCount * self.starSpinFrames) % self.Animspeed) / self.Animspeed
if self.starSpinFrameIndex > self.starSpinFrames - 1:
self.starSpinFrameIndex = 0
#myfingershurt: must not decrease SP if paused.
if self.starPowerActive == True and self.paused == False:
self.starPower -= ticks/self.starPowerDecreaseDivisor
if self.starPower <= 0:
self.starPower = 0
self.starPowerActive = False
#MFH - call to play star power deactivation sound, if it exists (if not play nothing)
if self.engine.data.starDeActivateSoundFound:
#self.engine.data.starDeActivateSound.setVolume(self.sfxVolume)
self.engine.data.starDeActivateSound.play()
# update frets
if self.editorMode:
if (controls.getState(self.actions[0]) or controls.getState(self.actions[1])):
        activeFrets = []   #must be initialized before collecting the held frets
        for i in range(self.strings):
if controls.getState(self.keys[i]) or controls.getState(self.keys[i+5]):
activeFrets.append(i)
activeFrets = activeFrets or [self.selectedString]
else:
activeFrets = []
else:
activeFrets = [note.number for time, note in self.playedNotes]
for n in range(self.strings):
if controls.getState(self.keys[n]) or controls.getState(self.keys[n+5]) or (self.editorMode and self.selectedString == n):
self.fretWeight[n] = 0.5
else:
self.fretWeight[n] = max(self.fretWeight[n] - ticks / 64.0, 0.0)
if n in activeFrets:
self.fretActivity[n] = min(self.fretActivity[n] + ticks / 32.0, 1.0)
else:
self.fretActivity[n] = max(self.fretActivity[n] - ticks / 64.0, 0.0)
#MFH - THIS is where note sustains should be determined... NOT in renderNotes / renderFrets / renderFlames -.-
if self.fretActivity[n]:
self.hit[n] = True
else:
self.hit[n] = False
if self.vbpmLogicType == 0: #MFH - VBPM (old)
if self.currentBpm != self.targetBpm:
diff = self.targetBpm - self.currentBpm
if (round((diff * .03), 4) != 0):
self.currentBpm = round(self.currentBpm + (diff * .03), 4)
else:
self.currentBpm = self.targetBpm
self.setBPM(self.currentBpm) # glorandwarf: was setDynamicBPM(self.currentBpm)
for time, note in self.playedNotes:
if pos > time + note.length:
return False
return True
|
gpl-2.0
| -2,108,389,822,770,771,500
| 37.978067
| 223
| 0.549018
| false
| 3.462779
| false
| false
| false
|
gustavofoa/pympm
|
apps/mpm/models/Musica.py
|
1
|
2085
|
from django.db import models
class Musica(models.Model):
slug = models.SlugField(primary_key=True, max_length=100)
nome = models.CharField(max_length=255)
letra = models.TextField()
cifra = models.TextField()
info = models.TextField()
link_video = models.URLField(blank=True, null=True)
categorias = models.ManyToManyField("Categoria")
rating = models.FloatField(blank=True, null=True)
votes = models.PositiveIntegerField(blank=True, null=True)
link_lpsalmo = models.URLField(blank=True, null=True)
tem_imagem = models.BooleanField(default=False)
banner_lateral = models.ForeignKey("Banner", related_name="banner_lateral_mus", blank=True, null=True)
banner_footer = models.ForeignKey("Banner", related_name="banner_footer_mus", blank=True, null=True)
class Meta:
app_label = "mpm"
def __str__(self):
return self.nome.encode('utf-8')
def get_video_code(self):
if self.link_video:
try:
return self.link_video[self.link_video.rindex('/'):].replace("embed",'').replace('watch?v=','').replace('v=','')
except ValueError:
return ""
else:
return ""
def add_rate(self, rate):
#weighted average
self.rating = (self.rating * self.votes + rate*100/5) / (self.votes + 1)
self.votes += 1
def get_rating_per_5(self):
return self.rating * 5 / 100.0
def get_formated_rating(self):
return "%.2f" % self.rating
def get_legend(self):
plural = ""
if(self.votes > 1):
plural = "s"
retorno = "<span property='ratingValue'>%.2f</span> em <span property='ratingCount'>%d</span> voto%s"
return retorno % (self.get_rating_per_5(), self.votes, plural)
def get_absolute_url(self):
return "/musica/%s/" % self.slug
def get_inicio(self):
retorno = self.letra[:140].replace("<strong>",'').replace("<strong",'').replace("<stron",'').replace("<stro",'').replace("<str",'').replace("<st",'').replace("<s",'')
retorno = retorno.replace("</strong>",'').replace("</strong",'').replace("</stron",'').replace("</stro",'').replace("</str",'').replace("</st",'').replace("</s",'')
retorno = retorno.replace("</",'').replace("<",'')
return retorno
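# Illustrative sketch, not part of the original app: add_rate() above keeps `rating` on a
# 0-100 scale while votes arrive on a 1-5 scale, using a running weighted average.  The
# hypothetical helper below reproduces that arithmetic on plain numbers so the formula can
# be checked without touching the database.
def _example_weighted_rating(rating, votes, new_rate_1_to_5):
    # rescale the new vote to 0-100, fold it into the existing mean, and renormalise
    return (rating * votes + new_rate_1_to_5 * 100 / 5.0) / (votes + 1)
# e.g. a song rated 80.0 across 4 votes that receives a new 5-star vote:
#   _example_weighted_rating(80.0, 4, 5) == 84.0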
|
apache-2.0
| -3,247,626,394,395,597,000
| 41.55102
| 168
| 0.668106
| false
| 2.970085
| false
| false
| false
|
frerepoulet/ZeroNet
|
src/Config.py
|
1
|
20105
|
import argparse
import sys
import os
import locale
import re
import ConfigParser
class Config(object):
def __init__(self, argv):
self.version = "0.5.4"
self.rev = 2054
self.argv = argv
self.action = None
self.config_file = "zeronet.conf"
self.createParser()
self.createArguments()
def createParser(self):
# Create parser
self.parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
self.parser.register('type', 'bool', self.strToBool)
self.subparsers = self.parser.add_subparsers(title="Action to perform", dest="action")
def __str__(self):
return str(self.arguments).replace("Namespace", "Config") # Using argparse str output
# Convert string to bool
def strToBool(self, v):
return v.lower() in ("yes", "true", "t", "1")
# Create command line arguments
def createArguments(self):
trackers = [
"zero://boot3rdez4rzn36x.onion:15441",
"zero://boot.zeronet.io#f36ca555bee6ba216b14d10f38c16f7769ff064e0e37d887603548cc2e64191d:15441",
"udp://tracker.coppersurfer.tk:6969",
"udp://tracker.leechers-paradise.org:6969",
"udp://9.rarbg.com:2710",
"http://tracker.opentrackr.org:1337/announce",
"http://explodie.org:6969/announce",
"http://tracker1.wasabii.com.tw:6969/announce"
]
# Platform specific
if sys.platform.startswith("win"):
coffeescript = "type %s | tools\\coffee\\coffee.cmd"
else:
coffeescript = None
try:
language, enc = locale.getdefaultlocale()
language = language.split("_")[0]
except Exception:
language = "en"
use_openssl = True
if repr(1483108852.565) != "1483108852.565":
fix_float_decimals = True
else:
fix_float_decimals = False
this_file = os.path.abspath(__file__).replace("\\", "/")
if this_file.endswith("/Contents/Resources/core/src/Config.py"):
# Running as ZeroNet.app
if this_file.startswith("/Application") or this_file.startswith("/private") or this_file.startswith(os.path.expanduser("~/Library")):
                # Running from a non-writeable directory, put data to Application Support
start_dir = os.path.expanduser("~/Library/Application Support/ZeroNet").decode(sys.getfilesystemencoding())
else:
# Running from writeable directory put data next to .app
start_dir = re.sub("/[^/]+/Contents/Resources/core/src/Config.py", "", this_file).decode(sys.getfilesystemencoding())
config_file = start_dir + "/zeronet.conf"
data_dir = start_dir + "/data"
log_dir = start_dir + "/log"
elif this_file.endswith("/core/src/Config.py"):
# Running as exe or source is at Application Support directory, put var files to outside of core dir
start_dir = this_file.replace("/core/src/Config.py", "").decode(sys.getfilesystemencoding())
config_file = start_dir + "/zeronet.conf"
data_dir = start_dir + "/data"
log_dir = start_dir + "/log"
else:
config_file = "zeronet.conf"
data_dir = "data"
log_dir = "log"
ip_local = ["127.0.0.1"]
# Main
action = self.subparsers.add_parser("main", help='Start UiServer and FileServer (default)')
# SiteCreate
action = self.subparsers.add_parser("siteCreate", help='Create a new site')
# SiteNeedFile
action = self.subparsers.add_parser("siteNeedFile", help='Get a file from site')
action.add_argument('address', help='Site address')
action.add_argument('inner_path', help='File inner path')
# SiteDownload
action = self.subparsers.add_parser("siteDownload", help='Download a new site')
action.add_argument('address', help='Site address')
# SiteSign
action = self.subparsers.add_parser("siteSign", help='Update and sign content.json: address [privatekey]')
action.add_argument('address', help='Site to sign')
action.add_argument('privatekey', help='Private key (default: ask on execute)', nargs='?')
action.add_argument('--inner_path', help='File you want to sign (default: content.json)',
default="content.json", metavar="inner_path")
        action.add_argument('--remove_missing_optional', help='Remove optional files that are not present in the directory', action='store_true')
action.add_argument('--publish', help='Publish site after the signing', action='store_true')
# SitePublish
action = self.subparsers.add_parser("sitePublish", help='Publish site to other peers: address')
action.add_argument('address', help='Site to publish')
action.add_argument('peer_ip', help='Peer ip to publish (default: random peers ip from tracker)',
default=None, nargs='?')
action.add_argument('peer_port', help='Peer port to publish (default: random peer port from tracker)',
default=15441, nargs='?')
action.add_argument('--inner_path', help='Content.json you want to publish (default: content.json)',
default="content.json", metavar="inner_path")
# SiteVerify
action = self.subparsers.add_parser("siteVerify", help='Verify site files using sha512: address')
action.add_argument('address', help='Site to verify')
# dbRebuild
action = self.subparsers.add_parser("dbRebuild", help='Rebuild site database cache')
action.add_argument('address', help='Site to rebuild')
# dbQuery
action = self.subparsers.add_parser("dbQuery", help='Query site sql cache')
action.add_argument('address', help='Site to query')
action.add_argument('query', help='Sql query')
# PeerPing
action = self.subparsers.add_parser("peerPing", help='Send Ping command to peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port', nargs='?')
# PeerGetFile
action = self.subparsers.add_parser("peerGetFile", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('site', help='Site address')
action.add_argument('filename', help='File name to request')
action.add_argument('--benchmark', help='Request file 10x then displays the total time', action='store_true')
# PeerCmd
action = self.subparsers.add_parser("peerCmd", help='Request and print a file content from peer')
action.add_argument('peer_ip', help='Peer ip')
action.add_argument('peer_port', help='Peer port')
action.add_argument('cmd', help='Command to execute')
action.add_argument('parameters', help='Parameters to command', nargs='?')
# CryptSign
action = self.subparsers.add_parser("cryptSign", help='Sign message using Bitcoin private key')
action.add_argument('message', help='Message to sign')
action.add_argument('privatekey', help='Private key')
# Config parameters
self.parser.add_argument('--verbose', help='More detailed logging', action='store_true')
self.parser.add_argument('--debug', help='Debug mode', action='store_true')
self.parser.add_argument('--debug_socket', help='Debug socket connections', action='store_true')
self.parser.add_argument('--debug_gevent', help='Debug gevent functions', action='store_true')
self.parser.add_argument('--batch', help="Batch mode (No interactive input for commands)", action='store_true')
self.parser.add_argument('--config_file', help='Path of config file', default=config_file, metavar="path")
self.parser.add_argument('--data_dir', help='Path of data directory', default=data_dir, metavar="path")
self.parser.add_argument('--log_dir', help='Path of logging directory', default=log_dir, metavar="path")
self.parser.add_argument('--language', help='Web interface language', default=language, metavar='language')
self.parser.add_argument('--ui_ip', help='Web interface bind address', default="127.0.0.1", metavar='ip')
self.parser.add_argument('--ui_port', help='Web interface bind port', default=43110, type=int, metavar='port')
self.parser.add_argument('--ui_restrict', help='Restrict web access', default=False, metavar='ip', nargs='*')
self.parser.add_argument('--open_browser', help='Open homepage in web browser automatically',
nargs='?', const="default_browser", metavar='browser_name')
self.parser.add_argument('--homepage', help='Web interface Homepage', default='1HeLLo4uzjaLetFx6NH3PMwFP3qbRbTf3D',
metavar='address')
self.parser.add_argument('--updatesite', help='Source code update site', default='1UPDatEDxnvHDo7TXvq6AEBARfNkyfxsp',
metavar='address')
self.parser.add_argument('--size_limit', help='Default site size limit in MB', default=10, type=int, metavar='size')
self.parser.add_argument('--connected_limit', help='Max connected peer per site', default=8, type=int, metavar='connected_limit')
self.parser.add_argument('--workers', help='Download workers per site', default=5, type=int, metavar='workers')
self.parser.add_argument('--fileserver_ip', help='FileServer bind address', default="*", metavar='ip')
self.parser.add_argument('--fileserver_port', help='FileServer bind port', default=15441, type=int, metavar='port')
self.parser.add_argument('--ip_local', help='My local ips', default=ip_local, type=int, metavar='ip', nargs='*')
self.parser.add_argument('--disable_udp', help='Disable UDP connections', action='store_true')
self.parser.add_argument('--proxy', help='Socks proxy address', metavar='ip:port')
self.parser.add_argument('--bind', help='Bind outgoing sockets to this address', metavar='ip')
self.parser.add_argument('--ip_external', help='Set reported external ip (tested on start if None)', metavar='ip')
        self.parser.add_argument('--trackers', help='Bootstrapping torrent trackers', default=trackers, metavar='protocol://address', nargs='*')
self.parser.add_argument('--trackers_file', help='Load torrent trackers dynamically from a file', default=False, metavar='path')
        self.parser.add_argument('--use_openssl', help='Use OpenSSL library for speedup',
type='bool', choices=[True, False], default=use_openssl)
self.parser.add_argument('--disable_db', help='Disable database updating', action='store_true')
self.parser.add_argument('--disable_encryption', help='Disable connection encryption', action='store_true')
self.parser.add_argument('--disable_sslcompression', help='Disable SSL compression to save memory',
type='bool', choices=[True, False], default=True)
self.parser.add_argument('--keep_ssl_cert', help='Disable new SSL cert generation on startup', action='store_true')
self.parser.add_argument('--max_files_opened', help='Change maximum opened files allowed by OS to this value on startup',
default=2048, type=int, metavar='limit')
self.parser.add_argument('--stack_size', help='Change thread stack size', default=None, type=int, metavar='thread_stack_size')
self.parser.add_argument('--use_tempfiles', help='Use temporary files when downloading (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument('--stream_downloads', help='Stream download directly to files (experimental)',
type='bool', choices=[True, False], default=False)
self.parser.add_argument("--msgpack_purepython", help='Use less memory, but a bit more CPU power',
type='bool', choices=[True, False], default=True)
self.parser.add_argument("--fix_float_decimals", help='Fix content.json modification date float precision on verification',
type='bool', choices=[True, False], default=fix_float_decimals)
self.parser.add_argument("--db_mode", choices=["speed", "security"], default="speed")
self.parser.add_argument('--coffeescript_compiler', help='Coffeescript compiler for developing', default=coffeescript,
metavar='executable_path')
self.parser.add_argument('--tor', help='enable: Use only for Tor peers, always: Use Tor for every connection', choices=["disable", "enable", "always"], default='enable')
self.parser.add_argument('--tor_controller', help='Tor controller address', metavar='ip:port', default='127.0.0.1:9051')
self.parser.add_argument('--tor_proxy', help='Tor proxy address', metavar='ip:port', default='127.0.0.1:9050')
self.parser.add_argument('--tor_password', help='Tor controller password', metavar='password')
self.parser.add_argument('--tor_hs_limit', help='Maximum number of hidden services', metavar='limit', type=int, default=10)
self.parser.add_argument('--version', action='version', version='ZeroNet %s r%s' % (self.version, self.rev))
return self.parser
def loadTrackersFile(self):
self.trackers = []
for tracker in open(self.trackers_file):
if "://" in tracker:
self.trackers.append(tracker.strip())
# Find arguments specified for current action
def getActionArguments(self):
back = {}
arguments = self.parser._subparsers._group_actions[0].choices[self.action]._actions[1:] # First is --version
for argument in arguments:
back[argument.dest] = getattr(self, argument.dest)
return back
# Try to find action from argv
def getAction(self, argv):
actions = [action.choices.keys() for action in self.parser._actions if action.dest == "action"][0] # Valid actions
found_action = False
for action in actions: # See if any in argv
if action in argv:
found_action = action
break
return found_action
# Move plugin parameters to end of argument list
def moveUnknownToEnd(self, argv, default_action):
valid_actions = sum([action.option_strings for action in self.parser._actions], [])
valid_parameters = []
plugin_parameters = []
plugin = False
for arg in argv:
if arg.startswith("--"):
if arg not in valid_actions:
plugin = True
else:
plugin = False
elif arg == default_action:
plugin = False
if plugin:
plugin_parameters.append(arg)
else:
valid_parameters.append(arg)
return valid_parameters + plugin_parameters
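    # Illustrative note, not part of ZeroNet: with default_action "main",
    #   ["zeronet.py", "--someplugin_flag", "x", "--ui_port", "43111", "main"]
    # comes back as
    #   ["zeronet.py", "--ui_port", "43111", "main", "--someplugin_flag", "x"]
    # so argparse sees its own options first and plugin-defined flags trail at the end.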
# Parse arguments from config file and command line
def parse(self, silent=False, parse_config=True):
if silent: # Don't display messages or quit on unknown parameter
original_print_message = self.parser._print_message
original_exit = self.parser.exit
def silencer(parser, function_name):
parser.exited = True
return None
self.parser.exited = False
self.parser._print_message = lambda *args, **kwargs: silencer(self.parser, "_print_message")
self.parser.exit = lambda *args, **kwargs: silencer(self.parser, "exit")
argv = self.argv[:] # Copy command line arguments
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if parse_config:
argv = self.parseConfig(argv) # Add arguments from config file
self.parseCommandline(argv, silent) # Parse argv
self.setAttributes()
if not silent:
if self.fileserver_ip != "*" and self.fileserver_ip not in self.ip_local:
self.ip_local.append(self.fileserver_ip)
if silent: # Restore original functions
if self.parser.exited and self.action == "main": # Argument parsing halted, don't start ZeroNet with main action
self.action = None
self.parser._print_message = original_print_message
self.parser.exit = original_exit
# Parse command line arguments
def parseCommandline(self, argv, silent=False):
        # Find out if the action is specified on start
action = self.getAction(argv)
if not action:
argv.append("main")
action = "main"
argv = self.moveUnknownToEnd(argv, action)
if silent:
res = self.parser.parse_known_args(argv[1:])
if res:
self.arguments = res[0]
else:
self.arguments = {}
else:
self.arguments = self.parser.parse_args(argv[1:])
# Parse config file
def parseConfig(self, argv):
# Find config file path from parameters
if "--config_file" in argv:
self.config_file = argv[argv.index("--config_file") + 1]
# Load config file
if os.path.isfile(self.config_file):
config = ConfigParser.ConfigParser(allow_no_value=True)
config.read(self.config_file)
for section in config.sections():
for key, val in config.items(section):
if section != "global": # If not global prefix key with section
key = section + "_" + key
if val:
for line in val.strip().split("\n"): # Allow multi-line values
argv.insert(1, line)
argv.insert(1, "--%s" % key)
return argv
# Expose arguments as class attributes
def setAttributes(self):
# Set attributes from arguments
if self.arguments:
args = vars(self.arguments)
for key, val in args.items():
setattr(self, key, val)
def loadPlugins(self):
from Plugin import PluginManager
@PluginManager.acceptPlugins
class ConfigPlugin(object):
def __init__(self, config):
self.parser = config.parser
self.createArguments()
def createArguments(self):
pass
ConfigPlugin(self)
def saveValue(self, key, value):
if not os.path.isfile(self.config_file):
content = ""
else:
content = open(self.config_file).read()
lines = content.splitlines()
global_line_i = None
key_line_i = None
i = 0
for line in lines:
if line.strip() == "[global]":
global_line_i = i
if line.startswith(key + " = "):
key_line_i = i
i += 1
if value is None: # Delete line
if key_line_i:
del lines[key_line_i]
else: # Add / update
new_line = "%s = %s" % (key, str(value).replace("\n", "").replace("\r", ""))
if key_line_i: # Already in the config, change the line
lines[key_line_i] = new_line
elif global_line_i is None: # No global section yet, append to end of file
lines.append("[global]")
lines.append(new_line)
else: # Has global section, append the line after it
lines.insert(global_line_i + 1, new_line)
open(self.config_file, "w").write("\n".join(lines))
config = Config(sys.argv)
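# Illustrative sketch, not part of ZeroNet itself: the config file merged in by
# parseConfig() is plain INI.  Keys outside [global] get prefixed with their section
# name and multi-line values turn into repeated positional values, e.g.
#
#   [global]
#   ui_port = 43111
#   trackers =
#    zero://boot3rdez4rzn36x.onion:15441
#    udp://tracker.coppersurfer.tk:6969
#
# is re-injected into argv as "--ui_port 43111 --trackers zero://... udp://...", and
# saveValue("ui_port", 43111) writes the key back into that same [global] section.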
|
gpl-2.0
| 7,540,716,014,782,910,000
| 49.515075
| 177
| 0.608953
| false
| 4.141092
| true
| false
| false
|
waqasbhatti/wcs2kml
|
python/fitsimage.py
|
1
|
16860
|
#!/usr/bin/env python
# Library for treating FITS files as Python Imaging Library objects
# Copyright (c) 2005, 2006, 2007, Jeremy Brewer
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * The names of the contributors may not be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Changelog:
#
# 3/31/08  Fixed overflow errors that were occurring when zscale_range was
# returning weird types for zmin and zmax. Now we force zmin & zmax
# to be of builtin type float for safety.
#
# 10/17/07 Added manual range selection to FitsImage. Fixed typecode for
# numpy to use unsigned 8 bit integers.
#
# 9/25/07 Added call to fits_simple_verify() to verify input file is FITS.
# Removed kwargs from FitsImage() because pyfits doesn't use them.
#
# 9/14/07 Changed array usage from Numeric to numpy. Changed underlying
# FITS I/O library from fitslib to pyfits. Modifications made
# by Christopher Hanley.
#
# 8/20/07 Write arcsinh scaling algorithm and adding scaling options.
# Updated documentation. Dropped number of channels check on
# color -- PIL should handle this instead.
#
# 8/17/07 Wrote new scaling algorithm, percentile_range(), that determines
# the range to use from a configurable percentile cut. Now
# FitsImage() takes optional named arguments to configure which
# contrast algorithm to use. In addition, keyword arguments are
# passed on to Fits() to configure how minor errors are handled.
#
# 7/4/07 Updated to use Numeric. Improved speed of zscale_range().
#
# 10/10/06 Increased accuracy of draw_circle().
#
# 2/7/06 Updated documentation.
#
# 1/4/06 Fixed bug in zscale_range() where num_points and num_pixels
# sometimes differed, resulting in the sigma iteration failing because
# the arrays would differ in length. Now the arrays are both of
# size num_pixels. Some additional checks were also added.
#
# 12/10/05 Updated documentation.
#
# 12/8/05 Now draw_circle will not draw points that lie outside of the image.
#
# 12/7/05 Wrote zscale_range() function which implements the ds9 zscale
# autocontrast algorithm for FITs images. Wrote a new version of
# asImage(), now called FitsImage(), that returns a PIL Image object
# without use of the convert commandline utility. Rewrote convert()
# and resize() methods so that they do not have to use the convert
# command externally. Removed all of the other asImage() methods
# that weren't working.
"""
Module for treating a FITS image as a Python Imaging Library (PIL) object.
This is extremely useful if you want to convert a FITS image to jpeg and/or
perform various operations on it (such as drawing).
The contrast for FITS images is determined using the zscale algorithm by
default, but this can be configured with various options. See the
documentation for zscale_range() and percentile_range() for more information.
Example Usage:
# image is a full PIL object
image = fitsimage.FitsImage("foo.fits")
image.save("foo.jpg")
"""
__author__ = "Jeremy Brewer (jeremy.d.brewer@gmail.com)"
__copyright__ = "Copyright 2005, 2006, 2007 Jeremy Brewer"
__license__ = "BSD"
__version__ = "1.1"
import os
import sys
import cmath
import fitslib
import pyfits
import pointarray
import Image
import ImageDraw
import numpy
def zscale_range(image_data, contrast=0.25, num_points=600, num_per_row=120):
"""
Computes the range of pixel values to use when adjusting the contrast
of FITs images using the zscale algorithm. The zscale algorithm
originates in Iraf. More information about it can be found in the help
section for DISPLAY in Iraf.
Briefly, the zscale algorithm uses an evenly distributed subsample of the
input image instead of a full histogram. The subsample is sorted by
intensity and then fitted with an iterative least squares fit algorithm.
The endpoints of this fit give the range of pixel values to use when
adjusting the contrast.
Input: image_data -- the array of data contained in the FITs image
(must have 2 dimensions)
contrast -- the contrast parameter for the zscale algorithm
num_points -- the number of points to use when sampling the
image data
num_per_row -- number of points per row when sampling
Return: 1.) The minimum pixel value to use when adjusting contrast
2.) The maximum pixel value to use when adjusting contrast
"""
# check input shape
if len(image_data.shape) != 2:
raise ValueError("input data is not an image")
# check contrast
if contrast <= 0.0:
contrast = 1.0
# check number of points to use is sane
if num_points > numpy.size(image_data) or num_points < 0:
num_points = 0.5 * numpy.size(image_data)
# determine the number of points in each column
num_per_col = int(float(num_points) / float(num_per_row) + 0.5)
# integers that determine how to sample the control points
xsize, ysize = image_data.shape
row_skip = float(xsize - 1) / float(num_per_row - 1)
col_skip = float(ysize - 1) / float(num_per_col - 1)
# create a regular subsampled grid which includes the corners and edges,
# indexing from 0 to xsize - 1, ysize - 1
data = []
for i in xrange(num_per_row):
x = int(i * row_skip + 0.5)
for j in xrange(num_per_col):
y = int(j * col_skip + 0.5)
data.append(image_data[x, y])
# actual number of points selected
num_pixels = len(data)
# sort the data by intensity
data.sort()
# check for a flat distribution of pixels
data_min = min(data)
data_max = max(data)
center_pixel = (num_pixels + 1) / 2
if data_min == data_max:
return data_min, data_max
# compute the median
if num_pixels % 2 == 0:
median = data[center_pixel - 1]
else:
median = 0.5 * (data[center_pixel - 1] + data[center_pixel])
# compute an iterative fit to intensity
pixel_indeces = map(float, xrange(num_pixels))
points = pointarray.PointArray(pixel_indeces, data, min_err=1.0e-4)
fit = points.sigmaIterate()
num_allowed = 0
for pt in points.allowedPoints():
num_allowed += 1
if num_allowed < int(num_pixels / 2.0):
return data_min, data_max
# compute the limits
z1 = median - (center_pixel - 1) * (fit.slope / contrast)
z2 = median + (num_pixels - center_pixel) * (fit.slope / contrast)
if z1 > data_min:
zmin = z1
else:
zmin = data_min
if z2 < data_max:
zmax = z2
else:
zmax = data_max
# last ditch sanity check
if zmin >= zmax:
zmin = data_min
zmax = data_max
return zmin, zmax
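# Illustrative sketch, not part of the original module: exercising zscale_range() on a
# synthetic frame.  A mostly flat background with one very bright pixel should produce a
# zmin/zmax pair that hugs the background spread instead of stretching out to the bright
# outlier, which is the behaviour the iterative fit above is meant to give.
def _example_zscale_range():
    background = numpy.random.normal(1000.0, 5.0, (512, 512))
    background[256, 256] = 60000.0   # a "star" that should not dominate the contrast
    zmin, zmax = zscale_range(background, contrast=0.25)
    # expect both ends within roughly +/-100 counts of the 1000-count background,
    # far below the 60000 outlier
    return zmin, zmax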
def percentile_range(image_data, min_percent=3.0, max_percent=99.0,
num_points=5000, num_per_row=250):
"""
Computes the range of pixel values to use when adjusting the contrast
of FITs images using a simple percentile cut. For efficiency reasons,
only a subsample of the input image data is used.
Input: image_data -- the array of data contained in the FITs image
(must have 2 dimensions)
min_percent -- min percent value between (0, 100)
max_percent -- max percent value between (0, 100)
num_points -- the number of points to use when sampling the
image data
num_per_row -- number of points per row when sampling
Return: 1.) The minimum pixel value to use when adjusting contrast
2.) The maximum pixel value to use when adjusting contrast
"""
if not 0 <= min_percent <= 100:
raise ValueError("invalid value for min percent '%s'" % min_percent)
elif not 0 <= max_percent <= 100:
raise ValueError("invalid value for max percent '%s'" % max_percent)
min_percent = float(min_percent) / 100.0
max_percent = float(max_percent) / 100.0
# check input shape
if len(image_data.shape) != 2:
raise ValueError("input data is not an image")
# check number of points to use is sane
if num_points > numpy.size(image_data) or num_points < 0:
num_points = 0.5 * numpy.size(image_data)
# determine the number of points in each column
num_per_col = int(float(num_points) / float(num_per_row) + 0.5)
# integers that determine how to sample the control points
xsize, ysize = image_data.shape
row_skip = float(xsize - 1) / float(num_per_row - 1)
col_skip = float(ysize - 1) / float(num_per_col - 1)
# create a regular subsampled grid which includes the corners and edges,
# indexing from 0 to xsize - 1, ysize - 1
data = []
for i in xrange(num_per_row):
x = int(i * row_skip + 0.5)
for j in xrange(num_per_col):
y = int(j * col_skip + 0.5)
data.append(image_data[x, y])
# perform a simple percentile cut
data.sort()
zmin = data[int(min_percent * len(data))]
zmax = data[int(max_percent * len(data))]
return zmin, zmax
def FitsImage(fitsfile, contrast="zscale", contrast_opts={}, scale="linear",
scale_opts={}):
"""
Constructor-like function that returns a Python Imaging Library (PIL)
Image object. This allows extremely easy and powerful manipulation of
FITS files as images. The contrast is automatically adjusted using the
zscale algorithm (see zscale_range() above).
Input: fitsfile -- a FITS image filename
contrast -- the algorithm for determining the min/max
values in the FITS pixel data to use when
compressing the dynamic range of the FITS
data to something visible by the eye, either
"zscale", "percentile", or "manual"
contrast_opts -- options for the contrast algorithm, see
the optional args of [contrast]_range()
for what to name the keys
scale -- how to scale the pixel values between the
                             min/max values from the contrast
                             algorithm when converting to a raster
                             format, either "linear" or "arcsinh"
scale_opts -- options for the scaling algorithm, currently
only "nonlinearity" is supported for arcsinh,
which has a default value of 3
"""
if contrast not in ("zscale", "percentile", "manual"):
raise ValueError("invalid contrast algorithm '%s'" % contrast)
if scale not in ("linear", "arcsinh"):
raise ValueError("invalid scale value '%s'" % scale)
# open the fits file and read the image data and size
fitslib.fits_simple_verify(fitsfile)
fits = pyfits.open(fitsfile)
try:
hdr = fits[0].header
xsize = hdr["NAXIS1"]
ysize = hdr["NAXIS2"]
fits_data = fits[0].data
finally:
fits.close()
# compute the proper scaling for the image
if contrast == "zscale":
contrast_value = contrast_opts.get("contrast", 0.25)
num_points = contrast_opts.get("num_points", 600)
num_per_row = contrast_opts.get("num_per_row", 120)
zmin, zmax = zscale_range(fits_data, contrast=contrast_value,
num_points=num_points,
num_per_row=num_per_row)
elif contrast == "percentile":
min_percent = contrast_opts.get("min_percent", 3.0)
max_percent = contrast_opts.get("max_percent", 99.0)
num_points = contrast_opts.get("num_points", 5000)
num_per_row = contrast_opts.get("num_per_row", 250)
zmin, zmax = percentile_range(fits_data, min_percent=min_percent,
max_percent=max_percent,
num_points=num_points,
num_per_row=num_per_row)
elif contrast == "manual":
zmin = contrast_opts.get("min", None)
zmax = contrast_opts.get("max", None)
        if zmin is None:
            zmin = fits_data.min()
        if zmax is None:
            zmax = fits_data.max()
# sometimes the zscale_range or other numpy routines return different types
# for zmin and zmax (e.g. float32 and float64), which results in overflow
# errors below
zmin = float(zmin)
zmax = float(zmax)
fits_data = numpy.where(fits_data > zmin, fits_data, zmin)
fits_data = numpy.where(fits_data < zmax, fits_data, zmax)
if scale == "linear":
scaled_data = (fits_data - zmin) * (255.0 / (zmax - zmin)) + 0.5
elif scale == "arcsinh":
# nonlinearity sets the range over which we sample values of the
# asinh function; values near 0 are linear and values near infinity
# are logarithmic
nonlinearity = scale_opts.get("nonlinearity", 3.0)
nonlinearity = max(nonlinearity, 0.001)
max_asinh = cmath.asinh(nonlinearity).real
scaled_data = (255.0 / max_asinh) * \
(numpy.arcsinh((fits_data - zmin) * \
(nonlinearity / (zmax - zmin))))
# convert to 8 bit unsigned int
scaled_data = scaled_data.astype("B")
# create the image
image = Image.frombuffer("L", (xsize, ysize), scaled_data, "raw", "L", 0, 0)
return image
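# Illustrative sketch, not part of the original module: the same frame rendered with the
# two supported scalings.  "arcsinh" compresses bright cores while leaving faint pixels
# nearly linear; the "nonlinearity" option moves that transition point.  The filename is
# hypothetical.
#
#   linear = FitsImage("field.fits", contrast="percentile",
#                      contrast_opts={"min_percent": 1.0, "max_percent": 99.5})
#   soft = FitsImage("field.fits", scale="arcsinh", scale_opts={"nonlinearity": 8.0})
#   soft.convert("RGB").save("field.png")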
def draw_circle(image, x, y, radius, color):
"""
Draws a circle on image at position x, y with the given radius and
color.
Input: image -- the image object to draw the circle on
x -- the x position of the center of the circle
y -- the y position of the center of the circle
radius -- the radius of the circle in pixels
color -- a tuple containing the color of the border of the
circle, ranging from 0 to 255 for each channel
"""
# arc takes the upper left and lower right corners of a box bounding the
# circle as arguments. Here (x1, y1) gives the coordinates of the upper left
# corner and (x2, y2) gives the lower right corner of the bounding box.
x1 = int(x - radius + 0.5)
y1 = int(y - radius + 0.5)
x2 = int(x + radius + 0.5)
y2 = int(y + radius + 0.5)
xsize, ysize = image.size
# draw the circle
draw = ImageDraw.Draw(image)
draw.arc((x1, y1, x2, y2), 0, 360, fill=color)
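# Illustrative sketch, not part of the original module: overplotting a catalogue position
# on the rendered image (coordinates and radius are made up).
#
#   image = FitsImage("field.fits").convert("RGB")
#   draw_circle(image, 320.5, 240.5, 12, (0, 255, 0))
#   image.save("field_marked.png")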
def main(argv):
import time
if len(argv) != 2:
print "Usage: %s <fits-file>" % os.path.basename(argv[0])
print "Input file will be converted to JPEG"
sys.exit(2)
# FITS image to open and JPEG counterpart
fitsfile = argv[1]
name, ext = os.path.splitext(fitsfile)
jpegfile = "%s.jpg" % name
# open as PIL object
start = time.time()
image = FitsImage(fitsfile).convert("RGB")
stop = time.time()
print "Converting to PIL object took %f sec" % (stop - start)
# save as a jpeg
start = time.time()
image.save(jpegfile)
stop = time.time()
print "Saving to '%s' took %f sec" % (jpegfile, stop - start)
if __name__ == "__main__":
main(sys.argv)
|
bsd-3-clause
| -7,967,885,862,647,529,000
| 37.581236
| 80
| 0.629419
| false
| 3.837051
| false
| false
| false
|
jj0hns0n/geonode
|
geonode/layers/models.py
|
1
|
18480
|
# -*- coding: utf-8 -*-
#########################################################################
#
# Copyright (C) 2016 OSGeo
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#########################################################################
import uuid
import logging
from datetime import datetime
from django.db import models
from django.db.models import signals
from django.contrib.contenttypes.models import ContentType
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import reverse
from django.core.files.storage import FileSystemStorage
from geonode.base.models import ResourceBase, ResourceBaseManager, resourcebase_post_save
from geonode.people.utils import get_valid_user
from agon_ratings.models import OverallRating
from geonode.utils import check_shp_columnnames
from geonode.security.models import remove_object_permissions
logger = logging.getLogger("geonode.layers.models")
shp_exts = ['.shp', ]
csv_exts = ['.csv']
kml_exts = ['.kml']
vec_exts = shp_exts + csv_exts + kml_exts
cov_exts = ['.tif', '.tiff', '.geotiff', '.geotif', '.asc']
TIME_REGEX = (
('[0-9]{8}', _('YYYYMMDD')),
('[0-9]{8}T[0-9]{6}', _("YYYYMMDD'T'hhmmss")),
('[0-9]{8}T[0-9]{6}Z', _("YYYYMMDD'T'hhmmss'Z'")),
)
TIME_REGEX_FORMAT = {
'[0-9]{8}': '%Y%m%d',
'[0-9]{8}T[0-9]{6}': '%Y%m%dT%H%M%S',
'[0-9]{8}T[0-9]{6}Z': '%Y%m%dT%H%M%SZ'
}
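# Illustrative note, not part of GeoNode: TIME_REGEX_FORMAT pairs each accepted filename
# pattern with a matching strptime-style format, e.g.
#   datetime.strptime("20160131T235959Z", TIME_REGEX_FORMAT['[0-9]{8}T[0-9]{6}Z'])
# parses cleanly to datetime(2016, 1, 31, 23, 59, 59).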
class Style(models.Model):
"""Model for storing styles.
"""
name = models.CharField(_('style name'), max_length=255, unique=True)
sld_title = models.CharField(max_length=255, null=True, blank=True)
sld_body = models.TextField(_('sld text'), null=True, blank=True)
sld_version = models.CharField(
_('sld version'),
max_length=12,
null=True,
blank=True)
sld_url = models.CharField(_('sld url'), null=True, max_length=1000)
workspace = models.CharField(max_length=255, null=True, blank=True)
def __str__(self):
return "%s" % self.name.encode('utf-8')
def absolute_url(self):
if self.sld_url:
if self.sld_url.startswith(settings.OGC_SERVER['default']['LOCATION']):
return self.sld_url.split(settings.OGC_SERVER['default']['LOCATION'], 1)[1]
elif self.sld_url.startswith(settings.OGC_SERVER['default']['PUBLIC_LOCATION']):
return self.sld_url.split(settings.OGC_SERVER['default']['PUBLIC_LOCATION'], 1)[1]
return self.sld_url
else:
logger.error("SLD URL is empty for Style %s" % self.name.encode('utf-8'))
return None
class LayerManager(ResourceBaseManager):
def __init__(self):
models.Manager.__init__(self)
class Layer(ResourceBase):
"""
Layer (inherits ResourceBase fields)
"""
# internal fields
objects = LayerManager()
workspace = models.CharField(max_length=128)
store = models.CharField(max_length=128)
storeType = models.CharField(max_length=128)
name = models.CharField(max_length=128)
typename = models.CharField(max_length=128, null=True, blank=True)
is_mosaic = models.BooleanField(default=False)
has_time = models.BooleanField(default=False)
has_elevation = models.BooleanField(default=False)
time_regex = models.CharField(max_length=128, null=True, blank=True, choices=TIME_REGEX)
elevation_regex = models.CharField(max_length=128, null=True, blank=True)
default_style = models.ForeignKey(
Style,
related_name='layer_default_style',
null=True,
blank=True)
styles = models.ManyToManyField(Style, related_name='layer_styles')
charset = models.CharField(max_length=255, default='UTF-8')
upload_session = models.ForeignKey('UploadSession', blank=True, null=True)
@property
def is_remote(self):
return self.storeType == "remoteStore"
@property
def service(self):
"""Get the related service object dynamically
"""
service_layers = self.servicelayer_set.all()
if len(service_layers) == 0:
return None
else:
return service_layers[0].service
def is_vector(self):
return self.storeType == 'dataStore'
@property
def display_type(self):
return ({
"dataStore": "Vector Data",
"coverageStore": "Raster Data",
}).get(self.storeType, "Data")
@property
def data_model(self):
if hasattr(self, 'modeldescription_set'):
lmd = self.modeldescription_set.all()
if lmd.exists():
return lmd.get().get_django_model()
return None
@property
def data_objects(self):
if self.data_model is not None:
return self.data_model.objects.using('datastore')
return None
@property
def service_type(self):
if self.storeType == 'coverageStore':
return "WCS"
if self.storeType == 'dataStore':
return "WFS"
@property
def ows_url(self):
if self.is_remote:
return self.service.base_url
else:
return settings.OGC_SERVER['default']['PUBLIC_LOCATION'] + "wms"
@property
def ptype(self):
if self.is_remote:
return self.service.ptype
else:
return "gxp_wmscsource"
@property
def service_typename(self):
if self.is_remote:
return "%s:%s" % (self.service.name, self.typename)
else:
return self.typename
@property
def attributes(self):
return self.attribute_set.exclude(attribute='the_geom')
def get_base_file(self):
"""Get the shp or geotiff file for this layer.
"""
# If there was no upload_session return None
if self.upload_session is None:
return None, None
base_exts = [x.replace('.', '') for x in cov_exts + vec_exts]
base_files = self.upload_session.layerfile_set.filter(
name__in=base_exts)
base_files_count = base_files.count()
# If there are no files in the upload_session return None
if base_files_count == 0:
return None, None
msg = 'There should only be one main file (.shp or .geotiff or .asc), found %s' % base_files_count
assert base_files_count == 1, msg
# we need to check, for shapefile, if column names are valid
list_col = None
if self.storeType == 'dataStore':
valid_shp, wrong_column_name, list_col = check_shp_columnnames(self)
if wrong_column_name:
msg = 'Shapefile has an invalid column name: %s' % wrong_column_name
else:
msg = _('File cannot be opened, maybe check the encoding')
assert valid_shp, msg
# no error, let's return the base files
return base_files.get(), list_col
def get_absolute_url(self):
return reverse('layer_detail', args=(self.service_typename,))
def attribute_config(self):
# Get custom attribute sort order and labels if any
cfg = {}
visible_attributes = self.attribute_set.visible()
if (visible_attributes.count() > 0):
cfg["getFeatureInfo"] = {
"fields": [l.attribute for l in visible_attributes],
"propertyNames": dict([(l.attribute, l.attribute_label) for l in visible_attributes])
}
return cfg
def __str__(self):
if self.typename is not None:
return "%s Layer" % self.service_typename.encode('utf-8')
elif self.name is not None:
return "%s Layer" % self.name
else:
return "Unamed Layer"
class Meta:
# custom permissions,
# change and delete are standard in django-guardian
permissions = (
('change_layer_data', 'Can edit layer data'),
('change_layer_style', 'Can change layer style'),
)
# Permission Level Constants
# LEVEL_NONE inherited
LEVEL_READ = 'layer_readonly'
LEVEL_WRITE = 'layer_readwrite'
LEVEL_ADMIN = 'layer_admin'
def maps(self):
from geonode.maps.models import MapLayer
return MapLayer.objects.filter(name=self.typename)
@property
def class_name(self):
return self.__class__.__name__
@property
def geogig_enabled(self):
return (len(self.link_set.geogig()) > 0)
@property
def geogig_link(self):
if(self.geogig_enabled):
return getattr(self.link_set.filter(name__icontains='clone in geogig').first(), 'url', None)
return None
class UploadSession(models.Model):
"""Helper class to keep track of uploads.
"""
date = models.DateTimeField(auto_now=True)
user = models.ForeignKey(settings.AUTH_USER_MODEL)
processed = models.BooleanField(default=False)
error = models.TextField(blank=True, null=True)
traceback = models.TextField(blank=True, null=True)
context = models.TextField(blank=True, null=True)
def successful(self):
        return self.processed and self.error is None
class LayerFile(models.Model):
"""Helper class to store original files.
"""
upload_session = models.ForeignKey(UploadSession)
name = models.CharField(max_length=255)
base = models.BooleanField(default=False)
file = models.FileField(upload_to='layers',
storage=FileSystemStorage(base_url=settings.LOCAL_MEDIA_URL), max_length=255)
class AttributeManager(models.Manager):
"""Helper class to access filtered attributes
"""
def visible(self):
return self.get_queryset().filter(
visible=True).order_by('display_order')
class Attribute(models.Model):
"""
Auxiliary model for storing layer attributes.
This helps reduce the need for runtime lookups
to other servers, and lets users customize attribute titles,
sort order, and visibility.
"""
layer = models.ForeignKey(
Layer,
blank=False,
null=False,
unique=False,
related_name='attribute_set')
attribute = models.CharField(
_('attribute name'),
help_text=_('name of attribute as stored in shapefile/spatial database'),
max_length=255,
blank=False,
null=True,
unique=False)
description = models.CharField(
_('attribute description'),
help_text=_('description of attribute to be used in metadata'),
max_length=255,
blank=True,
null=True)
attribute_label = models.CharField(
_('attribute label'),
help_text=_('title of attribute as displayed in GeoNode'),
max_length=255,
blank=True,
null=True,
unique=False)
attribute_type = models.CharField(
_('attribute type'),
help_text=_('the data type of the attribute (integer, string, geometry, etc)'),
max_length=50,
blank=False,
null=False,
default='xsd:string',
unique=False)
visible = models.BooleanField(
_('visible?'),
help_text=_('specifies if the attribute should be displayed in identify results'),
default=True)
display_order = models.IntegerField(
_('display order'),
help_text=_('specifies the order in which attribute should be displayed in identify results'),
default=1)
# statistical derivations
count = models.IntegerField(
_('count'),
help_text=_('count value for this field'),
default=1)
min = models.CharField(
_('min'),
help_text=_('minimum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
max = models.CharField(
_('max'),
help_text=_('maximum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
average = models.CharField(
_('average'),
help_text=_('average value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
median = models.CharField(
_('median'),
help_text=_('median value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
stddev = models.CharField(
_('standard deviation'),
help_text=_('standard deviation for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
sum = models.CharField(
_('sum'),
help_text=_('sum value for this field'),
max_length=255,
blank=False,
null=True,
unique=False,
default='NA')
unique_values = models.TextField(
_('unique values for this field'),
null=True,
blank=True,
default='NA')
last_stats_updated = models.DateTimeField(
_('last modified'),
default=datetime.now,
        help_text=_('date when attribute statistics were last updated'))  # passing the method itself, not the result, so it is evaluated at save time
objects = AttributeManager()
def __str__(self):
return "%s" % self.attribute_label.encode(
"utf-8") if self.attribute_label else self.attribute.encode("utf-8")
def unique_values_as_list(self):
return self.unique_values.split(',')
def pre_save_layer(instance, sender, **kwargs):
if kwargs.get('raw', False):
instance.owner = instance.resourcebase_ptr.owner
instance.uuid = instance.resourcebase_ptr.uuid
instance.bbox_x0 = instance.resourcebase_ptr.bbox_x0
instance.bbox_x1 = instance.resourcebase_ptr.bbox_x1
instance.bbox_y0 = instance.resourcebase_ptr.bbox_y0
instance.bbox_y1 = instance.resourcebase_ptr.bbox_y1
if instance.abstract == '' or instance.abstract is None:
instance.abstract = unicode(_('No abstract provided'))
if instance.title == '' or instance.title is None:
instance.title = instance.name
# Set a default user for accountstream to work correctly.
if instance.owner is None:
instance.owner = get_valid_user()
if instance.uuid == '':
instance.uuid = str(uuid.uuid1())
if instance.typename is None:
# Set a sensible default for the typename
instance.typename = 'geonode:%s' % instance.name
base_file, info = instance.get_base_file()
if info:
instance.info = info
if base_file is not None:
extension = '.%s' % base_file.name
if extension in vec_exts:
instance.storeType = 'dataStore'
elif extension in cov_exts:
instance.storeType = 'coverageStore'
# Set sane defaults for None in bbox fields.
if instance.bbox_x0 is None:
instance.bbox_x0 = -180
if instance.bbox_x1 is None:
instance.bbox_x1 = 180
if instance.bbox_y0 is None:
instance.bbox_y0 = -90
if instance.bbox_y1 is None:
instance.bbox_y1 = 90
bbox = [
instance.bbox_x0,
instance.bbox_x1,
instance.bbox_y0,
instance.bbox_y1]
instance.set_bounds_from_bbox(bbox)
def pre_delete_layer(instance, sender, **kwargs):
"""
Remove any associated style to the layer, if it is not used by other layers.
Default style will be deleted in post_delete_layer
"""
if instance.is_remote:
# we need to delete the maplayers here because in the post save layer.service is not available anymore
# REFACTOR
from geonode.maps.models import MapLayer
if instance.typename:
logger.debug(
"Going to delete associated maplayers for [%s]",
instance.typename.encode('utf-8'))
MapLayer.objects.filter(
name=instance.typename,
ows_url=instance.ows_url).delete()
return
logger.debug(
"Going to delete the styles associated for [%s]",
instance.typename.encode('utf-8'))
ct = ContentType.objects.get_for_model(instance)
OverallRating.objects.filter(
content_type=ct,
object_id=instance.id).delete()
default_style = instance.default_style
for style in instance.styles.all():
if style.layer_styles.all().count() == 1:
if style != default_style:
style.delete()
# Delete object permissions
remove_object_permissions(instance)
def post_delete_layer(instance, sender, **kwargs):
"""
Removed the layer from any associated map, if any.
Remove the layer default style.
"""
if instance.is_remote:
return
from geonode.maps.models import MapLayer
if instance.typename:
logger.debug(
"Going to delete associated maplayers for [%s]",
instance.typename.encode('utf-8'))
MapLayer.objects.filter(
name=instance.typename,
ows_url=instance.ows_url).delete()
if instance.typename:
logger.debug(
"Going to delete the default style for [%s]",
instance.typename.encode('utf-8'))
if instance.default_style and Layer.objects.filter(
default_style__id=instance.default_style.id).count() == 0:
instance.default_style.delete()
try:
if instance.upload_session:
for lf in instance.upload_session.layerfile_set.all():
lf.file.delete()
except UploadSession.DoesNotExist:
pass
signals.pre_save.connect(pre_save_layer, sender=Layer)
signals.post_save.connect(resourcebase_post_save, sender=Layer)
signals.pre_delete.connect(pre_delete_layer, sender=Layer)
signals.post_delete.connect(post_delete_layer, sender=Layer)
|
gpl-3.0
| -5,706,157,803,603,171,000
| 30.752577
| 110
| 0.613528
| false
| 3.978471
| false
| false
| false
|
chryswoods/SireTests
|
unittests/SireMove/rigidbodymd.py
|
1
|
1966
|
from Sire.Mol import *
from Sire.IO import *
from Sire.Vol import *
from Sire.FF import *
from Sire.MM import *
from Sire.CAS import *
from Sire.Maths import *
from Sire.Qt import *
from Sire.Units import *
from Sire.System import *
from Sire.Move import *
from Sire.Stream import *
import sys
mols = PDB().read("test/io/water.pdb")
print("Read in %d molecules!" % mols.nMolecules())
mol = mols.moleculeAt(0).molecule()
mol = mol.edit().atom( AtomName("O00") ) \
.setProperty("LJ", LJParameter(3.15363*angstrom, \
0.15500*kcal_per_mol)).molecule() \
.atom( AtomName("H01") ) \
.setProperty("charge", 0.520 * mod_electron).molecule() \
.atom( AtomName("H02") ) \
.setProperty("charge", 0.520 * mod_electron).molecule() \
.atom( AtomName("M03") ) \
.setProperty("charge", -1.04 * mod_electron).molecule() \
.commit()
charges = mol.property("charge")
ljs = mol.property("LJ")
cljff = InterCLJFF("water-water")
cljff.add(mol)
solvent = MoleculeGroup("solvent")
solvent.add(mol)
for i in range(1,7):
mol = mols.moleculeAt(i).molecule()
mol = mol.edit().rename("T4P") \
.setProperty("charge", charges) \
.setProperty("LJ", ljs) \
.commit()
solvent.add(mol)
cljff.add(mol)
system = System()
system.add(solvent)
system.add(cljff)
print(system.energy())
rbmove = MolecularDynamics( solvent, DLMRigidBody(), 1*femtosecond )
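# Rigid-body molecular dynamics on the solvent group using the DLM integrator and a 1 fs timestep.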
#rbmove.setEnergyComponent( cljff.components().coulomb() )
PDB().write(system.molecules(), "test0000.pdb")
for i in range(1,1000):
rbmove.move(system, 10)
print(i, system.energy())
print(rbmove.kineticEnergy(), (system.energy() + rbmove.kineticEnergy()))
PDB().write(system.molecules(), "test%0004d.pdb" % i)
|
gpl-2.0
| 7,023,810,924,964,318,000
| 25.931507
| 86
| 0.581384
| false
| 3.17609
| false
| false
| false
|
houssemFat/bloodOn
|
bloodon/accounts/social/providers/google/views.py
|
1
|
1089
|
import requests
from bloodon.accounts.social.providers.oauth2.views import (OAuth2Adapter,
OAuth2LoginView,
OAuth2CallbackView)
from .provider import GoogleProvider
class GoogleOAuth2Adapter(OAuth2Adapter):
provider_id = GoogleProvider.id
access_token_url = 'https://accounts.google.com/o/oauth2/token'
authorize_url = 'https://accounts.google.com/o/oauth2/auth'
profile_url = 'https://www.googleapis.com/oauth2/v1/userinfo'
def complete_login(self, request, token, **kwargs):
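        # Fetch the user's Google profile as JSON using the access token, then build the social login object from that response.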
resp = requests.get(self.profile_url,
params={'access_token': token.token,
'alt': 'json'})
extra_data = resp.json()
provider = self.get_provider()
login = provider.social_login_from_response(request, extra_data)
return login
oauth2_login = OAuth2LoginView.adapter_view(GoogleOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(GoogleOAuth2Adapter)
|
mit
| -3,685,332,758,195,466,000
| 39.333333
| 79
| 0.613407
| false
| 4.172414
| false
| false
| false
|
dtimes6/JustForFun
|
hihi.sleekxmpp.aiml.py
|
1
|
2851
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os.path
import sys
import logging
import getpass
import aiml
from optparse import OptionParser
import sleekxmpp
if sys.version_info < (3, 0):
from sleekxmpp.util.misc_ops import setdefaultencoding
setdefaultencoding('utf8')
else:
raw_input = input
class HiHiBot(sleekxmpp.ClientXMPP):
def __init__(self, jid, password):
self.aiml = aiml.Kernel()
if os.path.isfile("standard.brn"):
self.aiml.bootstrap(brainFile="standard.brn")
else:
self.aiml.bootstrap(learnFiles="std-startup.xml", commands="load aiml b")
self.aiml.saveBrain("standard.brn")
self.aiml.setBotPredicate("name", "海洋")
sleekxmpp.ClientXMPP.__init__(self, jid, password)
self.add_event_handler("session_start", self.start)
self.add_event_handler("message", self.message)
def start(self, event):
self.send_presence()
self.get_roster()
def message(self, msg):
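        # Feed incoming chat messages to the AIML kernel, keyed by the sender's JID, and reply with whatever the kernel returns.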
if msg['type'] in ('chat', 'normal'):
result = self.aiml.respond(msg["body"], msg["from"])
if result:
msg.reply(result).send()
if __name__ == '__main__':
# Setup the command line arguments.
optp = OptionParser()
# Output verbosity options.
optp.add_option('-q', '--quiet', help='set logging to ERROR',
action='store_const', dest='loglevel',
const=logging.ERROR, default=logging.INFO)
optp.add_option('-d', '--debug', help='set logging to DEBUG',
action='store_const', dest='loglevel',
const=logging.DEBUG, default=logging.INFO)
optp.add_option('-v', '--verbose', help='set logging to COMM',
action='store_const', dest='loglevel',
const=5, default=logging.INFO)
# JID and password options.
optp.add_option("-j", "--jid", dest="jid",
help="JID to use")
optp.add_option("-p", "--password", dest="password",
help="password to use")
opts, args = optp.parse_args()
# Setup logging.
logging.basicConfig(level=opts.loglevel,
format='%(levelname)-8s %(message)s')
if opts.jid is None:
opts.jid = raw_input("Username: ")
if opts.password is None:
opts.password = getpass.getpass("Password: ")
xmpp = HiHiBot(opts.jid, opts.password)
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0004') # Data Forms
xmpp.register_plugin('xep_0060') # PubSub
xmpp.register_plugin('xep_0199') # XMPP Ping
# Connect to the XMPP server and start processing XMPP stanzas.
if xmpp.connect():
xmpp.process(block=True)
print("Done")
else:
print("Unable to connect.")
|
mit
| -3,853,410,436,045,851,000
| 31.724138
| 85
| 0.596066
| false
| 3.631378
| false
| false
| false
|
SeanEstey/Bravo
|
app/tests/main/test_leaderboard.py
|
1
|
1190
|
'''app.tests.main.test_leaderboard'''
import logging, unittest, json
from flask import g
from app.tests.__init__ import *
from app import get_keys
from app.main import leaderboard
from logging import getLogger
log = getLogger(__name__)
class LeaderboardTests(unittest.TestCase):
def setUp(self):
init(self)
login_self(self)
login_client(self.client)
def tearDown(self):
logout(self.client)
def _test_get_all_ytd(self):
leaderboard.get_all_ytd('vec')
def test_get_rank(self):
leaderboard.get_rank('Deer Ridge', 'vec')
leaderboard.get_rank('Bowness', 'vec')
leaderboard.get_rank('Citadel', 'vec')
leaderboard.get_rank('Varsity', 'vec')
leaderboard.get_rank('Hawkwood', 'vec')
def _test_update_accts(self):
query = 'foo'
group = 'vec'
leaderboard.update_accts(query, group)
def _test_update_leaderboard_task(self):
from app.main import tasks
try:
tasks.update_leaderboard_accts.delay(group='vec')
except Exception as e:
log.debug('exc=%s', str(e), exc_info=True)
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
| 6,378,706,726,770,708,000
| 27.333333
| 61
| 0.621849
| false
| 3.459302
| true
| false
| false
|
gary-pickens/HouseMonitor
|
housemonitor/outputs/xmlrpc/outputthread.py
|
1
|
3101
|
'''
Created on 2012-10-20
@author: Gary
'''
from housemonitor.lib.base import Base
from housemonitor.lib.constants import Constants
from pprint import pprint
from SimpleXMLRPCServer import SimpleXMLRPCServer
import pprint
import threading
import time
import os
from housemonitor.inputs.dataenvelope import DataEnvelope
class XmlRpcOutputThread( Base, threading.Thread ):
'''
'''
__host = '0.0.0.0'
__port = 9002
__current_values = None
__input_queue = None
def __init__( self, current_values, input_queue ):
'''
'''
super( XmlRpcOutputThread, self ).__init__()
threading.Thread.__init__( self )
self.__current_values = current_values
self.__input_queue = input_queue
        ''' Make sure to add the appropriate entry in the logging
        configuration file.
        '''
@property
def logger_name( self ):
        ''' Return the logging key used for this output. '''
return Constants.LogKeys.outputsXMLRPC
def change_dio( self, value, device, port, steps ):
try:
env = DataEnvelope( type=Constants.EnvelopeTypes.COMMAND, value=value,
device=device, port=port, steps=steps )
self.__input_queue.transmit( env, self.__input_queue.HIGH_PRIORITY )
self.logger.debug(
"send command: value = {} device = {} port = {} steps = {}".
format( value, device, port, steps ) )
except Exception as ex:
self.logger.exception( "Exception in {}".format( __name__ ) )
return value
def send_command( self, value, device, port, steps ):
try:
env = DataEnvelope( type=Constants.EnvelopeTypes.COMMAND, value=value,
device=device, port=port, steps=steps )
self.__input_queue.transmit( env, self.__input_queue.MID_PRIORITY )
self.logger.debug(
"send command: value = {} device = {} port = {} steps = {}".
format( value, device, port, steps ) )
except Exception as ex:
self.logger.exception( "Exception in {}".format( __name__ ) )
return value
def get_current_value( self, device, port ):
value = self.__current_values.get( device, port )
self.logger.debug(
"get current value: device = {} port = {} value = {}".
format( device, port, value ) )
return value
def get_current_values( self ):
self.logger.debug( 'get_current_values called' )
cv = self.__current_values.get()
self.logger.debug( 'current_values = ', pprint.pformat( cv ) )
return cv
def run( self ):
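        # Serve XML-RPC requests forever on the configured host/port, exposing the getters and command methods registered below.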
server = SimpleXMLRPCServer( ( self.__host, self.__port ), logRequests=False )
server.register_introspection_functions()
server.register_function( self.get_current_value )
server.register_function( self.get_current_values )
server.register_function( self.send_command )
server.register_function( self.change_dio )
server.serve_forever()
|
mit
| -2,624,293,600,049,674,000
| 34.238636
| 86
| 0.5911
| false
| 4.253772
| false
| false
| false
|
florensacc/snn4hrl
|
regressors/latent_regressor.py
|
1
|
11889
|
import numpy as np
from rllab.core.serializable import Serializable
from rllab.core.parameterized import Parameterized
from rllab.misc import logger
# the regressor will be choosen to be from the same distribution as the latents
from rllab.regressors.gaussian_mlp_regressor import GaussianMLPRegressor
from rllab.regressors.categorical_mlp_regressor import CategoricalMLPRegressor # could be Categorical_oneAxis
from sandbox.snn4hrl.regressors.categorical_recurrent_regressor import CategoricalRecurrentRegressor
from sandbox.snn4hrl.regressors.bernoulli_mlp_regressor import BernoulliMLPRegressor
from sandbox.snn4hrl.regressors.bernoulli_recurrent_regressor import BernoulliRecurrentRegressor
from rllab.optimizers.first_order_optimizer import FirstOrderOptimizer
class Latent_regressor(Parameterized, Serializable):
def __init__(
self,
env_spec,
policy,
recurrent=False,
predict_all=True,
obs_regressed='all',
act_regressed='all',
use_only_sign=False,
noisify_traj_coef=0,
optimizer=None, # this defaults to LBFGS
regressor_args=None, # here goes all args straight to the regressor: hidden_sizes, TR, step_size....
):
"""
:param predict_all: this is only for the recurrent case, to use all hidden states as predictions
:param obs_regressed: list of index of the obs variables used to fit the regressor. default string 'all'
:param act_regressed: list of index of the act variables used to fit the regressor. default string 'all'
:param regressor_args:
"""
self.env_spec = env_spec
self.policy = policy
self.latent_dim = policy.latent_dim
self.recurrent = recurrent
self.predict_all = predict_all
self.use_only_sign = use_only_sign
self.noisify_traj_coef = noisify_traj_coef
self.regressor_args = regressor_args
# decide what obs variables will be regressed upon
if obs_regressed == 'all':
self.obs_regressed = list(range(env_spec.observation_space.flat_dim))
else:
self.obs_regressed = obs_regressed
# decide what action variables will be regressed upon
if act_regressed == 'all':
self.act_regressed = list(range(env_spec.action_space.flat_dim))
else:
self.act_regressed = act_regressed
# shape the input dimension of the NN for the above decisions.
self.obs_act_dim = len(self.obs_regressed) + len(self.act_regressed)
Serializable.quick_init(self, locals()) # ??
if regressor_args is None:
regressor_args = dict()
if optimizer == 'first_order':
self.optimizer = FirstOrderOptimizer(
max_epochs=10, # both of these are to match Rocky's 10
batch_size=128,
)
elif optimizer is None:
self.optimizer = None
else:
raise NotImplementedError
if policy.latent_name == 'bernoulli':
if self.recurrent:
self._regressor = BernoulliRecurrentRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
predict_all=self.predict_all,
**regressor_args
)
else:
self._regressor = BernoulliMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
elif policy.latent_name == 'categorical':
if self.recurrent:
self._regressor = CategoricalRecurrentRegressor( # not implemented
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
# predict_all=self.predict_all,
**regressor_args
)
else:
self._regressor = CategoricalMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
elif policy.latent_name == 'normal':
self._regressor = GaussianMLPRegressor(
input_shape=(self.obs_act_dim,),
output_dim=policy.latent_dim,
optimizer=self.optimizer,
**regressor_args
)
else:
raise NotImplementedError
def fit(self, paths):
logger.log('fitting the regressor...')
if self.recurrent:
observations = np.array([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.array([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0,
scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
latents = np.array([p['agent_infos']['latents'] for p in paths])
self._regressor.fit(obs_actions, latents) # the input shapes are (traj, time, dim)
else:
observations = np.concatenate([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate([p['agent_infos']["latents"] for p in paths])
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0,
scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
self._regressor.fit(obs_actions, latents.reshape((-1, self.latent_dim))) # why reshape??
logger.log('done fitting the regressor')
def predict(self, path):
if self.recurrent:
obs_actions = [np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]],
axis=1)] # is this the same??
else:
obs_actions = np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]], axis=1)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0, scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
return self._regressor.predict(obs_actions).flatten()
def get_output_p(self, path): # this gives the p_dist for every step: the latent posterior wrt obs_act
if self.recurrent:
obs_actions = [np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]],
axis=1)] # is this the same??
else:
obs_actions = np.concatenate([path["observations"][:, self.obs_regressed],
path["actions"][:, self.act_regressed]], axis=1)
if self.noisify_traj_coef:
obs_actions += np.random.normal(loc=0.0, scale=float(np.mean(np.abs(obs_actions))) * self.noisify_traj_coef,
size=np.shape(obs_actions))
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
if self.policy.latent_name == 'bernoulli':
return self._regressor._f_p(obs_actions).flatten()
elif self.policy.latent_name == 'normal':
return self._regressor._f_pdists(obs_actions).flatten()
def get_param_values(self, **tags):
return self._regressor.get_param_values(**tags)
def set_param_values(self, flattened_params, **tags):
self._regressor.set_param_values(flattened_params, **tags)
def predict_log_likelihood(self, paths, latents):
if self.recurrent:
observations = np.array([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.array([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2) # latents must match first 2dim: (batch,time)
else:
observations = np.concatenate([p["observations"][:, self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][:, self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate(latents, axis=0)
if self.noisify_traj_coef:
noise = np.random.multivariate_normal(mean=np.zeros_like(np.mean(obs_actions, axis=0)),
cov=np.diag(np.mean(np.abs(obs_actions),
axis=0) * self.noisify_traj_coef),
size=np.shape(obs_actions)[0])
obs_actions += noise
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
return self._regressor.predict_log_likelihood(obs_actions, latents) # see difference with fit above...
def lowb_mutual(self, paths, times=(0, None)):
if self.recurrent:
observations = np.array([p["observations"][times[0]:times[1], self.obs_regressed] for p in paths])
actions = np.array([p["actions"][times[0]:times[1], self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=2)
latents = np.array([p['agent_infos']['latents'][times[0]:times[1]] for p in paths])
else:
observations = np.concatenate([p["observations"][times[0]:times[1], self.obs_regressed] for p in paths])
actions = np.concatenate([p["actions"][times[0]:times[1], self.act_regressed] for p in paths])
obs_actions = np.concatenate([observations, actions], axis=1)
latents = np.concatenate([p['agent_infos']["latents"][times[0]:times[1]] for p in paths])
if self.noisify_traj_coef:
obs_actions += np.random.multivariate_normal(mean=np.zeros_like(np.mean(obs_actions,axis=0)),
cov=np.diag(np.mean(np.abs(obs_actions),
axis=0) * self.noisify_traj_coef),
size=np.shape(obs_actions)[0])
if self.use_only_sign:
obs_actions = np.sign(obs_actions)
H_latent = self.policy.latent_dist.entropy(self.policy.latent_dist_info) # sum of entropies latents in
return H_latent + np.mean(self._regressor.predict_log_likelihood(obs_actions, latents))
def log_diagnostics(self, paths):
logger.record_tabular(self._regressor._name + 'LowerB_MI', self.lowb_mutual(paths))
logger.record_tabular(self._regressor._name + 'LowerB_MI_5first', self.lowb_mutual(paths, times=(0, 5)))
logger.record_tabular(self._regressor._name + 'LowerB_MI_5last', self.lowb_mutual(paths, times=(-5, None)))
|
mit
| 5,297,087,816,919,702,000
| 52.075893
| 120
| 0.568761
| false
| 3.961679
| false
| false
| false
|
tomosoft-jp/SainSmartLcd
|
Graphic.py
|
1
|
5968
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from ST7735 import ST7735
class Graphic:
def __init__(self, pst7735):
self._st7735 = pst7735
def drawline(self, x0p, y0p, x1p, y1p, color):
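        # Bresenham-style integer line drawing: step along the major axis and use the accumulated error term to decide when to advance the minor axis.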
if (x0p >= self._st7735.width) or (y0p >= self._st7735.height):
print " drawline x0, y0 Range error"
return
if (x1p >= self._st7735.width) or (y1p >= self._st7735.height):
print " drawline x1, y1 Range error"
return
x0 = x0p
y0 = y0p
x1 = x1p
y1 = y1p
steep = abs(y1 - y0) > abs(x1 - x0)
if steep:
x0, y0 = y0, x0
x1, y1 = y1, x1
if x0 > x1:
x0, x1 = x1, x0
y0, y1 = y1, y0
dx = x1 - x0
dy = abs(y1 - y0)
err = dx / 2
ystep = -1
if y0 < y1:
ystep = 1
for xx0 in range(x0, x1):
if steep:
self._st7735.dot(y0, xx0, color)
else:
self._st7735.dot(xx0, y0, color)
err -= dy
if err < 0:
y0 += ystep
err += dx
def drawrect(self, x, y, w, h, color):
if (x >= self._st7735.width) or (y >= self._st7735.height):
print " drawrect x, y Range error"
return
if ((x + w) >= self._st7735.width) or ((y + h) >= self._st7735.height):
print " drawrect w, h Range error"
return
self.drawline(x, y, x + w - 1, y, color)
self.drawline(x, y + h - 1, x + w - 1, y + h - 1, color)
self.drawline(x, y, x, y + h - 1, color)
self.drawline(x + w - 1, y, x + w - 1, y + h - 1, color)
def fillrect(self, x, y, w, h, color):
if (x >= self._st7735.width) or (y >= self._st7735.height):
print " fillrect x, y Range error"
return
# print " fillrect:{0:X}".format(x)
if (x + w - 1) >= self._st7735.width:
w = self._st7735.width - x
if (y + h - 1) >= self._st7735.height:
h = self._st7735.height - y
for xx in range(x, x + w):
for yy in range(y, y + h):
self._st7735.dot(xx, yy, color)
def fillscreen(self, color):
self.fillrect(0, 0, self._st7735.width, self._st7735.height, color)
def drawcircle(self, x0, y0, r, color):
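        # Midpoint circle algorithm: plot the four cardinal points, then mirror every computed point into all eight octants.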
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
self._st7735.dot(x0, y0 + r, color)
self._st7735.dot(x0, y0 - r, color)
self._st7735.dot(x0 + r, y0, color)
self._st7735.dot(x0 - r, y0, color)
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
self._st7735.dot(x0 + x, y0 + y, color)
self._st7735.dot(x0 - x, y0 + y, color)
self._st7735.dot(x0 + x, y0 - y, color)
self._st7735.dot(x0 - x, y0 - y, color)
self._st7735.dot(x0 + y, y0 + x, color)
self._st7735.dot(x0 - y, y0 + x, color)
self._st7735.dot(x0 + y, y0 - x, color)
self._st7735.dot(x0 - y, y0 - x, color)
def drawcirclehelper(self, x0, y0, r, cornername, color):
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
            if cornername & 0x4:
self._st7735.dot(x0 + x, y0 + y, color)
self._st7735.dot(x0 + y, y0 + x, color)
            if cornername & 0x2:
self._st7735.dot(x0 + x, y0 - y, color)
self._st7735.dot(x0 + y, y0 - x, color)
            if cornername & 0x8:
self._st7735.dot(x0 - y, y0 + x, color)
self._st7735.dot(x0 - x, y0 + y, color)
            if cornername & 0x1:
self._st7735.dot(x0 - y, y0 - x, color)
self._st7735.dot(x0 - x, y0 - y, color)
def fillcirclehelper(self, x0, y0, r, cornername, delta, color):
f = 1 - r
ddf_x = 1
ddf_y = -2 * r
x = 0
y = r
while x < y:
if f >= 0:
y -= 1
ddf_y += 2
f += ddf_y
x += 1
ddf_x += 2
f += ddf_x
if cornername & 0x1:
self.drawline(x0 + x, y0 - y, x0 + x, y0 - y + (2 * y + 1 + delta), color)
self.drawline(x0 + y, y0 - x, x0 + y, y0 - x + (2 * x + 1 + delta), color)
if cornername & 0x2:
self.drawline(x0 - x, y0 - y, x0 - x, y0 - y + (2 * y + 1 + delta), color)
self.drawline(x0 - y, y0 - x, x0 - y, y0 - x + (2 * x + 1 + delta), color)
def fillcircle(self, x0, y0, r, color):
self.drawline(x0, y0 - r, x0, y0 - r + (2 * r + 1), color)
self.fillcirclehelper(x0, y0, r, 3, 0, color)
if __name__ == "__main__":
ST7735_TFTWIDTH = 128
ST7735_TFTHEIGHT = 160
ST7735_BLACK = 0x000000
ST7735_BLUE = 0x0000FF
ST7735_RED = 0xFF0000
ST7735_GREEN = 0x008000
ST7735_CYAN = 0x00FFFF
ST7735_MAGENTA = 0xFF00FF
ST7735_YELLOW = 0xFFFF00
ST7735_WHITE = 0xFFFFFF
st7735 = ST7735(ST7735_TFTWIDTH, ST7735_TFTHEIGHT)
graphic = Graphic(st7735)
try:
graphic.fillscreen(ST7735_RED)
graphic.drawline(10, 10, ST7735_TFTWIDTH - 10, ST7735_TFTHEIGHT - 10, ST7735_BLACK)
graphic.drawrect(0, 40, 20, 40, ST7735_CYAN)
graphic.fillrect(80, 60, 40, 20, ST7735_YELLOW)
graphic.drawcircle(64, 40, 15, ST7735_MAGENTA)
graphic.fillcircle(64, 120, 30, ST7735_GREEN)
st7735.sendbuf()
except KeyboardInterrupt:
print '\nbreak'
# GPIO.cleanup()
|
mit
| 2,563,048,761,483,400,700
| 30.083333
| 91
| 0.453586
| false
| 2.84597
| false
| false
| false
|
ArnaudBelcour/Workflow_GeneList_Analysis
|
pathway_extraction/uniprot_retrieval_data.py
|
1
|
4827
|
#!/usr/bin/env python3
import math
import pandas as pa
import six
from SPARQLWrapper import SPARQLWrapper, JSON
from tqdm import tqdm
from . import *
def extract_information_from_uniprot(results_dataframe):
'''
    Requests the SPARQL endpoint of Uniprot to retrieve (from an Ensembl transcript ID) GO terms, InterPro, Pfam/SUPFAM and PROSITE identifiers.
    The input file contains each gene associated with the result of a BLAST search (hence the 'hypothetical protein' prefix stripped below).
'''
if any(results_dataframe['Blast'].str.contains('hypothetical protein')):
results_dataframe['Blast'] = results_dataframe['Blast'].str[len('CEP03957.1hypothetical protein '):]
results_dataframe['Blast'] = results_dataframe['Blast'].str.replace(', partial', '')
results_dataframe.set_index("Gene_Name", inplace=True)
for gene, row in tqdm(results_dataframe.iterrows(), total=len(results_dataframe.index)):
gos_found = []
datas_found = []
enzymes_found = []
interpros = []
supfams = []
pfams = []
prosites = []
for transcript_id in row['Blast'].split(','):
transcript = 'ensembl:' + transcript_id
sparql = SPARQLWrapper('http://beta.sparql.uniprot.org/sparql')
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?go
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
?protein up:classifiedWith ?go .
FILTER (regex(str(?go), "GO")) .
VALUES ?ensemblName {""" + transcript + """}
}
""")
sparql.setReturnFormat(JSON)
results = sparql.query().convert()
for result in results["results"]["bindings"]:
gos_found.append(result["go"]["value"][31:].replace("_", ":"))
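            # Second query: EC numbers for the same transcript via the up:enzyme property.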
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?enzyme
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
                ?protein up:enzyme ?enzyme .
VALUES ?ensemblName {""" + transcript + """}
}
""")
results = sparql.query().convert()
for result in results["results"]["bindings"]:
if "enzyme" in result:
enzymes_found.append('ec:' + result["enzyme"]["value"][len('http://purl.uniprot.org/enzyme/'):])
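            # Third query: all rdfs:seeAlso cross-references; these are split below into InterPro, SUPFAM, Pfam and PROSITE identifiers.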
sparql.setQuery("""
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX up:<http://purl.uniprot.org/core/>
PREFIX ensembl:<http://rdf.ebi.ac.uk/resource/ensembl/>
SELECT DISTINCT ?data
WHERE
{
?transcrit up:transcribedFrom ?ensemblName.
?protein rdfs:seeAlso ?transcrit .
?protein rdfs:seeAlso ?data .
VALUES ?ensemblName {""" + transcript + """}
}
""")
results = sparql.query().convert()
for result in results["results"]["bindings"]:
datas_found.append(result["data"]["value"][len('http://purl.uniprot.org/'):])
for data in datas_found:
if 'interpro' in data:
data = data[len('interpro/'):]
interpros.append(data)
if 'supfam' in data:
data = data[len('supfam/'):]
supfams.append(data)
if 'pfam' in data and 'supfam' not in data:
data = data[len('pfam/'):]
pfams.append(data)
if 'prosite' in data:
data = data[len('prosite/'):]
prosites.append(data)
if row['GOs'] == '':
results_dataframe.set_value(gene, 'GOs', ','.join(gos_found))
#if row['EnzymeCodes'] == '':
#results_dataframe.set_value(gene, 'EnzymeCodes', ','.join(enzymes_found))
if row['InterProScan'] == '':
results_dataframe.set_value(gene, 'InterProScan', ','.join(interpros))
#results_dataframe.set_value(gene, 'supFams', str(supfams))
#results_dataframe.set_value(gene, 'pfams', str(pfams))
#results_dataframe.set_value(gene, 'prosites', str(prosites))
results_dataframe.reset_index(inplace=True)
return results_dataframe
|
agpl-3.0
| 201,268,150,668,236,600
| 37.309524
| 141
| 0.542987
| false
| 3.794811
| false
| false
| false
|
killer923/alarm
|
Alarm.py
|
1
|
5182
|
import os
import time
from Tkinter import Tk
from tkFileDialog import askopenfilename
def change_settings(first_time):
if first_time==0:
customizations=read_settings()
tone=customizations[0]
snooze=customizations[1]
settings=open("settings.txt","w")
settings.write("Please change only if you know what you are doing.\n")
settings.write("If you make a mistake simply delete this file.\n")
#set alarm tone
if first_time:
print "Select the alarm tone alarm tone: "
try:
Tk().withdraw()
except Exception as e:
print e
new_tone= askopenfilename()
print new_tone
settings.write("Alarm tone : "+new_tone+"\n")
else:
print "Current alarm tone: "+tone
print "Do you want to change the alarm tone:(Y|N) ",
response=raw_input()
if response=="y" or response=="Y":
try:
Tk().withdraw()
except Exception as e:
print e
new_tone=askopenfilename()
print new_tone
settings.write("Alarm tone : "+new_tone+"\n")
else:
settings.write("Alarm tone : "+tone+"\n")
#set snooze time
if first_time:
print "Enter the snooze time ( in minutes) :",
snooze=int(raw_input())
		check=1
		if snooze<1 or snooze>10:
			check=0
while(check<1):
print "The range for snooze time is 1 minute to 10 minutes."
print "Please enter snooze time again :",
snooze=int(raw_input())
if snooze>=1 and snooze<=10:
check=1
settings.write("Snooze time : "+str(snooze)+"\n")
else:
print "Current snooze time is :"+str(snooze)
print "Do you want to change the snooze time? (Y|N) ",
response=raw_input()
if response=="y" or response=="Y":
print "Enter the new snooze time : ",
			snooze=int(raw_input())
			check=1
			if snooze<1 or snooze>10:
				check=0
			while(check<1):
print "The range for snooze time is 1 minute to 10 minutes."
print "Please enter snooze time again : ",
snooze=int(raw_input())
if snooze>=1 and snooze<=10:
check=1
settings.write("Snooze time: "+str(snooze)+"\n")
settings.close()
def create_settings():
print "Looks like you are using the program for the first time."
print "Thank you for choosing my program."
print "Please create settings for the program, you will be able to change them in the start of new run of the program."
change_settings(1)
def read_settings():
try:
settings=open("settings.txt","r")
except:
create_settings()
#print"ji"
settings=open("settings.txt","r")
try:
count=0
for line in settings:
#print count," ...",line
if count<2:
count=count+1
elif count==2:
tone=line
tone=tone.split(":")
#print "1==",tone
tone[1]=tone[1].split()[0]
tone1=tone[-1].split("/")
#print "2==",tone1
tone=tone[1]+":"
#print "3==",tone
tone1[-1]=tone1[-1].split("\\")[0]
if len(tone1)==1:
tone=tone+"\\"+str(tone1[0])
else:
for i in range(1,(len(tone1))):
tone=tone+"\\"+str(tone1[i])
#print "i=",i," ",tone
#tone=tone1.split()
#print tone
#tone=tone[0]
#print "tone="+tone
tone=tone.split("\n")[0]
count=count+1
#print count,tone
elif count==3: #read snooze time
snooze=line
snooze=snooze.split(":")
snooze=snooze[1].split()
snooze=int(snooze[0])
#print count,snooze
return [tone,snooze]
except Exception as x:
print count,x
print "There seems to be a problem with your settings file."
print "We will need to recreate it."
create_settings()
read_settings()
def ring(tone,snooze):
#print tone,str(snooze)
#print "Time to ring the alarm"
	while 1:
		os.startfile(tone)
		print "Come on Wake up... You are Getting Late ...."
		time.sleep(snooze*60)
	#ring(tone,snooze)
def main():
print "Welcome"
print "Do you want to change settings? (Y|N) ",
response=raw_input()
if response=="y" or response=="Y":
change_settings(0)
customizations=read_settings()
#Get time to ring
print "Set time for alarm: "
#get hours
print " HH : ",
hh=int(raw_input())
check = 0
if hh<0 or hh>23:
check = -1
while check<0:
print " Hours does not exist, please enter again: ",
hh=int(raw_input())
		if hh<0 or hh>23:
check = -1
else:
check = 0
#get time
print " MM : ",
mm=int(raw_input())
check = 0
if mm<0 or mm>59:
check = -1
while check<0:
print " Minutes does not exist, please enter again: ",
mm=int(raw_input())
		if mm<0 or mm>59:
check = -1
else:
check = 0
#Get current time
sys_time=time.ctime()
sys_time=sys_time.split()
sys_time=sys_time[3].split(":")
sys_hh=int(sys_time[0])
sys_mm=int(sys_time[1])
#calculate sleeping time
if hh<sys_hh:
minutes=(60-sys_mm)+mm
hours=(23-sys_hh)+hh
elif hh==sys_hh:
if mm<sys_mm:
hours=23
minutes=(60-sys_mm)+mm
else:
hours=0
minutes=mm-sys_mm
else:
hours=hh-sys_hh-1
minutes=(60-sys_mm)+mm
if minutes >60:
hours=hours+1
minutes=minutes-60
elif minutes<0:
hours=hours-1
minutes=minutes+60
print "Alarm will ring after "+str(hours)+" hours and "+str(minutes)+" minutes."
seconds=(hours*3600)+(minutes*60)
#print "Alarm will ring after "+str(seconds)+" seconds."
time.sleep(seconds)
print "The program woke up :) \n Time for you to wake up too."
#print customizations
ring(customizations[0],customizations[1])
if __name__=='__main__':
main()
|
apache-2.0
| 738,152,008,914,051,300
| 24.653465
| 120
| 0.648784
| false
| 2.594892
| false
| false
| false
|
pliz/gunfolds
|
tools/pathtreetools.py
|
1
|
14198
|
import sys
sys.path.append('./tools/')
from pathtree import PathTree
from ortools.constraint_solver import pywrapcp
from matplotlib.cbook import flatten
from functools import wraps
import numpy as np
import bisect
from sortedcontainers import SortedDict
import ipdb
class SolutionNotFoundInTime(Exception):
pass
def ptloopnum(pt):
"""
Given a PathTree object returns the number of loops in it
:param pt: PathTree object
:return: number of loops (n)
"""
def ptn(pt, n=0):
for e in pt.loopset:
if type(e) is int:
n += 1
continue
n += ptn(e, n=1)
return n
return ptn(pt)
def ptnodenum(pt):
"""
Given a PathTree object returns the number of latents that comprise it
:param pt: PathTree object
:return: number of nodes (n)
"""
n = pt.preset - 1
def ptn(pt, n=0):
for e in pt.loopset:
if type(e) is int:
n += e - 1
continue
n += ptn(e, n=1)
return n
return n + ptn(pt)
def ptelement(pt, w):
"""
An element generated by a PathTree with a given weight setting
:param pt: PathTree
:param w: a list of weights
:return: an integer
"""
n = pt.preset
def sumloops(pt, w):
n = 0
ls = list(pt.loopset)
for i in range(len(ls)):
if type(ls[i]) is int:
n += w[i] * ls[i]
continue
n += w[i][0] * ls[i].preset \
+ min(1, w[i][0]) * sumloops(ls[i], w[i][1])
return n
return n + sumloops(pt, w)
def weights_pt(pt, weights):
c = [0]
def crawl(pt, w, c):
wl = []
for e in pt.loopset:
if type(e) is int:
wl.append(w[c[0]])
c[0] += 1
continue
ww = w[c[0]]
c[0] += 1
wl.append([ww, crawl(e, w, c)])
return wl
return crawl(pt, weights, c)
def extraloops_pt(pt, loops): # loops are tuples (loop, weight)
c = [0]
def crawl(pt, l, c):
first = [l[c[0]]]
wl = []
for e in pt.loopset:
c[0] += 1
if type(e) is int:
wl.append(l[c[0]])
continue
wl.append(crawl(e, l, c))
return first + [wl]
return crawl(pt, loops, c)
def ptelement_extraloop(pt, w, eloops):
"""
An element generated by a PathTree with a given weight setting and extra loops on each level
:param pt: PathTree
:param w: a list of list of weights
:param eloops: a list of tuples with lengths of extra loops and their weights
:return: an integer
"""
n = pt.preset + eloops[0][0] * eloops[0][1]
def sumloops(pt, w, lps):
ls = list(pt.loopset)
n = 0
for i in range(len(ls)):
if type(ls[i]) is int:
n += w[i] * ls[i] + min(1, w[i]) * lps[i][0] * lps[i][1]
continue
n += w[i][0] * ls[i].preset \
+ min(1, w[i][0]) * (lps[i][0][0] * lps[i][0][1] + sumloops(ls[i], w[i][1], lps[i][1]))
return n
return n + sumloops(pt, w, eloops[1])
def isptelement_el(el, pt, w, eloops):
return el == ptelement_extraloop(pt, w, eloops)
def isptsubset_el(elist, pt, w, eloops):
for i in range(elist[-1]):
if isptelement_el(i, pt, w, eloops):
if not i in elist:
return False
return True
def isrightpt(el, elist, pt, w, eloops):
for i in range(elist[-1]):
if isptelement_el(i, pt, w, eloops):
if not i in elist:
return False
if i == el and not isptelement_el(i, pt, w, eloops):
return False
return True
def ptelements(pt, seqlen=100, verbose=False, maxloop=100):
"""
Generate first `seqlen` elements from a pathtree
:param pt: a path tree object from pathtree.py
:param seqlen: number of elements to generate in ascending order
:param verbose: whether to print debugging information
:return: a list of elements
"""
solver = pywrapcp.Solver("pt-elements")
# declare variables
weights = []
N = ptloopnum(pt)
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
# declare constraints
# solver.Add()
# run the solver
solution = solver.Assignment()
solution.Add(weights)
db = solver.Phase(weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
num_solutions = 0
els = set()
while solver.NextSolution():
w = [x.Value() for x in weights]
num_solutions += 1
els.add(ptelement(pt, w))
if len(els) == seqlen:
break
solver.EndSearch()
# output solutions
if verbose:
print "num_solutions:", num_solutions
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
return list(els)
def isptelement(pt, element, verbose=False, maxloop=100):
"""
Check if an integer element is in the weight set represented by the path tree
:param pt: a path tree object from pathtree.py
:param element: an integer to check for presence in the weight
:param verbose: whether to print debugging information
:return: True or False
"""
solver = pywrapcp.Solver("isptelement")
# declare variables
weights = []
N = ptloopnum(pt)
if not N:
return element == pt.preset
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
wpt = weights_pt(pt, weights)
# declare constraints
solver.Add(element == ptelement(pt, wpt))
# run the solver
solution = solver.Assignment()
solution.Add(weights)
db = solver.Phase(weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
solution_exists = False
while solver.NextSolution():
solution_exists = True
break
solver.EndSearch()
# output solutions
if verbose:
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
return solution_exists
def loops_and_weights(solver, loops, weights):
"""
Add constraints to solver that make sure loops are not generated if subtree is not active due to a zero weight upstream
:param solver:
:param loops:
:param weights:
:return:
"""
def recurse(s, l, w):
for ww, ll in zip(w, l):
if type(ww) is list:
for e in flatten(ll):
s.Add((ww[0] == 0) <= (e == 0))
recurse(s, ll[1:], ww[1:])
else:
for e in flatten(ll):
s.Add((ww == 0) <= (e == 0))
recurse(solver, loops[1], weights)
def eloops_simplify(eloops):
l = []
for e in eloops:
if type(e) is list:
l.append(eloops_simplify(e))
else:
l.append(int(e[0].Value()))
return l
def ptaugmented(pt, eloops):
def augment(pt, ls):
pre = pt.preset
loop = pt.loopset
s = set()
if ls[0]:
s.add(ls[0])
for l, el in zip(loop, ls[1]):
if type(l) is int:
if not el:
s.add(l)
else:
s.add(PathTree({el}, pre=l))
continue
s.add(augment(l, el))
return PathTree(s, pre=pre)
t = augment(pt, eloops)
return t
def ptsubset(pt, elist):
for i in range(elist[-1]):
if isptelement(pt, i) and not i in elist:
return False
return True
def smallest_pt(ptlist):
if ptlist:
idx = np.argsort(map(ptnodenum, ptlist))
sol = ptlist[idx[0]]
else:
sol = None
return sol
def pairprint(pt1, pt2, k=40):
print np.c_[pt2seq(pt1, k), pt2seq(pt2, k)]
def etesteq(pt1, pt2, k=100):
a1 = np.asarray(pt2seq(pt1, k))
a2 = np.asarray(pt2seq(pt2, k))
return np.sum(a1 - a2) == 0
def keeptreegrow(pt, e, seq, cutoff=10, cap=1000):
t = None
while t is None:
t = growtree(pt, e, seq, cutoff=cutoff)
cutoff += 10
if cutoff > cap:
raise SolutionNotFoundInTime("Cannot keep the tree growing")
return t
def add_element(d, pt):
"""
Add a PathTree to dictionary d such that it is either appended to the list or added anew
Args:
d: a dictionary
pt: a PathTree
Returns:
"""
key = ptnodenum(pt)
if key in d:
d[key].append(pt)
else:
        d[key] = [pt]
def del_element(d, pt, key=None):
"""
Delete a PathTree from dictionary d such that it is either removed from the list or the list that only contains one element is removed
Args:
d: a dictionary
pt: a PathTree
Returns:
"""
if key is None:
key = ptnodenum(pt)
if len(d[key]) == 1:
del d[key]
else:
d[key].remove(pt)
def swap_elements(d, pt1, pt2, key=None):
del_element(d, pt1, key=key)
add_element(d, pt2)
def seq2pt(seq, verbose=False, cutoff=100):
if not seq:
return None
pt = PathTree({}, pre=seq[0])
pts = SortedDict() # PathTrees
pts[ptnodenum(pt)] = [pt]
for e in seq[1:]:
e_is_in = False
for key in pts:
for pt in pts[key]:
if verbose:
print e
try:
newpt = keeptreegrow(pt, e, seq, cutoff=cutoff)
swap_elements(pts, pt, newpt, key=key)
e_is_in = True
break
except SolutionNotFoundInTime:
continue
if not e_is_in:
newpt = PathTree({}, pre=e)
            add_element(pts, newpt)
return pt
def growtree(pt, element, ref_elements, verbose=False, maxloop=100, cutoff=100):
"""
Add a loop with the minimal length to a path tree to enable it to generate a given element and still be a subset of a given list
:param pt: a path tree object from pathtree.py
:param element: an integer to check for presence in the weight
:param ref_elements: a (finite) list that should be a superset of numbers generated by the new path tree, for numbers smaller than tosubset[-1]
:param verbose: whether to print debugging information
:return: a PathTree augmented with a new loop
"""
solver = pywrapcp.Solver("loop_an_element")
# PathTree already can generate that number. Just to foolproof
if isptelement(pt, element):
return pt
# declare variables
weights = [] # weights denoting how many times a loop is active (marginalized)
loops = [] # extra loops that can be potentially added
lweights = [] # weights for the extra loops (marginalized out in the end)
ltuples = [] # tuple list to hold loops and weights together
N = ptloopnum(pt) # number of loops in the PathTree
for i in range(N):
weights.append(solver.IntVar(0, maxloop, "w[%04i]" % i))
for i in range(N + 1):
w = solver.IntVar(0, maxloop, "lw[%04i]" % i)
l = solver.IntVar(0, maxloop, "l[%04i]" % i)
lweights.append(w) # loop related weight
loops.append(l)
ltuples.append((l, w))
eloops = extraloops_pt(pt, ltuples)
ws = weights_pt(pt, weights)
# declare constraints
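    # Exactly one extra loop (and one matching weight) may be non-zero; the value generated under the chosen weights must equal `element` and lie in ref_elements. Full subset checking is done afterwards via ptsubset.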
solver.Add(solver.MemberCt(ptelement_extraloop(pt, ws, eloops), ref_elements))
solver.Add(element == ptelement_extraloop(pt, ws, eloops)) # make sure the element can be generated
solver.Add(solver.Count(loops, 0, len(loops) - 1)) # only one loop is on
solver.Add(solver.Count(lweights, 0, len(lweights) - 1)) # only one loop is weighted
for i in range(len(lweights)):
solver.Add((lweights[i] == 0) <= (loops[i] == 0)) # if a loop has weight zero then it can't be active
# solver.Add(lweights[i] >= loops[i])
loops_and_weights(solver, eloops, ws) # if a subtree is off (weight zero) no need to add loops
# run the solver
solution = solver.Assignment()
solution.Add(loops)
db = solver.Phase(loops + lweights + weights,
solver.CHOOSE_FIRST_UNBOUND,
solver.ASSIGN_MIN_VALUE)
solver.NewSearch(db)
numsol = 0
pts = []
while solver.NextSolution():
# print numsol,
new_pt = ptaugmented(pt, eloops_simplify(eloops))
if verbose:
print "trying PathTree: ", new_pt
if ptsubset(new_pt, ref_elements):
pts.append(new_pt)
if verbose:
print "OK PathTree: ", pts[-1]
numsol += 1
if numsol >= cutoff:
break
solver.EndSearch()
# output solutions
if verbose:
print "solutions:", numsol
print "failures:", solver.Failures()
print "branches:", solver.Branches()
print "WallTime:", solver.WallTime()
print "for ", element, "solutions found ", numsol
return smallest_pt(pts)
def pt2seq(pt, num):
if not pt.loopset:
return [pt.preset]
i = 0
s = set()
while len(s) < num:
if isptelement(pt, i, maxloop=10 * num):
s.add(i)
i += 1
l = list(s)
l.sort()
return l
def s2spt(s): # convert edge set to pt
ss = set()
for e in s:
if type(e) is int:
ss.add(PathTree({0}, pre={e}))
continue
ss.add(e)
return ss
def spt_elements(spt, num):
"""
Generate numbers from a set of PathTrees
:param spt: set of PathTrees
:param num: number of elements (from the first) to generate
:return: list of num numbers
"""
i = 0
s = set()
while len(s) < num:
if issptelement(spt, i):
s.add(i)
i += 1
return list(s)
def issptelement(spt, element):
a = False
for pt in s2spt(spt):
a = a or isptelement(pt, element)
return a
|
gpl-3.0
| 8,853,819,413,874,525,000
| 25.538318
| 147
| 0.557191
| false
| 3.453661
| false
| false
| false
|
fabawi/QuestionAnswering
|
qa/answering_engines/babi_ann/challenges.py
|
1
|
5299
|
# Challenges:
# Returns the name of a challenge given a number. The number of epochs is predefined
class Challenges:
def __init__(self):
self.challenge_en10k_filename = {
# all challenges
1: '{}tasks_1-20_v1-2/en-10k/qa1_single-supporting-fact_{}.txt',
2: '{}tasks_1-20_v1-2/en-10k/qa2_two-supporting-facts_{}.txt',
3: '{}tasks_1-20_v1-2/en-10k/qa3_three-supporting-facts_{}.txt',
4: '{}tasks_1-20_v1-2/en-10k/qa4_two-arg-relations_{}.txt',
5: '{}tasks_1-20_v1-2/en-10k/qa5_three-arg-relations_{}.txt',
6: '{}tasks_1-20_v1-2/en-10k/qa6_yes-no-questions_{}.txt',
7: '{}tasks_1-20_v1-2/en-10k/qa7_counting_{}.txt',
8: '{}tasks_1-20_v1-2/en-10k/qa8_lists-sets_{}.txt',
9: '{}tasks_1-20_v1-2/en-10k/qa9_simple-negation_{}.txt',
10: '{}tasks_1-20_v1-2/en-10k/qa10_indefinite-knowledge_{}.txt',
11: '{}tasks_1-20_v1-2/en-10k/qa11_basic-coreference_{}.txt',
12: '{}tasks_1-20_v1-2/en-10k/qa12_conjunction_{}.txt',
13: '{}tasks_1-20_v1-2/en-10k/qa13_compound-coreference_{}.txt',
14: '{}tasks_1-20_v1-2/en-10k/qa14_time-reasoning_{}.txt',
15: '{}tasks_1-20_v1-2/en-10k/qa15_basic-deduction_{}.txt',
16: '{}tasks_1-20_v1-2/en-10k/qa16_basic-induction_{}.txt',
17: '{}tasks_1-20_v1-2/en-10k/qa17_positional-reasoning_{}.txt',
18: '{}tasks_1-20_v1-2/en-10k/qa18_size-reasoning_{}.txt',
19: '{}tasks_1-20_v1-2/en-10k/qa19_path-finding_{}.txt',
20: '{}tasks_1-20_v1-2/en-10k/qa20_agents-motivations_{}.txt'
}
self.epochs_en10k_number = {
1: 38,
2: 48,
3: 94,
4: 65,
5: 83,
6: 100,
7: 63,
8: 70,
9: 99,
10: 54,
11: 32,
12: 51,
13: 43,
14: 96,
15: 37,
16: 23,
17: 96,
18: 95,
19: 100,
20: 33
}
self.challenge_en_filename = {
            # all challenges
1: '{}tasks_1-20_v1-2/en/qa1_single-supporting-fact_{}.txt',
2: '{}tasks_1-20_v1-2/en/qa2_two-supporting-facts_{}.txt',
3: '{}tasks_1-20_v1-2/en/qa3_three-supporting-facts_{}.txt',
4: '{}tasks_1-20_v1-2/en/qa4_two-arg-relations_{}.txt',
5: '{}tasks_1-20_v1-2/en/qa5_three-arg-relations_{}.txt',
6: '{}tasks_1-20_v1-2/en/qa6_yes-no-questions_{}.txt',
7: '{}tasks_1-20_v1-2/en/qa7_counting_{}.txt',
8: '{}tasks_1-20_v1-2/en/qa8_lists-sets_{}.txt',
9: '{}tasks_1-20_v1-2/en/qa9_simple-negation_{}.txt',
10: '{}tasks_1-20_v1-2/en/qa10_indefinite-knowledge_{}.txt',
11: '{}tasks_1-20_v1-2/en/qa11_basic-coreference_{}.txt',
12: '{}tasks_1-20_v1-2/en/qa12_conjunction_{}.txt',
13: '{}tasks_1-20_v1-2/en/qa13_compound-coreference_{}.txt',
14: '{}tasks_1-20_v1-2/en/qa14_time-reasoning_{}.txt',
15: '{}tasks_1-20_v1-2/en/qa15_basic-deduction_{}.txt',
16: '{}tasks_1-20_v1-2/en/qa16_basic-induction_{}.txt',
17: '{}tasks_1-20_v1-2/en/qa17_positional-reasoning_{}.txt',
18: '{}tasks_1-20_v1-2/en/qa18_size-reasoning_{}.txt',
19: '{}tasks_1-20_v1-2/en/qa19_path-finding_{}.txt',
20: '{}tasks_1-20_v1-2/en/qa20_agents-motivations_{}.txt'
}
self.epochs_en_number = {
1: 40,
2: 40,
3: 40,
4: 40,
5: 40,
6: 40,
7: 40,
8: 40,
9: 40,
10: 40,
11: 40,
12: 40,
13: 40,
14: 40,
15: 40,
16: 40,
17: 40,
18: 40,
19: 40,
20: 40
}
# In this list, each question task is defined as having the answer literal in the passage itself or a constant answer
# True means an answer is found in the passage, False means the answer is not in the passage
self.answer_in_passage = {
1: True,
2: True,
3: True,
4: True,
5: True,
6: False,
7: False,
8: True, #careful: this has two answers
9: False,
10: False,
11: True,
12: True,
13: True,
14: True,
15: True,
16: True,
17: False,
18: False,
19: False,
20: False
}
def get_challenge(self, challenge_id, challenge_type):
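        # Select the task-file template and epoch count for the chosen corpus ('en10k' or 'en'); note that an unknown challenge_type would leave challenge_filename unbound.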
if challenge_type == 'en10k':
challenge_filename = self.challenge_en10k_filename[challenge_id]
number_epochs = self.epochs_en10k_number[challenge_id]
elif challenge_type == 'en':
challenge_filename = self.challenge_en_filename[challenge_id]
number_epochs = self.epochs_en_number[challenge_id]
answer_in_passage = self.answer_in_passage[challenge_id]
        return challenge_filename, number_epochs, answer_in_passage
|
mit
| 7,969,111,048,269,106,000
| 38.544776
| 125
| 0.483487
| false
| 2.901972
| false
| false
| false
|
ArneBab/PyHurd
|
examples/showtrans.py
|
1
|
2983
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
showtrans.py - show files' passive translator.
Copyright (C) 2008 Anatoly A. Kazantsev
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
'''
import sys, errno, os
from optparse import OptionParser
from hurd import Port, O_NOTRANS, error
from mach import MACH_PORT_NULL
usage = 'Usage: %prog [OPTION...] FILE...'
description = """Show the passive translator of FILE...
A File argument of `-' prints the translator on the node attached to standard
input.
"""
parser = OptionParser(usage=usage, description=description)
parser.add_option('-p', '--prefix', dest='print_prefix',
action='store_true', default=None,
help="Always display `FILENAME: ' before translators")
parser.add_option('-P', '--no-prefix', dest='print_prefix',
action='store_false',
help="Never display `FILENAME: ' before translators")
parser.add_option('-s', '--silent', dest='silent', action='store_true',
default=False,
help='No output; useful when checking error status')
parser.add_option('-t', '--translated', dest='show_untrans',
action='store_false', default=True,
help='Only display files that have translators')
def print_node_trans (node, name):
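    # Print the node's passive translator, honouring the prefix/silent options; EINVAL from get_translator is treated as "no translator set".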
if node is MACH_PORT_NULL:
error(0, -1, name)
else:
err, trans = node.get_translator()
if not err:
if not silent:
if print_prefix:
print '%s: %s' % (name, trans)
else:
print trans
global status
status = 0
elif err == errno.EINVAL:
if not silent and print_prefix and show_untrans:
print name
else:
error(0, err, name)
def main ():
options, args = parser.parse_args()
if len(args) == 0:
print usage
print "Try `%s --help' for more information." % sys.argv[0]
sys.exit()
global print_prefix, silent, show_untrans, status
status = 1
print_prefix = options.print_prefix
silent = options.silent
show_untrans = options.show_untrans
if not print_prefix:
print_prefix = len(args) > 1;
for arg in args:
if arg != '-':
print_node_trans (Port.lookup(arg, O_NOTRANS), arg)
else:
print_node_trans (Port.getdport(0), arg)
sys.exit(status)
if __name__ == "__main__":
main()
|
gpl-2.0
| -56,086,262,759,934,080
| 28.245098
| 77
| 0.65471
| false
| 3.775949
| false
| false
| false
|
alaeddine10/ggrc-core
|
src/ggrc/builder/json.py
|
1
|
12563
|
# Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: david@reciprocitylabs.com
# Maintained By: david@reciprocitylabs.com
import ggrc.builder
import ggrc.services
import iso8601
from datetime import datetime
from flask import _request_ctx_stack
from ggrc import db
from ggrc.models.reflection import AttributeInfo
from ggrc.services.util import url_for
from sqlalchemy.ext.associationproxy import AssociationProxy
from sqlalchemy.orm.attributes import InstrumentedAttribute
from sqlalchemy.orm.properties import RelationshipProperty
from werkzeug.exceptions import BadRequest
"""JSON resource state representation handler for gGRC models."""
def view_url_for(obj):
view = getattr(ggrc.views, obj.__class__.__name__, None)
return view.url_for(obj) if view else None
def get_json_builder(obj):
"""Instantiate or retrieve a JSON representation builder for the given
object.
"""
if type(obj) is type:
cls = obj
else:
cls = obj.__class__
# Lookup the builder instance in the builder module
builder = getattr(ggrc.builder, cls.__name__, None)
if not builder:
# Create the builder and cache it in the builder module
builder = Builder(cls)
setattr(ggrc.builder, cls.__name__, builder)
return builder
def publish(obj, inclusions=()):
"""Translate ``obj`` into a valid JSON value. Objects with properties are
translated into a ``dict`` object representing a JSON object while simple
values are returned unchanged or specially formatted if needed.
"""
publisher = get_json_builder(obj)
if publisher and hasattr(publisher, '_publish_attrs') \
and publisher._publish_attrs:
ret = {}
self_url = url_for(obj)
if self_url:
ret['selfLink'] = self_url
view_url = view_url_for(obj)
if view_url:
ret['viewLink'] = view_url
ret.update(publisher.publish_contribution(obj, inclusions))
return ret
# Otherwise, just return the value itself by default
return obj
def update(obj, json_obj):
"""Translate the state represented by ``json_obj`` into update actions
performed upon the model object ``obj``. After performing the update ``obj``
and ``json_obj`` should be equivalent representations of the model state.
"""
updater = get_json_builder(obj)
if updater:
updater.update(obj, json_obj)
#FIXME what to do if no updater??
#Nothing, perhaps log, assume omitted by design
def create(obj, json_obj):
"""Translate the state represented by ``json_obj`` into update actions
performed upon the new model object ``obj``. After performing the update
``obj`` and ``json_obj`` should be equivalent representations of the model
state.
"""
creator = get_json_builder(obj)
if creator:
creator.create(obj, json_obj)
class UpdateAttrHandler(object):
"""Performs the translation of a JSON state representation into update
actions performed on a model object instance.
"""
@classmethod
def do_update_attr(cls, obj, json_obj, attr):
"""Perform the update to ``obj`` required to make the attribute attr
equivalent in ``obj`` and ``json_obj``.
"""
if (hasattr(attr, '__call__')):
# The attribute has been decorated with a callable, grab the name and
# invoke the callable to get the value
attr_name = attr.attr_name
value = attr(cls, obj, json_obj)
else:
# Lookup the method to use to perform the update. Use reflection to
# key off of the type of the attribute and invoke the method of the
# same name.
attr_name = attr
class_attr = getattr(obj.__class__, attr_name)
method = getattr(cls, class_attr.__class__.__name__)
value = method(obj, json_obj, attr_name, class_attr)
setattr(obj, attr_name, value)
@classmethod
def InstrumentedAttribute(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an ``InstrumentedAttribute``"""
method = getattr(cls, class_attr.property.__class__.__name__)
return method(obj, json_obj, attr_name, class_attr)
@classmethod
def ColumnProperty(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``ColumnProperty``"""
method = getattr(
cls,
class_attr.property.expression.type.__class__.__name__,
cls.default_column_handler)
return method(obj, json_obj, attr_name, class_attr)
@classmethod
def default_column_handler(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a simple value column"""
return json_obj.get(attr_name)
@classmethod
def DateTime(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``Datetime`` column."""
value = json_obj.get(attr_name)
try:
return iso8601.parse_date(value) if value else None
except iso8601.ParseError as e:
raise BadRequest(
'Malformed DateTime {0} for parameter {1}. '
'Error message was: {2}'.format(value, attr_name, e.message)
)
@classmethod
def Date(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``Date`` column."""
value = json_obj.get(attr_name)
try:
return datetime.strptime(value, "%Y-%m-%d") if value else None
except ValueError as e:
raise BadRequest(
'Malformed Date {0} for parameter {1}. '
'Error message was: {2}'.format(value, attr_name, e.message)
)
@classmethod
def query_for(cls, rel_class, json_obj, attr_name, uselist):
"""Resolve the model object instance referred to by the JSON value."""
if uselist:
# The value is a collection of links, resolve the collection of objects
value = json_obj.get(attr_name)
rel_ids = [o[u'id'] for o in value] if value else []
if rel_ids:
return db.session.query(rel_class).filter(
rel_class.id.in_(rel_ids)).all()
else:
return []
else:
rel_obj = json_obj.get(attr_name)
if rel_obj:
try:
return db.session.query(rel_class).filter(
rel_class.id == rel_obj[u'id']).one()
        except TypeError:
raise TypeError(''.join(['Failed to convert attribute ', attr_name]))
return None
@classmethod
def RelationshipProperty(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for a ``RelationshipProperty``."""
rel_class = class_attr.property.mapper.class_
return cls.query_for(
rel_class, json_obj, attr_name, class_attr.property.uselist)
@classmethod
def AssociationProxy(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an ``AssociationProxy``."""
rel_class = class_attr.remote_attr.property.mapper.class_
return cls.query_for(rel_class, json_obj, attr_name, True)
@classmethod
def property(cls, obj, json_obj, attr_name, class_attr):
"""Translate the JSON value for an object method decorated as a
``property``.
"""
#FIXME need a way to decide this. Require link? Use URNs?
# reflective approaches won't work as this is used for polymorphic
# properties
# rel_class = None
# return cls.query_for(rel_class, json_obj, attr_name, True)
if attr_name in json_obj:
url = json_obj[attr_name]['href']
rel_class_name = _request_ctx_stack.top.url_adapter.match(url, 'GET')[0]
from ggrc import models
rel_class = getattr(models, rel_class_name)
return cls.query_for(rel_class, json_obj, attr_name, False)
return None
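# Illustrative sketch (added; not part of the original module): the JSON link shapes
# that the relationship handlers above resolve back into model instances.  The ids,
# hrefs and attribute names are hypothetical.
_EXAMPLE_LINK_JSON = {
  'owner': {'id': 42, 'href': '/api/people/42'},            # single reference
  'objectives': [{'id': 1, 'href': '/api/objectives/1'}],   # collection of references
}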
class Builder(AttributeInfo):
"""JSON Dictionary builder for ggrc.models.* objects and their mixins."""
def generate_link_object_for(self, obj, inclusions, include):
"""Generate a link object for this object. If there are property paths
to be included specified in the ``inclusions`` parameter, those properties
will be added to the object representation. If the ``include`` parameter
is ``True`` the entire object will be represented in the result.
"""
if include:
return publish(obj, inclusions)
result = {'id': obj.id, 'href': url_for(obj)}
for path in inclusions:
if type(path) is not str and type(path) is not unicode:
attr_name, remaining_path = path[0], path[1:]
else:
attr_name, remaining_path = path, ()
      result[attr_name] = self.publish_attr(obj, attr_name, remaining_path, True)
return result
def publish_link_collection(self, obj, attr_name, inclusions, include):
"""The ``attr_name`` attribute is a collection of object references;
translate the collection of object references into a collection of link
objects for the JSON dictionary representation.
"""
# FIXME: Remove the "if o is not None" when we can guarantee referential
# integrity
return [self.generate_link_object_for(o, inclusions, include)
for o in getattr(obj, attr_name) if o is not None]
def publish_link(self, obj, attr_name, inclusions, include):
"""The ``attr_name`` attribute is an object reference; translate the object
reference into a link object for the JSON dictionary representation.
"""
attr_value = getattr(obj, attr_name)
if attr_value:
return self.generate_link_object_for(attr_value, inclusions, include)
return None
def publish_attr(self, obj, attr_name, inclusions, include):
class_attr = getattr(obj.__class__, attr_name)
if isinstance(class_attr, AssociationProxy):
return self.publish_link_collection(obj, attr_name, inclusions, include)
elif isinstance(class_attr, InstrumentedAttribute) and \
isinstance(class_attr.property, RelationshipProperty):
if class_attr.property.uselist:
return self.publish_link_collection(
obj, attr_name, inclusions, include)
else:
return self.publish_link(obj, attr_name, inclusions, include)
elif isinstance(class_attr, property):
return self.publish_link(obj, attr_name, inclusions, include)
else:
return getattr(obj, attr_name)
def publish_attrs(self, obj, json_obj, inclusions):
"""Translate the state represented by ``obj`` into the JSON dictionary
``json_obj``.
The ``inclusions`` parameter can specify a tree of property paths to be
inlined into the representation. Leaf attributes will be inlined completely
if they are links to other objects. The inclusions data structure is a
list where the first segment of a path is a string and the next segment
is a list of segment paths. Here are some examples:
..
('directives')
[('directives'),('cycles')]
[('directives', ('audit_frequency','organization')),('cycles')]
"""
for attr in self._publish_attrs:
if hasattr(attr, '__call__'):
attr_name = attr.attr_name
else:
attr_name = attr
local_inclusion = ()
for inclusion in inclusions:
if inclusion[0] == attr_name:
local_inclusion = inclusion
break
json_obj[attr_name] = self.publish_attr(
obj, attr_name, local_inclusion[1:], len(local_inclusion) > 0)
@classmethod
def do_update_attrs(cls, obj, json_obj, attrs):
"""Translate every attribute in ``attrs`` from the JSON dictionary value
to a value or model object instance for references set for the attribute
in ``obj``.
"""
for attr_name in attrs:
UpdateAttrHandler.do_update_attr(obj, json_obj, attr_name)
def update_attrs(self, obj, json_obj):
"""Translate the state representation given by ``json_obj`` into the
model object ``obj``.
"""
self.do_update_attrs(obj, json_obj, self._update_attrs)
def create_attrs(self, obj, json_obj):
"""Translate the state representation given by ``json_obj`` into the new
model object ``obj``.
"""
self.do_update_attrs(obj, json_obj, self._create_attrs)
def publish_contribution(self, obj, inclusions):
"""Translate the state represented by ``obj`` into a JSON dictionary"""
json_obj = {}
self.publish_attrs(obj, json_obj, inclusions)
return json_obj
def update(self, obj, json_obj):
"""Update the state represented by ``obj`` to be equivalent to the state
represented by the JSON dictionary ``json_obj``.
"""
self.update_attrs(obj, json_obj)
def create(self, obj, json_obj):
"""Update the state of the new model object ``obj`` to be equivalent to the
state represented by the JSON dictionary ``json_obj``.
"""
self.create_attrs(obj, json_obj)
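# Illustrative sketch (added; not part of the original module): driving a ``Builder``
# directly with an inclusions tree.  ``some_model_instance`` is hypothetical; the
# attribute names mirror the example in the ``publish_attrs`` docstring.
def _example_builder_usage(some_model_instance):
  builder = get_json_builder(some_model_instance)
  inclusions = [('directives', ('audit_frequency', 'organization'))]
  return builder.publish_contribution(some_model_instance, inclusions)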
|
apache-2.0
| 2,375,897,023,168,792,600
| 37.774691
| 79
| 0.675316
| false
| 3.843071
| false
| false
| false
|
hpcugent/vsc-mympirun
|
bin/mytaskprolog.py
|
1
|
2185
|
#!/usr/bin/env python
#
# Copyright 2009-2021 Ghent University
#
# This file is part of vsc-mympirun,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# the Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/hpcugent/vsc-mympirun
#
# vsc-mympirun is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# vsc-mympirun is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with vsc-mympirun. If not, see <http://www.gnu.org/licenses/>.
#
"""
Generate preferred CUDA_VISIBLE_DEVICES as part of srun task prolog
Work around some slurm issues
"""
from __future__ import print_function
from vsc.utils.affinity import sched_getaffinity
def export(key, value):
"""print export key=value, which is picked up by the task prolog"""
print("export %s=%s" % (key, value))
def get_preferred_gpu_map():
# issue #158: make generic or wait for schedmd fix, eg python nvml bindings
# this is the joltik map: 32 cores, even cores for gpu 0-1, odd for gpus 2-3
# so we prefer first 8 even cores for gpu 0, first 8 odd cores for gpu 1 etc etc
GPU_MAP = [0, 2] * 8 + [1, 3] * 8
return GPU_MAP
def preferred_cvd():
"""Generate the CUDA_VISIBLE_DEVICES value"""
gpu_map = get_preferred_gpu_map()
current_idx = [idx for idx, bit in enumerate(sched_getaffinity().get_cpus()) if bit and idx < len(gpu_map)]
gpus = set([gpu_map[idx] for idx in current_idx])
export('CUDA_VISIBLE_DEVICES', ','.join([str(x) for x in sorted(gpus)]))
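def _example_cvd_for_cores(cores):
    """Hypothetical helper (added for illustration; not part of the original script):
    compute the preferred GPU set for an explicit list of core indices instead of the
    live affinity mask, e.g. cores [0, 1, 2, 3] on the map above give [0, 2]."""
    gpu_map = get_preferred_gpu_map()
    return sorted(set(gpu_map[idx] for idx in cores if idx < len(gpu_map)))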
def main():
preferred_cvd()
if __name__ == '__main__':
main()
|
gpl-2.0
| -7,128,351,076,482,336,000
| 33.68254
| 111
| 0.701144
| false
| 3.171263
| false
| false
| false
|
NPWR/Year-2042
|
data.py
|
1
|
17122
|
from genericFunctions import *
import pygame as pg
from pygame.locals import *
from pygame import gfxdraw
from math import *
from random import randrange
from rigidBody import *
from levels import *
import sys
PI = pi
class Spaceship(rigidBody):
def __init__(self,pos,d = [0.,0.]):
rigidBody.__init__(self,pos,d)
self.c = (255,255,255)
self.c1 = (0,0,0)
self.bullets = []
self.fuel = MAX_FUEL_1
self.turretLevel = 0
self.hpLevel = 0
self.speedLevel = 0
self.LEVEL = 0
self.turretReady = True
self.turretCoolDown = TURRET_COOLDOWN[self.turretLevel]
self.turretCoolDownTime = 0
self.boosterReady = True
self.boosterCoolDown = 60
self.boosterCoolDownTime = 0
self.HP = HEALTH[self.hpLevel]
self.XP = 0
self.xpToNextLevel = LEVELS_XP[self.LEVEL]
self.speed = SPEED[self.speedLevel]
self.rocketParticles = ParticleSystem(ROCKET_COLOR,ROCKET_COLOR_VAR,ROCKET_LS,ROCKET_LS_VAR,ROCKET_MINSIZE,ROCKET_MAXSIZE)
self.boosterParticles = ParticleSystem(BOOSTER_COLOR,BOOSTER_COLOR_VAR,BOOSTER_LS,BOOSTER_LS_VAR,BOOSTER_MINSIZE,BOOSTER_MAXSIZE)
self.boosterParticles.setDrag(1.0)
self.rocketParticles.setDrag(DRAG)
self.shootAng = 0.
self.growth = 1.0
self.upgraded = False
self.bodySize = int(10*self.growth)
self.rearSize = int(4*self.growth)
def levelUp(self,upg):
self.LEVEL += 1
if upg == 'turret':
self.turretLevel += 1
if upg == 'health':
self.hpLevel += 1
if upg == 'speed':
self.speedLevel += 1
self.turretReady = True
self.turretCoolDown = TURRET_COOLDOWN[self.turretLevel]
self.turretCoolDownTime = 0
self.HP = HEALTH[self.hpLevel]
self.XP = 0
self.xpToNextLevel = LEVELS_XP[self.LEVEL]
self.speed = SPEED[self.speedLevel]
self.upgraded = True
def followMouse(self):
self.normalMove(self.shootAng)
self.upgraded = False
def addFuel(self):
self.fuel += FUEL_VALUE
if self.fuel > MAX_FUEL_1:
self.fuel = MAX_FUEL_1
if self.fuel < 0:
self.fuel = 0
self.XP += 20
def boost(self):
if self.fuel >= 10 and self.boosterReady:
x = cos(self.ang) * BOOST_SPEED
y = sin(self.ang ) * BOOST_SPEED
self.addMov([x,y])
self.boosterParticles.start(BOOSTER_FLUX,1)
self.fuel -= BOOST_COST
self.boosterReady = False
self.boosterCoolDownTime = self.boosterCoolDown
def normalMove(self,ang):
if self.fuel > 0:
spd = self.speed
x = cos(ang) * spd
y = sin(ang) * spd
self.addMov([x,y])
self.rocketParticles.start(ROCKET_FLUX)
self.fuel -= 1
def actAngle(self):
self.ang = atan2(self.d[1],self.d[0])
self.shootAng = atan2(pg.mouse.get_pos()[1] - CNTR[1], pg.mouse.get_pos()[0] - CNTR[0])
def coolDown(self):
if self.turretCoolDownTime > 0 and not self.turretReady:
self.turretCoolDownTime -= 1
else:
self.turretReady = True
if self.boosterCoolDownTime > 0 and not self.boosterReady:
self.boosterCoolDownTime -= 1
else:
self.boosterReady = True
def shoot(self):
if self.turretReady:
NB = {}
NB['POS'] = [self.pos[0],self.pos[1]]
x = cos(self.shootAng) * BULLET_SPEED + self.d[0]
y = sin(self.shootAng) * BULLET_SPEED + self.d[1]
NB['D'] = [x,y]
NB['AGE'] = 0
self.bullets.append(NB)
self.turretReady = False
self.turretCoolDownTime = self.turretCoolDown
def actuate(self):
self.move()
self.actAngle()
self.actBullets()
self.actParticles()
def actBullets(self):
for i,B in enumerate(self.bullets):
B['POS'][0] += B['D'][0]
B['POS'][1] += B['D'][1]
B['POS'][0] = int(B['POS'][0])
B['POS'][1] = int(B['POS'][1])
B['AGE'] += 1
if B['AGE'] > BULLET_LS:
self.bullets.pop(i)
self.coolDown()
def actParticles(self):
mang = atan2(self.d[1],self.d[0])
pmx = cos(mang)*30
pmy = sin(mang)*30
self.rocketParticles.actuate(self.pos,self.d,[pmx,pmy],ROCKET_SPREAD)
self.boosterParticles.actuate(self.pos,self.d,[pmx,pmy],BOOSTER_SPREAD)
def draw(self, SF, camPos):
#Particles drawing
self.rocketParticles.draw(SF,camPos)
self.boosterParticles.draw(SF,camPos)
#Calculating screen pos
pos = [self.pos[0]-camPos[0],self.pos[1]-camPos[1]]
#Ship Drawing
ang1 = self.ang + PI + PI/4.
ang2 = self.ang + PI - PI/4.
bodySize = int(10*self.growth)
rearSize = int(4*self.growth)
self.bodySize = bodySize
self.rearSize = rearSize
p1 = (int(pos[0] + cos(ang1)*bodySize), int(pos[1] + sin(ang1)*bodySize))
p2 = (int(pos[0] + cos(ang2)*bodySize), int(pos[1] + sin(ang2)*bodySize))
pg.gfxdraw.aacircle(SF,p1[0],p1[1],rearSize,self.c)
pg.gfxdraw.aacircle(SF,p2[0],p2[1],rearSize,self.c)
pg.draw.circle(SF,self.c1,pos,bodySize)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],bodySize,self.c)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],rearSize,ROCKET_COLOR)
for B in self.bullets:
p = (B['POS'][0] - camPos[0], B['POS'][1] - camPos[1])
pg.draw.circle(SF,self.c1,p,4)
pg.gfxdraw.aacircle(SF,p[0],p[1],4,self.c)
class Scene:
def __init__(self,BgDensity,BgDepth):
self.pos = (-CNTR[0],-CNTR[1])
self.vpos = [0.,0.]
self.dx, self.dy = 0.,0.
self.viewSize = WINSIZE
self.background = Background(BgDensity,BgDepth)
self.player = Spaceship(self.pos)
self.playerCell = [0,0]
self.cellStackTest = {}
self.cellStack = {}
self.genFuel()
self.previousCell = [0,0]
self.UI = {}
self.iUI = []
self.focus = 'GAME'
def signal(self,signal):
if signal == 'L':
self.player.normalMove(PI)
if signal == 'R':
self.player.normalMove(0)
if signal == 'U':
self.player.normalMove(-PI/2.)
if signal == 'D':
self.player.normalMove(PI/2.)
if signal == 'LCLICK':
if self.focus == 'UI':
choice = self.iUI[0].upgradeChoice()
if choice != None:
self.focus = 'GAME'
self.player.levelUp(BOX_TO_UPG[choice])
if signal == 'LCLICKH':
if self.focus == 'GAME':
self.player.shoot()
if signal == 'RCLICK':
if self.focus == 'UI':
choice = self.iUI[0].upgradeChoice()
if choice != None:
self.focus = 'GAME'
self.player.levelUp(BOX_TO_UPG[choice])
if signal == 'RCLICKH':
if self.focus == 'GAME':
self.player.followMouse()
if signal == 'SPACE':
if self.focus == 'GAME':
self.player.boost()
def addMov(self,vec):
self.dx += vec[0]
self.dy += vec[1]
def genFuel(self):
"""
Using dict for fuel cell notation:
fuel = {"x":x,
"y":y,
"dx":dx,
"dy":dy}
"""
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+":"+str(cell[1])
been = False
try:
been = self.cellStackTest[key]
except:
been = False
if not been:
fuel = []
for i in range(FUEL_PER_CELL):
x = randrange(W)
y = randrange(H)
c = {'x':x, 'y':y, 'dx':0., 'dy':0.}
fuel.append(c)
self.cellStack[key] = fuel
self.cellStackTest[key] = True
def redefCell(self):
x = int(floor(self.player.pos[0] / W))
y = int(floor(self.player.pos[1] / H))
self.playerCell = [x,y]
if self.playerCell != self.previousCell:
self.previousCell = self.playerCell
self.genFuel()
def moveFuelCells(self):
for nb in AROUND:
cell = MOVE(self.playerCell, nb)
key = str(cell[0])+':'+str(cell[1])
for fuel in self.cellStack[key]:
fuel['x'] += fuel['dx']
fuel['y'] += fuel['dy']
fuel['dx'] *= DRAG
fuel['dy'] *= DRAG
def checkFuelCellsAttraction(self):
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+':'+str(cell[1])
for i,fuel in enumerate(self.cellStack[key]):
x = (cell[0] * W + fuel['x']) - self.pos[0]
y = (cell[1] * H + fuel['y']) - self.pos[1]
if onScreen((x,y)):
dx = x - CNTR[0]
dy = y - CNTR[1]
d = hypot(dx,dy)
if d <= FUEL_MAGNET_RANGE:
g = FUEL_MAGNET_STRENGHT/(d)
ang = atan2(dy,dx) + PI
x = cos(ang)*g
y = sin(ang)*g
fuel['dx'] += x
fuel['dy'] += y
if d <= self.player.bodySize*2:
self.player.addFuel()
self.cellStack[key].pop(i)
def refreshUI(self):
self.UI['FUEL'].setCount(self.player.fuel)
self.UI['XP'].setCount(self.player.XP)
if self.player.XP >= self.player.xpToNextLevel:
self.player.XP = 0
self.iUI[0].appear()
self.focus = 'UI'
self.UI['XP'].setMax(LEVELS_XP[self.player.LEVEL])
self.UI['HP'].setMax(HEALTH[self.player.hpLevel])
def move(self):
self.vpos[0] += self.dx
self.vpos[1] += self.dy
self.dx *= DRAG
self.dy *= DRAG
self.actPos()
self.redefCell()
self.checkFuelCellsAttraction()
self.moveFuelCells()
self.refreshUI()
def addUI(self,key,ui,independant = False):
if not independant:
self.UI[key] = ui
else:
self.iUI.append(ui)
def followPlayer(self):
self.vpos[0] = self.player.vpos[0] - CNTR[0]
self.vpos[1] = self.player.vpos[1] - CNTR[1]
def actPos(self):
self.pos = (int(self.vpos[0]),int(self.vpos[1]))
def drawFuel(self,SF,cp):
for nb in AROUND:
cell = MOVE(self.playerCell,nb)
key = str(cell[0])+":"+str(cell[1])
for fp in self.cellStack[key]:
dx = cell[0] * W
dy = cell[1] * H
pos = (int((fp['x']+ dx)-cp[0]),int((fp['y']+dy)-cp[1]))
if onScreen(pos):
pg.draw.circle(SF,(0,0,0),pos,FUEL_SIZE)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],FUEL_SIZE,FUEL_COLOR)
pg.gfxdraw.aacircle(SF,pos[0],pos[1],int(FUEL_SIZE/2.),FUEL_COLOR)
def drawUI(self,SF):
for i,key in enumerate(self.UI):
self.UI[key].draw(SF,UI_POS,i)
for ui in self.iUI:
ui.draw(SF)
def draw(self,SF):
self.background.draw(SF,self.pos)
self.drawFuel(SF,self.pos)
self.player.draw(SF,self.pos)
self.drawUI(SF)
class ParticleSystem:
def __init__(self, color, colorRange, medLs, varLs, minSize, maxSize):
self.baseCol = color
self.colorMod = colorRange
self.baseLifespan = medLs
self.lifespanVariation = varLs
self.minSize = minSize
self.maxSize = maxSize
self.active = False
self.particles = []
"""
        Particles are modelled as a dict:
{"Px":x,
"Py":y,
"Dx":dx,
"Dy":dy,
"AGE":age,
"COLOR":(r,g,b),
"SIZE":s,
"BSIZE":s}
"""
self.time = 0
self.stopTime = 0
self.spawnRate = 0
self.DRAG = 1.0
def setDrag(self,drag):
self.DRAG = drag
def start(self,flux,stop = None):
if not self.active:
self.active = True
self.time = 0
if stop != None:
self.stopTime = stop
self.spawnRate = flux # particles/s
def stop(self):
if self.active:
self.active = False
self.time = 0
self.stopTime = 0
self.spawnRate = 0
def actuate(self, opos, omov, pmov, spread):
#Move existing particles and delete old ones
toDel = []
for i,particle in enumerate(self.particles):
particle["Px"] += particle["Dx"]
particle["Py"] += particle["Dy"]
particle["Dx"] *= self.DRAG
particle["Dy"] *= self.DRAG
particle["AGE"] += 1
particle["SIZE"] = int((float(particle["BSIZE"])/float(self.baseLifespan))*(float(self.baseLifespan)-float(particle["AGE"])))
if particle["SIZE"] < 1:
particle["SIZE"] = 1
rnd = randrange(-self.lifespanVariation,self.lifespanVariation)
if particle["AGE"] > self.baseLifespan + rnd:
toDel.append(i)
toDel.reverse()
for i in toDel:
self.particles.pop(i)
if self.active:
#Stop the system if necessary
if self.stopTime != 0:
if self.time >= self.stopTime:
self.stop()
#Spawn new particles
for particle in range(self.spawnRate):
newP = {}
r = randrange(self.baseCol[0] - self.colorMod, self.baseCol[0] + self.colorMod)
g = randrange(self.baseCol[1] - self.colorMod, self.baseCol[1] + self.colorMod)
b = randrange(self.baseCol[2] - self.colorMod, self.baseCol[2] + self.colorMod)
angleDev = int(degrees(spread)/2.)
angleDev = randrange(-angleDev,angleDev)
angleDev = radians(angleDev)
oAngle = atan2(pmov[1],pmov[0]) + PI
spd = hypot(pmov[0],pmov[1]) * (randrange(50,100)/100.)
nAngle = oAngle + angleDev
dx = cos(nAngle) * spd
dy = sin(nAngle) * spd
newP["Px"] = opos[0]
newP["Py"] = opos[1]
newP["Dx"] = omov[0] + dx
newP["Dy"] = omov[1] + dy
newP["AGE"] = 0
newP["COLOR"] = verifyColor((r,g,b))
newP["SIZE"] = randrange(self.minSize,self.maxSize)
newP["BSIZE"] = newP["SIZE"]
self.particles.append(newP)
self.time += 1
def draw(self,SF,cP):
for p in self.particles:
pos = (int(p["Px"])-cP[0],int(p["Py"])-cP[1])
pg.draw.circle(SF,p["COLOR"],pos,p["SIZE"])
class Background:
def __init__(self,density,depth):
self.density = density
self.depth = depth
self.initPlanes()
def initPlanes(self):
self.planes = []
for i in range(self.depth):
self.planes.append([])
for j in range(self.density*(i+1)):
star = (randrange(W),randrange(H))
self.planes[i].append(star)
self.planes.reverse()
self.surfaces = []
for j,plane in enumerate(self.planes):
i = (self.depth-1)-j
c = int((255/self.depth) * (self.depth - i))
c = (c,c,c)
newSF = pg.Surface((W*2,H*2))
smlSF = pg.Surface((W,H))
for star in plane:
pg.draw.circle(smlSF,c,star,2)
pg.gfxdraw.aacircle(smlSF,star[0],star[1],2,c)
newSF.blit(smlSF,(0,0))
newSF.blit(smlSF,(W,0))
newSF.blit(smlSF,(0,H))
newSF.blit(smlSF,(W,H))
newSF.set_colorkey((0,0,0),pg.RLEACCEL)
self.surfaces.append(newSF)
self.surfaces.reverse()
def draw(self,SF,camPos):
for i,surface in enumerate(self.surfaces):
dmod = (i+1)*(i+1)
pos = (int(camPos[0]/dmod),int(camPos[1]/dmod))
x = pos[0] % W
y = pos[1] % H
rct = ((x,y),(W,H))
SF.blit(surface,(0,0),rct)
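# Illustrative sketch (added; not part of the original game code): minimal standalone
# use of ParticleSystem following the dict notation documented above.  The colour,
# lifespan, flux and spread values are hypothetical.
def _example_particle_system():
    ps = ParticleSystem((255, 200, 50), 20, 30, 10, 1, 4)
    ps.start(5, 60)                                  # emit 5 particles/frame for 60 frames
    ps.actuate([0.0, 0.0], [0.0, 0.0], [30.0, 0.0], PI / 6.0)
    return len(ps.particles)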
|
gpl-2.0
| -6,190,535,035,971,781,000
| 29.250883
| 137
| 0.493634
| false
| 3.30796
| false
| false
| false
|
fugufisch/wholecell
|
state/metabolite.py
|
1
|
4402
|
import math
from data.knowledgebase import Knowledgebase
from state import State
__author__ = 'max'
__author__ = 'Sebastian'
class SingleMetabolite(State, object):
""""""
def __init__(self, metabolite_by_row):
"""Constructor for SingleMetabolite"""
super(SingleMetabolite, self).__init__(metabolite_by_row["WholeCellModelID"], metabolite_by_row["Name"])
self.__charge = float('nan')
self.__molecularWeightCalc = None #float('nan')
self.__exchangeLowerBound = float('nan')
self.__exchangeUpperBound = float('nan')
self.__reactions = None
self.__volume = float('nan')
self.__category = None
self._set_information(metabolite_by_row)
@property
def charge(self):
return self.__charge
@charge.setter
def charge(self, charge):
self.__charge = charge
@property
def molecularWeightCalc(self):
return self.__molecularWeightCalc
@molecularWeightCalc.setter
def molecularWeightCalc(self, molecularWeightCalc):
self.__molecularWeightCalc = molecularWeightCalc
@property
def exchangeLowerBound(self):
return self.__exchangeLowerBound
@exchangeLowerBound.setter
def exchangeLowerBound(self, exchangeLowerBound):
self.__exchangeLowerBound = exchangeLowerBound
@property
def exchangeUpperBound(self):
return self.__exchangeUpperBound
@exchangeUpperBound.setter
def exchangeUpperBound(self, exchangeUpperBound):
self.__exchangeUpperBound = exchangeUpperBound
@property
def reactions(self):
return self.__reactions
@reactions.setter
def reaction(self, reaction):
self.__reactions = reaction
@property
def volume(self):
return self.__volume
@volume.setter
def volume(self, volume):
self.__volume = volume
@property
def category(self):
return self.__category
@category.setter
def category(self, category):
self.__category = category
def _set_information(self, metabolite_by_row):
if not math.isnan(metabolite_by_row.Charge):
self.charge = metabolite_by_row.Charge
if not math.isnan(metabolite_by_row.MolecularWeightCalc):
self.molecularWeightCalc = metabolite_by_row.MolecularWeightCalc
if not math.isnan(metabolite_by_row.ExchangeLowerBound):
self.exchangeLowerBound = metabolite_by_row.ExchangeLowerBound
if not math.isnan(metabolite_by_row.ExchangeUpperBound):
self.exchangeUpperBound = metabolite_by_row.ExchangeUpperBound
if isinstance(metabolite_by_row.Reactions, str):
self.reaction = metabolite_by_row.Reactions.split(";")
if not math.isnan(metabolite_by_row.Volume):
self.volume = metabolite_by_row.Volume
if metabolite_by_row.Category:
self.category = metabolite_by_row.Category
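# Illustrative sketch (added; not part of the original module): SingleMetabolite
# expects a pandas-Series-like row that supports both item access (row["Name"]) and
# attribute access (row.Charge).  The values below are hypothetical::
#
#   row = pandas.Series({"WholeCellModelID": "ATP", "Name": "adenosine triphosphate",
#                        "Charge": -4.0, "MolecularWeightCalc": 503.15,
#                        "ExchangeLowerBound": float("nan"),
#                        "ExchangeUpperBound": float("nan"),
#                        "Reactions": "Aas;AckA", "Volume": float("nan"),
#                        "Category": "energy"})
#   met = SingleMetabolite(row)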
class Metabolite(State, dict, object):
"""
Metabolites
"""
def __init__(self, init_dict):
super(Metabolite, self).__init__(init_dict["ID"], init_dict["name"])
        self.kb = Knowledgebase(data_dir='../data', select_states=["metabolites"])  # load only the metabolite information
        for i in range(len(self.kb.states.metabolites["WholeCellModelID"])):  # iterate over all metabolites
            print self.kb.states.metabolites.transpose()[i]  # print the row information for this metabolite
            self.add_metabolite(self.kb.states.metabolites.transpose()[i])  # pass the complete ith row
def add_metabolite(self, metabolite_by_row):
"""
This function adds a metabolite to the metabolite dictionary
        @param metabolite_by_row: pandas object containing the row information of a metabolite
@return: None
"""
if metabolite_by_row.WholeCellModelID not in self and isinstance(metabolite_by_row.WholeCellModelID, str):
            self[metabolite_by_row.WholeCellModelID] = SingleMetabolite(metabolite_by_row)  # store each SingleMetabolite keyed by its WholeCellModelID
elif isinstance(metabolite_by_row.WholeCellModelID, str):
print "WholeCellModelID {0} already known".format(metabolite_by_row.WholeCellModelID)
else:
print "Something strange WholeCellModelID: {0}".format(metabolite_by_row.WholeCellModelID)
if __name__ == "__main__":
Metabolite(({"ID": 2, "name":"metabolite"}))
|
mit
| 6,591,553,823,020,934,000
| 35.090164
| 136
| 0.664471
| false
| 3.641026
| false
| false
| false
|
smart-techs/you-get
|
src/you_get/extractors/iqiyi.py
|
1
|
8162
|
#!/usr/bin/env python
from ..common import *
from ..extractor import VideoExtractor
from uuid import uuid4
from random import random,randint
import json
from math import floor
from zlib import decompress
import hashlib
'''
Changelog:
-> http://www.iqiyi.com/common/flashplayer/20150916/MainPlayer_5_2_28_c3_3_7_4.swf
use @fffonion's method from #617.
Add trace AVM (asasm) code to Iqiyi's encode function, where the salt is pushed into the encode array; reassemble with RABCDasm (or WinRABCDasm), then use Fiddler's AutoResponder to serve the modified file in place of the original SWF. Set the browser to use Fiddler as a proxy and play the video with a debug build of Flash Player; the result finally appears in flashlog.txt (its location is easy to find with a search engine).
Code like the following (the text after #comment: is annotation only) does the job: trace("{IQIYI_SALT}:"+salt_array.join(""))
```(Postion After getTimer)
findpropstrict QName(PackageNamespace(""), "trace")
pushstring "{IQIYI_SALT}:" #comment for you to locate the salt
getscopeobject 1
getslot 17 #comment: 17 is the salt slots number defined in code
pushstring ""
callproperty QName(Namespace("http://adobe.com/AS3/2006/builtin"), "join"), 1
add
callpropvoid QName(PackageNamespace(""), "trace"), 1
```
-> http://www.iqiyi.com/common/flashplayer/20150820/MainPlayer_5_2_27_2_c3_3_7_3.swf
some small changes in Zombie.bite function
'''
'''
com.qiyi.player.core.model.def.DefinitonEnum
bid meaning for quality
0 none
1 standard
2 high
3 super
4 suprt-high
5 fullhd
10 4k
96 topspeed
'''
def mix(tvid):
salt = '4a1caba4b4465345366f28da7c117d20'
tm = str(randint(2000,4000))
sc = hashlib.new('md5', bytes(salt + tm + tvid, 'utf-8')).hexdigest()
return tm, sc, 'eknas'
def getVRSXORCode(arg1,arg2):
loc3=arg2 %3
if loc3 == 1:
return arg1^121
if loc3 == 2:
return arg1^72
return arg1^103
def getVrsEncodeCode(vlink):
loc6=0
loc2=''
loc3=vlink.split("-")
loc4=len(loc3)
# loc5=loc4-1
for i in range(loc4-1,-1,-1):
loc6=getVRSXORCode(int(loc3[loc4-i-1],16),i)
loc2+=chr(loc6)
return loc2[::-1]
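def _example_vrs_encode(path):
    """Hypothetical inverse of getVrsEncodeCode (added for illustration only; not part
    of the original extractor): round-trips a plain path through the dash-separated hex
    form, so that getVrsEncodeCode(_example_vrs_encode(p)) == p for any ASCII path p.
    """
    n = len(path)
    return '-'.join(format(getVRSXORCode(ord(path[n - 1 - j]), n - 1 - j), 'x')
                    for j in range(n))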
def getDispathKey(rid):
tp=")(*&^flash@#$%a" #magic from swf
time=json.loads(get_content("http://data.video.qiyi.com/t?tn="+str(random())))["t"]
t=str(int(floor(int(time)/(10*60.0))))
return hashlib.new("md5",bytes(t+tp+rid,"utf-8")).hexdigest()
class Iqiyi(VideoExtractor):
name = "爱奇艺 (Iqiyi)"
stream_types = [
{'id': '4k', 'container': 'f4v', 'video_profile': '4K'},
{'id': 'fullhd', 'container': 'f4v', 'video_profile': '全高清'},
{'id': 'suprt-high', 'container': 'f4v', 'video_profile': '超高清'},
{'id': 'super', 'container': 'f4v', 'video_profile': '超清'},
{'id': 'high', 'container': 'f4v', 'video_profile': '高清'},
{'id': 'standard', 'container': 'f4v', 'video_profile': '标清'},
{'id': 'topspeed', 'container': 'f4v', 'video_profile': '最差'},
]
stream_to_bid = { '4k': 10, 'fullhd' : 5, 'suprt-high' : 4, 'super' : 3, 'high' : 2, 'standard' :1, 'topspeed' :96}
stream_urls = { '4k': [] , 'fullhd' : [], 'suprt-high' : [], 'super' : [], 'high' : [], 'standard' :[], 'topspeed' :[]}
baseurl = ''
gen_uid = ''
def getVMS(self):
#tm ->the flash run time for md5 usage
#um -> vip 1 normal 0
        #authkey -> for password-protected video, replace '' with your password
        #puid -> user.passportid, may be empty?
#TODO: support password protected video
tvid, vid = self.vid
tm, sc, src = mix(tvid)
uid = self.gen_uid
vmsreq='http://cache.video.qiyi.com/vms?key=fvip&src=1702633101b340d8917a69cf8a4b8c7' +\
"&tvId="+tvid+"&vid="+vid+"&vinfo=1&tm="+tm+\
"&enc="+sc+\
"&qyid="+uid+"&tn="+str(random()) +"&um=1" +\
"&authkey="+hashlib.new('md5',bytes(hashlib.new('md5', b'').hexdigest()+str(tm)+tvid,'utf-8')).hexdigest()
return json.loads(get_content(vmsreq))
def download_playlist_by_url(self, url, **kwargs):
self.url = url
video_page = get_content(url)
videos = set(re.findall(r'<a href="(http://www\.iqiyi\.com/v_[^"]+)"', video_page))
for video in videos:
self.__class__().download_by_url(video, **kwargs)
def prepare(self, **kwargs):
assert self.url or self.vid
if self.url and not self.vid:
html = get_html(self.url)
tvid = r1(r'#curid=(.+)_', self.url) or \
r1(r'tvid=([^&]+)', self.url) or \
r1(r'data-player-tvid="([^"]+)"', html)
videoid = r1(r'#curid=.+_(.*)$', self.url) or \
r1(r'vid=([^&]+)', self.url) or \
r1(r'data-player-videoid="([^"]+)"', html)
self.vid = (tvid, videoid)
self.gen_uid = uuid4().hex
try:
info = self.getVMS()
except:
self.download_playlist_by_url(self.url, **kwargs)
exit(0)
if info["code"] != "A000000":
log.e("[error] outdated iQIYI key")
log.wtf("is your you-get up-to-date?")
self.title = info["data"]["vi"]["vn"]
self.title = self.title.replace('\u200b', '')
# data.vp = json.data.vp
# data.vi = json.data.vi
# data.f4v = json.data.f4v
# if movieIsMember data.vp = json.data.np
#for highest qualities
#for http://www.iqiyi.com/v_19rrmmz5yw.html not vp -> np
try:
if info["data"]['vp']["tkl"]=='' :
raise ValueError
except:
log.e("[Error] Do not support for iQIYI VIP video.")
exit(-1)
vs = info["data"]["vp"]["tkl"][0]["vs"]
self.baseurl=info["data"]["vp"]["du"].split("/")
for stream in self.stream_types:
for i in vs:
if self.stream_to_bid[stream['id']] == i['bid']:
video_links=i["fs"] #now in i["flvs"] not in i["fs"]
if not i["fs"][0]["l"].startswith("/"):
tmp = getVrsEncodeCode(i["fs"][0]["l"])
if tmp.endswith('mp4'):
video_links = i["flvs"]
self.stream_urls[stream['id']] = video_links
size = 0
for l in video_links:
size += l['b']
self.streams[stream['id']] = {'container': stream['container'], 'video_profile': stream['video_profile'], 'size' : size}
break
def extract(self, **kwargs):
if 'stream_id' in kwargs and kwargs['stream_id']:
# Extract the stream
stream_id = kwargs['stream_id']
if stream_id not in self.streams:
log.e('[Error] Invalid video format.')
log.e('Run \'-i\' command with no specific video format to view all available formats.')
exit(2)
else:
# Extract stream with the best quality
stream_id = self.streams_sorted[0]['id']
urls=[]
for i in self.stream_urls[stream_id]:
vlink=i["l"]
if not vlink.startswith("/"):
#vlink is encode
vlink=getVrsEncodeCode(vlink)
key=getDispathKey(vlink.split("/")[-1].split(".")[0])
baseurl = [x for x in self.baseurl]
baseurl.insert(-1,key)
url="/".join(baseurl)+vlink+'?su='+self.gen_uid+'&qyid='+uuid4().hex+'&client=&z=&bt=&ct=&tn='+str(randint(10000,20000))
urls.append(json.loads(get_content(url))["l"])
#download should be complete in 10 minutes
#because the url is generated before start downloading
#and the key may be expired after 10 minutes
self.streams[stream_id]['src'] = urls
site = Iqiyi()
download = site.download_by_url
iqiyi_download_by_vid = site.download_by_vid
download_playlist = site.download_playlist_by_url
|
mit
| 7,460,090,059,082,132,000
| 36.804651
| 400
| 0.55438
| false
| 3.222839
| false
| false
| false
|
juju/juju-gui-charm
|
hooks/shelltoolbox.py
|
1
|
20055
|
# Copyright 2012 Canonical Ltd.
# This file is taken from the python-shelltoolbox package.
#
# IMPORTANT: Do not modify this file to add or change functionality. If you
# really feel the need to do so, first convert our code to the shelltoolbox
# library, and modify it instead (or modify the helpers or utils module here,
# as appropriate).
#
# python-shell-toolbox is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation, version 3 of the License.
#
# python-shell-toolbox is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License
# along with python-shell-toolbox. If not, see <http://www.gnu.org/licenses/>.
"""Helper functions for accessing shell commands in Python."""
__metaclass__ = type
__all__ = [
'apt_get_install',
'bzr_whois',
'cd',
'command',
'DictDiffer',
'environ',
'file_append',
'file_prepend',
'generate_ssh_keys',
'get_su_command',
'get_user_home',
'get_user_ids',
'install_extra_repositories',
'join_command',
'mkdirs',
'run',
'Serializer',
'script_name',
'search_file',
'ssh',
'su',
'user_exists',
'wait_for_page_contents',
]
from collections import namedtuple
from contextlib import contextmanager
from email.Utils import parseaddr
import errno
import json
import operator
import os
import pipes
import pwd
import re
import subprocess
import sys
from textwrap import dedent
import time
import urllib2
Env = namedtuple('Env', 'uid gid home')
def apt_get_install(*args, **kwargs):
"""Install given packages using apt.
It is possible to pass environment variables to be set during install
using keyword arguments.
:raises: subprocess.CalledProcessError
"""
caller = kwargs.pop('caller', run)
stderr = kwargs.pop('stderr', None)
debian_frontend = kwargs.pop('DEBIAN_FRONTEND', 'noninteractive')
with environ(DEBIAN_FRONTEND=debian_frontend, **kwargs):
cmd = ('apt-get', '-y', 'install') + args
return caller(*cmd, stderr=stderr)
def bzr_whois(user):
"""Return full name and email of bzr `user`.
Return None if the given `user` does not have a bzr user id.
"""
with su(user):
try:
whoami = run('bzr', 'whoami')
except (subprocess.CalledProcessError, OSError):
return None
return parseaddr(whoami)
@contextmanager
def cd(directory):
"""A context manager to temporarily change current working dir, e.g.::
>>> import os
>>> os.chdir('/tmp')
>>> with cd('/bin'): print os.getcwd()
/bin
>>> print os.getcwd()
/tmp
"""
cwd = os.getcwd()
os.chdir(directory)
try:
yield
finally:
os.chdir(cwd)
def command(*base_args):
"""Return a callable that will run the given command with any arguments.
The first argument is the path to the command to run, subsequent arguments
are command-line arguments to "bake into" the returned callable.
The callable runs the given executable and also takes arguments that will
    be appended to the "baked in" arguments.
For example, this code will list a file named "foo" (if it exists):
ls_foo = command('/bin/ls', 'foo')
ls_foo()
While this invocation will list "foo" and "bar" (assuming they exist):
ls_foo('bar')
"""
def callable_command(*args):
all_args = base_args + args
return run(*all_args)
return callable_command
@contextmanager
def environ(**kwargs):
"""A context manager to temporarily change environment variables.
If an existing environment variable is changed, it is restored during
context cleanup::
>>> import os
>>> os.environ['MY_VARIABLE'] = 'foo'
>>> with environ(MY_VARIABLE='bar'): print os.getenv('MY_VARIABLE')
bar
>>> print os.getenv('MY_VARIABLE')
foo
>>> del os.environ['MY_VARIABLE']
If we are adding environment variables, they are removed during context
cleanup::
>>> import os
>>> with environ(MY_VAR1='foo', MY_VAR2='bar'):
... print os.getenv('MY_VAR1'), os.getenv('MY_VAR2')
foo bar
>>> os.getenv('MY_VAR1') == os.getenv('MY_VAR2') == None
True
"""
backup = {}
for key, value in kwargs.items():
backup[key] = os.getenv(key)
os.environ[key] = value
try:
yield
finally:
for key, value in backup.items():
if value is None:
del os.environ[key]
else:
os.environ[key] = value
def file_append(filename, line):
r"""Append given `line`, if not present, at the end of `filename`.
Usage example::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1\n')
>>> f.close()
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
Nothing happens if the file already contains the given `line`::
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
A new line is automatically added before the given `line` if it is not
present at the end of current file content::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1')
>>> f.close()
>>> file_append(f.name, 'new line\n')
>>> open(f.name).read()
'line1\nnew line\n'
The file is created if it does not exist::
>>> import tempfile
>>> filename = tempfile.mktemp()
>>> file_append(filename, 'line1\n')
>>> open(filename).read()
'line1\n'
"""
if not line.endswith('\n'):
line += '\n'
with open(filename, 'a+') as f:
lines = f.readlines()
if line not in lines:
if not lines or lines[-1].endswith('\n'):
f.write(line)
else:
f.write('\n' + line)
def file_prepend(filename, line):
r"""Insert given `line`, if not present, at the beginning of `filename`.
Usage example::
>>> import tempfile
>>> f = tempfile.NamedTemporaryFile('w', delete=False)
>>> f.write('line1\n')
>>> f.close()
>>> file_prepend(f.name, 'line0\n')
>>> open(f.name).read()
'line0\nline1\n'
If the file starts with the given `line`, nothing happens::
>>> file_prepend(f.name, 'line0\n')
>>> open(f.name).read()
'line0\nline1\n'
If the file contains the given `line`, but not at the beginning,
the line is moved on top::
>>> file_prepend(f.name, 'line1\n')
>>> open(f.name).read()
'line1\nline0\n'
"""
if not line.endswith('\n'):
line += '\n'
with open(filename, 'r+') as f:
lines = f.readlines()
if lines[0] != line:
try:
lines.remove(line)
except ValueError:
pass
lines.insert(0, line)
f.seek(0)
f.writelines(lines)
def generate_ssh_keys(path, passphrase=''):
"""Generate ssh key pair, saving them inside the given `directory`.
>>> generate_ssh_keys('/tmp/id_rsa')
0
>>> open('/tmp/id_rsa').readlines()[0].strip()
'-----BEGIN RSA PRIVATE KEY-----'
>>> open('/tmp/id_rsa.pub').read().startswith('ssh-rsa')
True
>>> os.remove('/tmp/id_rsa')
>>> os.remove('/tmp/id_rsa.pub')
If either of the key files already exist, generate_ssh_keys() will
raise an Exception.
Note that ssh-keygen will prompt if the keyfiles already exist, but
when we're using it non-interactively it's better to pre-empt that
behaviour.
>>> with open('/tmp/id_rsa', 'w') as key_file:
... key_file.write("Don't overwrite me, bro!")
>>> generate_ssh_keys('/tmp/id_rsa') # doctest: +ELLIPSIS
Traceback (most recent call last):
Exception: File /tmp/id_rsa already exists...
>>> os.remove('/tmp/id_rsa')
>>> with open('/tmp/id_rsa.pub', 'w') as key_file:
... key_file.write("Don't overwrite me, bro!")
>>> generate_ssh_keys('/tmp/id_rsa') # doctest: +ELLIPSIS
Traceback (most recent call last):
Exception: File /tmp/id_rsa.pub already exists...
>>> os.remove('/tmp/id_rsa.pub')
"""
if os.path.exists(path):
raise Exception("File {} already exists.".format(path))
if os.path.exists(path + '.pub'):
raise Exception("File {}.pub already exists.".format(path))
return subprocess.call([
'ssh-keygen', '-q', '-t', 'rsa', '-N', passphrase, '-f', path])
def get_su_command(user, args):
"""Return a command line as a sequence, prepending "su" if necessary.
This can be used together with `run` when the `su` context manager is not
enough (e.g. an external program uses uid rather than euid).
run(*get_su_command(user, ['bzr', 'whoami']))
If the su is requested as current user, the arguments are returned as
given::
>>> import getpass
>>> current_user = getpass.getuser()
>>> get_su_command(current_user, ('ls', '-l'))
('ls', '-l')
Otherwise, "su" is prepended::
>>> get_su_command('nobody', ('ls', '-l', 'my file'))
('su', 'nobody', '-c', "ls -l 'my file'")
"""
if get_user_ids(user)[0] != os.getuid():
args = [i for i in args if i is not None]
return ('su', user, '-c', join_command(args))
return args
def get_user_home(user):
"""Return the home directory of the given `user`.
>>> get_user_home('root')
'/root'
If the user does not exist, return a default /home/[username] home::
>>> get_user_home('_this_user_does_not_exist_')
'/home/_this_user_does_not_exist_'
"""
try:
return pwd.getpwnam(user).pw_dir
except KeyError:
return os.path.join(os.path.sep, 'home', user)
def get_user_ids(user):
"""Return the uid and gid of given `user`, e.g.::
>>> get_user_ids('root')
(0, 0)
"""
userdata = pwd.getpwnam(user)
return userdata.pw_uid, userdata.pw_gid
def install_extra_repositories(*repositories):
"""Install all of the extra repositories and update apt.
Given repositories can contain a "{distribution}" placeholder, that will
be replaced by current distribution codename.
:raises: subprocess.CalledProcessError
"""
distribution = run('lsb_release', '-cs').strip()
# Starting from Oneiric, `apt-add-repository` is interactive by
# default, and requires a "-y" flag to be set.
assume_yes = None if distribution == 'lucid' else '-y'
for repo in repositories:
repository = repo.format(distribution=distribution)
run('apt-add-repository', assume_yes, repository)
run('apt-get', 'clean')
run('apt-get', 'update')
def join_command(args):
"""Return a valid Unix command line from `args`.
>>> join_command(['ls', '-l'])
'ls -l'
Arguments containing spaces and empty args are correctly quoted::
>>> join_command(['command', 'arg1', 'arg containing spaces', ''])
"command arg1 'arg containing spaces' ''"
"""
return ' '.join(pipes.quote(arg) for arg in args)
def mkdirs(*args):
"""Create leaf directories (given as `args`) and all intermediate ones.
>>> import tempfile
>>> base_dir = tempfile.mktemp(suffix='/')
>>> dir1 = tempfile.mktemp(prefix=base_dir)
>>> dir2 = tempfile.mktemp(prefix=base_dir)
>>> mkdirs(dir1, dir2)
>>> os.path.isdir(dir1)
True
>>> os.path.isdir(dir2)
True
If the leaf directory already exists the function returns without errors::
>>> mkdirs(dir1)
An `OSError` is raised if the leaf path exists and it is a file::
>>> f = tempfile.NamedTemporaryFile(
... 'w', delete=False, prefix=base_dir)
>>> f.close()
>>> mkdirs(f.name) # doctest: +ELLIPSIS
Traceback (most recent call last):
OSError: ...
"""
for directory in args:
try:
os.makedirs(directory)
except OSError as err:
if err.errno != errno.EEXIST or os.path.isfile(directory):
raise
def run(*args, **kwargs):
"""Run the command with the given arguments.
The first argument is the path to the command to run.
Subsequent arguments are command-line arguments to be passed.
This function accepts all optional keyword arguments accepted by
`subprocess.Popen`.
"""
args = [i for i in args if i is not None]
pipe = subprocess.PIPE
process = subprocess.Popen(
args, stdout=kwargs.pop('stdout', pipe),
stderr=kwargs.pop('stderr', pipe),
close_fds=kwargs.pop('close_fds', True), **kwargs)
stdout, stderr = process.communicate()
if process.returncode:
exception = subprocess.CalledProcessError(
process.returncode, repr(args))
# The output argument of `CalledProcessError` was introduced in Python
# 2.7. Monkey patch the output here to avoid TypeErrors in older
# versions of Python, still preserving the output in Python 2.7.
exception.output = ''.join(filter(None, [stdout, stderr]))
raise exception
return stdout
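def _example_run_usage():
    """Illustrative sketch (added; not part of the original module): call ``run`` with
    the executable first and each argument separately, e.g.
    ``listing = run('ls', '-l', '/tmp')``.
    Arguments equal to None are silently dropped, which makes optional flags easy.
    """
    return run('ls', '-l', '/tmp')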
def script_name():
"""Return the name of this script."""
return os.path.basename(sys.argv[0])
def search_file(regexp, filename):
"""Return the first line in `filename` that matches `regexp`."""
with open(filename) as f:
for line in f:
if re.search(regexp, line):
return line
def ssh(location, user=None, key=None, caller=subprocess.call):
"""Return a callable that can be used to run ssh shell commands.
The ssh `location` and, optionally, `user` must be given.
If the user is None then the current user is used for the connection.
The callable internally uses the given `caller`::
>>> def caller(cmd):
... print tuple(cmd)
>>> sshcall = ssh('example.com', 'myuser', caller=caller)
>>> root_sshcall = ssh('example.com', caller=caller)
>>> sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., 'myuser@example.com', '--', 'ls -l')
>>> root_sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., 'example.com', '--', 'ls -l')
The ssh key path can be optionally provided::
>>> root_sshcall = ssh('example.com', key='/tmp/foo', caller=caller)
>>> root_sshcall('ls -l') # doctest: +ELLIPSIS
('ssh', '-t', ..., '-i', '/tmp/foo', 'example.com', '--', 'ls -l')
If the ssh command exits with an error code,
a `subprocess.CalledProcessError` is raised::
>>> ssh('loc', caller=lambda cmd: 1)('ls -l') # doctest: +ELLIPSIS
Traceback (most recent call last):
CalledProcessError: ...
If ignore_errors is set to True when executing the command, no error
will be raised, even if the command itself returns an error code.
>>> sshcall = ssh('loc', caller=lambda cmd: 1)
>>> sshcall('ls -l', ignore_errors=True)
"""
sshcmd = [
'ssh',
'-t',
'-t', # Yes, this second -t is deliberate. See `man ssh`.
'-o', 'StrictHostKeyChecking=no',
'-o', 'UserKnownHostsFile=/dev/null',
]
if key is not None:
sshcmd.extend(['-i', key])
if user is not None:
location = '{}@{}'.format(user, location)
sshcmd.extend([location, '--'])
def _sshcall(cmd, ignore_errors=False):
command = sshcmd + [cmd]
retcode = caller(command)
if retcode and not ignore_errors:
raise subprocess.CalledProcessError(retcode, ' '.join(command))
return _sshcall
@contextmanager
def su(user):
"""A context manager to temporarily run the script as a different user."""
uid, gid = get_user_ids(user)
os.setegid(gid)
os.seteuid(uid)
home = get_user_home(user)
with environ(HOME=home):
try:
yield Env(uid, gid, home)
finally:
os.setegid(os.getgid())
os.seteuid(os.getuid())
def user_exists(username):
"""Return True if given `username` exists, e.g.::
>>> user_exists('root')
True
>>> user_exists('_this_user_does_not_exist_')
False
"""
try:
pwd.getpwnam(username)
except KeyError:
return False
return True
def wait_for_page_contents(url, contents, timeout=120, validate=None):
if validate is None:
validate = operator.contains
start_time = time.time()
while True:
try:
stream = urllib2.urlopen(url)
except (urllib2.HTTPError, urllib2.URLError):
pass
else:
page = stream.read()
if validate(page, contents):
return page
if time.time() - start_time >= timeout:
raise RuntimeError('timeout waiting for contents of ' + url)
time.sleep(0.1)
class DictDiffer:
"""
Calculate the difference between two dictionaries as:
(1) items added
(2) items removed
(3) keys same in both but changed values
(4) keys same in both and unchanged values
"""
# Based on answer by hughdbrown at:
# http://stackoverflow.com/questions/1165352
def __init__(self, current_dict, past_dict):
self.current_dict = current_dict
self.past_dict = past_dict
self.set_current = set(current_dict)
self.set_past = set(past_dict)
self.intersect = self.set_current.intersection(self.set_past)
@property
def added(self):
return self.set_current - self.intersect
@property
def removed(self):
return self.set_past - self.intersect
@property
def changed(self):
return set(key for key in self.intersect
if self.past_dict[key] != self.current_dict[key])
@property
def unchanged(self):
return set(key for key in self.intersect
if self.past_dict[key] == self.current_dict[key])
@property
def modified(self):
return self.current_dict != self.past_dict
@property
def added_or_changed(self):
return self.added.union(self.changed)
def _changes(self, keys):
new = {}
old = {}
for k in keys:
new[k] = self.current_dict.get(k)
old[k] = self.past_dict.get(k)
return "%s -> %s" % (old, new)
def __str__(self):
if self.modified:
s = dedent("""\
added: %s
removed: %s
changed: %s
unchanged: %s""") % (
self._changes(self.added),
self._changes(self.removed),
self._changes(self.changed),
list(self.unchanged))
else:
s = "no changes"
return s
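# Illustrative sketch (added; not part of the original module): comparing two small
# dictionaries with DictDiffer.  The dictionaries are hypothetical.
def _example_dictdiffer_usage():
    diff = DictDiffer({'a': 1, 'b': 2}, {'b': 3, 'c': 4})
    # added == set(['a']), removed == set(['c']), changed == set(['b'])
    return diff.added, diff.removed, diff.changed, diff.unchanged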
class Serializer:
"""Handle JSON (de)serialization."""
def __init__(self, path, default=None, serialize=None, deserialize=None):
self.path = path
self.default = default or {}
self.serialize = serialize or json.dump
self.deserialize = deserialize or json.load
def exists(self):
return os.path.exists(self.path)
def get(self):
if self.exists():
with open(self.path) as f:
return self.deserialize(f)
return self.default
def set(self, data):
with open(self.path, 'w') as f:
self.serialize(data, f)
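# Illustrative sketch (added; not part of the original module): persisting a small
# settings dict with Serializer.  The path and keys are hypothetical.
def _example_serializer_usage():
    settings = Serializer('/tmp/juju-gui-settings.json', default={'built': False})
    data = settings.get()   # returns the default if the file does not exist yet
    data['built'] = True
    settings.set(data)      # writes the JSON back to disk
    return data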
|
agpl-3.0
| -4,344,521,105,030,531,600
| 28.932836
| 78
| 0.588581
| false
| 3.869381
| false
| false
| false
|
ebozag/CLOUDCAL
|
simulator-ondemand-vms.py
|
1
|
4527
|
"""
"""
import random
import simpy
from math import trunc
import numpy
from configuration import *
ARRIVAL_RATE = 1/ARRIVAL_RATE
ARRIVAL_RATE *= 8
MAX_RATE = max(ARRIVAL_RATE)
SERVICE_TIME_SUM = 0.0
TIME_IN_THE_SYSTEM_SUM = 0.0
SERVICE_TIME_COUNT = 0
latency = []
latency_peak = []
REQUIRED_VMS = []
def source(env, interval, counter, avg_service_time,hour_slot):
CURRENT_ARRIVAL_SUM = 0.0
CURRENT_ARRIVAL_COUNT = 0
"""Source generates customers randomly"""
i=0
hourlyrate = ARRIVAL_RATE[hour_slot]
MAX_RATE = max(ARRIVAL_RATE[hour_slot:hour_slot+2])
pthinning = 1-hourlyrate/MAX_RATE
while env.now <= interval:
i+=1
c = customer(env, 'Request%02d' % i, counter, avg_service_time)
env.process(c)
uthin=0.0
pthin=1.0
t = env.now
t_old = t
while (uthin < pthin):
deltat = random.expovariate(MAX_RATE)
t = t + deltat
pthin = pthinning
uthin = random.random()
CURRENT_ARRIVAL_SUM += t-t_old
CURRENT_ARRIVAL_COUNT += 1
yield env.timeout(t-t_old)
print('Average rate: %d, %f' % (hour_slot, CURRENT_ARRIVAL_COUNT/CURRENT_ARRIVAL_SUM))
print('SUM, COUNT: %f. %d' % (CURRENT_ARRIVAL_SUM, CURRENT_ARRIVAL_COUNT))
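def _example_thinned_interarrival(rate, max_rate):
    """Hypothetical helper (added for illustration; not part of the original script):
    draw one inter-arrival time at the hourly rate by thinning candidates generated at
    the peak rate, mirroring the inner while-loop of source() (Lewis-Shedler thinning).
    """
    t = 0.0
    while True:
        t += random.expovariate(max_rate)
        if random.random() >= 1.0 - rate / float(max_rate):
            return t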
def customer(env, name, counter, avg_service_time):
global SERVICE_TIME_SUM, SERVICE_TIME_COUNT, TIME_IN_THE_SYSTEM_SUM, latency
"""Customer arrives, is served and leaves."""
arrive = env.now
#print('%7.4f %s: Here I am' % (arrive, name))
with counter.request() as req:
# Wait for the counter or abort at the end of our tether
yield req
wait = env.now - arrive
# Customer request start being served
#print('%7.4f %s: Waiting Time: %7.4f' % (env.now, name, wait))
service_time = random.expovariate(1.0 / avg_service_time)
SERVICE_TIME_SUM += service_time
SERVICE_TIME_COUNT += 1
yield env.timeout(service_time)
#print('%7.4f %s: Serving Time: %7.4f' % (env.now, name, service_time))
#print('%7.4f %s: Finished - Time on the System: %7.4f' % (env.now, name, wait+service_time))
TIME_IN_THE_SYSTEM_SUM += wait+service_time
#latency = numpy.append(latency,wait+service_time)
latency.append(wait+service_time)
############ MAIN FUNCTION
print('Starting Simulations:')
print
hour_slot = 0
total_latency =[]
for hourly_rate in ARRIVAL_RATE:
average_latency = 2*MAX_AVERAGE_LATENCY
reserved_vms = 0
print('=================')
print('Hour Slot: %d' % hour_slot)
while MAX_AVERAGE_LATENCY < average_latency:
reserved_vms += 1
SERVICE_TIME_SUM = 0.0
SERVICE_TIME_COUNT = 0
latency = []
# Setup and start the simulation
print('=====================')
print('Reserved VMs: %d' % reserved_vms)
#random.seed(RANDOM_SEED)
env = simpy.Environment(initial_time=START_TIME)
# Start processes and run
total_capacity = reserved_vms * MAX_CONCURRENT_REQUESTS_PER_VM
counter = simpy.Resource(env, capacity=total_capacity)
env.process(source(env, SIMULATION_TIME / 24, counter, AVERAGE_SERVICE_TIME, hour_slot))
startTime = env.now
env.run()
print('Simulation Time: %7.4f' % (env.now-startTime))
print('Average Service Time: %7.4f' % (SERVICE_TIME_SUM/SERVICE_TIME_COUNT))
average_latency = numpy.average(latency)
print('Average Time in the System: %7.4f' % average_latency)
REQUIRED_VMS = numpy.append(REQUIRED_VMS,reserved_vms)
total_latency += latency
if hour_slot == 12 :
latency_peak = latency
hour_slot += 1
# Print results
print('=====================')
print('=====================')
print('=====================')
print('RESULTS:')
print
print('Max. Required Latency: %7.4f' % MAX_AVERAGE_LATENCY)
print('Average Latency: %7.4f' % numpy.average(total_latency))
print('90th Percentile Latency: %7.4f' % numpy.percentile(total_latency,90))
print('99th Percentile Latency: %7.4f' % numpy.percentile(total_latency,99))
print('Required Virtual Machines per hour slot:')
print(REQUIRED_VMS)
yearly_cost = 0
for required_vms_per_hour in REQUIRED_VMS:
yearly_cost += 365*required_vms_per_hour*VM_HOURLY_COST_ONDEMAND
print('Yearly cost: %7.4f' % (yearly_cost))
print('=====================')
## Print Latencies - ENABLE ONLY FOR DEBUG
#for v in latency_peak: print v
|
bsd-3-clause
| -5,801,104,839,550,042,000
| 31.811594
| 101
| 0.610559
| false
| 3.261527
| false
| false
| false
|
gghandsfield/musclePLSR
|
ju_scripts/src/matlab_strain_2_cm.py
|
1
|
2888
|
"""
Script to convert Matlab-generated strain files to cmiss files
To be used by data in strain_pred_20170712
In each row, each group of 15 data entries corresponds to the following for a node:
Principal_1
Principal_2
Principal_3
VonMises
Hydrostatic
Octahedral
PrincipalVector1 x
PrincipalVector1 y
PrincipalVector1 z
PrincipalVector2 x
PrincipalVector2 y
PrincipalVector2 z
PrincipalVector3 x
PrincipalVector3 y
PrincipalVector3 z
"""
import numpy as np
import cmissio
#=========================================================================#
# constants
ACTIVATIONS = (0.0, 0.2, 0.4, 0.6, 0.8, 1.0)
LENGTHS = (380, 384, 388, 392, 396, 400)
STRAIN_TEMPLATE_FILESTR = '../data/strain/strainL{}A{}.exdata'
STRAIN_FIELDS = [8,9,10,11,12,13,14,15,16]
STRAIN_FIELD_COMPONENTS = [1,1,1,1,1,1,3,3,3]
# parameters
skips = [(400, 1.0),(396, 1.0),(392, 1.0),(388, 1.0), (384, 1.0),(380, 1.0)] # observations to skip, outliers
plsrK = 2 # number of plsr modes (1 or 2)
responseName = 'geometry' #'geometry', 'stress', or 'strain'
xvalK = 36 - len(skips) # number of folds for k-fold cross validation. For leave 1 out, this
# should be the number of observations
#=========================================================================#
def _wrapExdata(X, fieldComponents):
# wrap X into list of fields
fields = []
nFields = len(fieldComponents)
fi = 0
xi = 0
    while xi < len(X):  # strictly less than, so no empty trailing field group is added
if fi==0:
fields.append([])
nComps = fieldComponents[fi]
fields[-1].append(X[xi:xi+nComps])
xi += nComps
fi += 1
if fi==nFields:
fi = 0
return fields
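# Illustrative sketch (added; not part of the original script): wrapping one node's
# 15 strain values with the field layout described in the module docstring -- six
# scalar fields followed by three 3-component principal vectors.
def _example_wrap_one_node():
    one_node = list(range(15))
    return _wrapExdata(one_node, STRAIN_FIELD_COMPONENTS)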
def writeStrain(X, fname, header):
fields = _wrapExdata(X, STRAIN_FIELD_COMPONENTS)
cmissio.writeExdata(STRAIN_TEMPLATE_FILESTR.format(l, a),
fname,
header,
fields,
STRAIN_FIELDS)
#=========================================================================#
# input_fn = '../../strain_pred_20170712/pred_strain.txt'
# out_fn = '../../strain_pred_20170712/pred_exdata/pred_strainL{}A{}.exdata'
# out_header = 'predicted_strain_L{}A{}'
input_fn = '../../strain_pred_20170712/strain.txt'
out_fn = '../../strain_pred_20170712/actual_exdata/actual_strainL{}A{}.exdata'
out_header = 'actual_strain_L{}A{}'
file_data = np.loadtxt(input_fn, delimiter=',') # shape 30,15*nodes
# generate length and activations for each simulation
LA = []
for i, l in enumerate(LENGTHS):
for j, a in enumerate(ACTIVATIONS):
if (l, a) not in skips:
# LA.append([l, a])
LA.append([i+1, j+1])
# for each row (simulation)
for i, d in enumerate(file_data):
l, a = LA[i]
writeStrain(d, out_fn.format(l, a), out_header.format(l, a))
|
apache-2.0
| -7,811,895,322,634,500,000
| 29.4
| 112
| 0.563019
| false
| 3.212458
| false
| false
| false
|
hodger/cyclus
|
tests/tools.py
|
2
|
4942
|
from __future__ import print_function
import os
import re
import sys
import imp
import shutil
import unittest
import subprocess
import tempfile
from contextlib import contextmanager
from functools import wraps
from nose.tools import assert_true, assert_equal
from nose.plugins.attrib import attr
from nose.plugins.skip import SkipTest
from cyclus import lib as libcyclus
if sys.version_info[0] >= 3:
basestring = str
unit = attr('unit')
integration = attr('integration')
INPUT = os.path.join(os.path.dirname(__file__), "input")
def cleanfs(paths):
"""Removes the paths from the file system."""
for p in paths:
p = os.path.join(*p)
if os.path.isfile(p):
os.remove(p)
elif os.path.isdir(p):
shutil.rmtree(p)
def check_cmd(args, cwd, holdsrtn):
"""Runs a command in a subprocess and verifies that it executed properly.
"""
if not isinstance(args, basestring):
args = " ".join(args)
print("TESTING: running command in {0}:\n\n{1}\n".format(cwd, args))
f = tempfile.NamedTemporaryFile()
env = dict(os.environ)
env['_'] = cp = subprocess.check_output(['which', 'cyclus'], cwd=cwd,
universal_newlines=True).strip()
rtn = subprocess.call(args, shell=True, cwd=cwd, stdout=f, stderr=f, env=env)
if rtn != 0:
f.seek(0)
print('CYCLUS COMMAND: ' + cp)
print("STDOUT + STDERR:\n\n" + f.read().decode())
f.close()
holdsrtn[0] = rtn
assert_equal(rtn, 0)
@contextmanager
def clean_import(name, paths=None):
    """Import a module and yield it as a context manager; on exit, remove
    any modules that did not exist before entering the block.
    Be sure to delete any references to the returned module prior to
    exiting the context.
    """
sys.path = paths + sys.path
origmods = set(sys.modules.keys())
mod = imp.load_module(name, *imp.find_module(name, paths))
yield mod
sys.path = sys.path[len(paths):]
del mod
newmods = set(sys.modules.keys()) - origmods
for newmod in newmods:
del sys.modules[newmod]
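# Hedged usage sketch (added for illustration, not part of the original file):
# imports a module from an explicit path and drops the reference before the
# context exits, as the docstring asks. The stdlib 'json' package is used
# purely as a stand-in target; the helper is never called at import time.
def _example_clean_import_usage():
    stdlib_dir = os.path.dirname(os.__file__)
    with clean_import('json', [stdlib_dir]) as mod:
        data = mod.dumps({'ok': True})
        del mod  # drop the reference before leaving the block
    return data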
TESTNAME_RE = re.compile('(?:^|[\\b_\\.-])[Tt]est')
def modtests(mod):
"""Finds all of the tests in a module."""
tests = []
for name in dir(mod):
if TESTNAME_RE.match(name) is None:
continue
test = getattr(mod, name)
if test is unittest.TestCase:
continue
tests.append(test)
return tests
def dirtests(d):
"""Finds all of the test files in a directory."""
files = os.listdir(d)
filenames = []
for file in files:
if not file.endswith('.py'):
continue
if TESTNAME_RE.match(file) is None:
continue
filenames.append(file[:-3])
return filenames
def skip_then_continue(msg=""):
"""A simple function to yield such that a test is marked as skipped
and we may continue on our merry way. A message may be optionally passed
to this function.
"""
raise SkipTest(msg)
@contextmanager
def indir(d):
    """Context manager for switching directories and then switching back."""
cwd = os.getcwd()
os.chdir(d)
yield
os.chdir(cwd)
#
# Some Database API test helpers
#
LIBCYCLUS_HAS_BEEN_RUN = False
DBS = [('libcyclus-test.h5', 'libcyclus-orig.h5', libcyclus.Hdf5Back),
#('libcyclus-test.sqlite', 'libcyclus-orig.sqlite', libcyclus.SqliteBack)
]
def safe_call(cmd, shell=False, *args, **kwargs):
"""Checks that a command successfully runs with/without shell=True.
Returns the process return code.
"""
try:
rtn = subprocess.call(cmd, shell=False, *args, **kwargs)
except (subprocess.CalledProcessError, OSError):
cmd = ' '.join(cmd)
rtn = subprocess.call(cmd, shell=True, *args, **kwargs)
return rtn
def libcyclus_setup():
global LIBCYCLUS_HAS_BEEN_RUN
if not LIBCYCLUS_HAS_BEEN_RUN:
LIBCYCLUS_HAS_BEEN_RUN = True
for fname, oname, _ in DBS:
if os.path.isfile(fname):
os.remove(fname)
if os.path.isfile(oname):
os.remove(oname)
for fname, oname, _ in DBS:
if os.path.isfile(oname):
continue
safe_call(['cyclus', '-o' + oname,
os.path.join(INPUT, 'inventory.xml')])
def dbtest(f):
@wraps(f)
def wrapper():
for fname, oname, backend in DBS:
if os.path.exists(fname):
os.remove(fname)
shutil.copy(oname, fname)
db = backend(fname)
yield f, db, fname, backend
return wrapper
#
# Here there be Hackons!
#
# hack to make sure that we are actually in the tests dir when we start running
# tests. This works because this file is imported by many of the other test
# files.
_fdir = os.path.dirname(__file__)
if os.getcwd() != _fdir:
os.chdir(_fdir)
del _fdir
|
bsd-3-clause
| -36,318,217,697,277,230
| 26.303867
| 81
| 0.619385
| false
| 3.53
| true
| false
| false
|
spookylukey/django-autocomplete-light
|
autocomplete_light/autocomplete/generic.py
|
1
|
3905
|
from django.contrib.contenttypes.models import ContentType
from django.db.models import Q
from autocomplete_light.generic import GenericModelChoiceField
from .model import AutocompleteModel
__all__ = ['AutocompleteGeneric']
class AutocompleteGeneric(AutocompleteModel):
"""
Autocomplete which considers choices as a list of querysets. It inherits
from AutocompleteModel so make sure that you've read the docs and
docstrings for AutocompleteModel before using this class.
choices
A list of querysets.
search_fields
A list of lists of fields to search in, configurable like on
ModelAdmin.search_fields. The first list of fields will be used for the
first queryset in choices and so on.
AutocompleteGeneric inherits from AutocompleteModel and supports
`limit_choices` and `split_words` exactly like AutocompleteModel.
However `order_by` is not supported (yet) in AutocompleteGeneric.
"""
choices = None
search_fields = None
def choice_value(self, choice):
"""
Rely on GenericModelChoiceField to return a string containing the
content type id and object id of the result.
Because this autocomplete is made for that field, and to avoid code
duplication.
"""
field = GenericModelChoiceField()
return field.prepare_value(choice)
def validate_values(self):
"""
Ensure that every choice is part of a queryset.
"""
assert self.choices, 'autocomplete.choices should be a queryset list'
for value in self.values:
if not isinstance(value, basestring):
return False
try:
content_type_id, object_id = value.split('-', 1)
except ValueError:
return False
try:
content_type = ContentType.objects.get_for_id(content_type_id)
except ContentType.DoesNotExist:
return False
model_class = content_type.model_class()
found = False
for queryset in self.choices:
if queryset.model != model_class:
continue
if queryset.filter(pk=object_id).count() == 1:
found = True
else:
return False
if not found:
# maybe a user would cheat by using a forbidden ctype id !
return False
return True
def choices_for_request(self):
"""
Propose local results and fill the autocomplete with remote
suggestions.
"""
assert self.choices, 'autocomplete.choices should be a queryset list'
q = self.request.GET.get('q', '')
request_choices = []
querysets_left = len(self.choices)
i = 0
for queryset in self.choices:
conditions = self._choices_for_request_conditions(q,
self.search_fields[i])
limit = ((self.limit_choices - len(request_choices)) /
querysets_left)
for choice in queryset.filter(conditions)[:limit]:
request_choices.append(choice)
querysets_left -= 1
i += 1
return request_choices
def choices_for_values(self):
"""
Values which are not found in the querysets are ignored.
"""
values_choices = []
for queryset in self.choices:
ctype = ContentType.objects.get_for_model(queryset.model).pk
try:
ids = [x.split('-')[1] for x in self.values
if x is not None and int(x.split('-')[0]) == ctype]
except ValueError:
continue
for choice in queryset.filter(pk__in=ids):
values_choices.append(choice)
return values_choices
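# Hedged example (added for illustration, not part of the original module):
# a sketch of how a subclass is typically declared, with one entry in
# search_fields per queryset in choices. The model names are hypothetical.
#
#     class AutocompleteProject(AutocompleteGeneric):
#         choices = [
#             Project.objects.all(),
#             Task.objects.filter(archived=False),
#         ]
#         search_fields = [
#             ('name', 'slug'),   # used for the Project queryset
#             ('title',),         # used for the Task queryset
#         ]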
|
mit
| 6,551,789,736,588,345,000
| 29.992063
| 79
| 0.591549
| false
| 4.826947
| false
| false
| false
|
noironetworks/group-based-policy
|
gbpservice/_i18n.py
|
1
|
1052
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import oslo_i18n
DOMAIN = "gbpservice"
_translators = oslo_i18n.TranslatorFactory(domain=DOMAIN)
# The primary translation function using the well-known name "_"
_ = _translators.primary
# The contextual translation function using the name "_C"
_C = _translators.contextual_form
# The plural translation function using the name "_P"
_P = _translators.plural_form
def get_available_languages():
return oslo_i18n.get_available_languages(DOMAIN)
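# Hedged usage sketch (added for illustration, not part of the original
# module): other gbpservice modules typically import the markers defined
# above, e.g.
#
#     from gbpservice._i18n import _
#     msg = _("Policy target group %s not found") % group_id
#
# The message text and group_id variable are hypothetical examples.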
|
apache-2.0
| 7,634,804,986,224,047,000
| 31.875
| 78
| 0.739544
| false
| 3.784173
| false
| false
| false
|
fregaham/DISP
|
sqlobject/tests/test_indexes.py
|
1
|
1088
|
from sqlobject import *
from sqlobject.tests.dbtest import *
########################################
## Indexes
########################################
class SOIndex1(SQLObject):
name = StringCol(length=100)
number = IntCol()
nameIndex = DatabaseIndex('name', unique=True)
nameIndex2 = DatabaseIndex(name, number)
nameIndex3 = DatabaseIndex({'column': name,
'length': 3})
class SOIndex2(SQLObject):
name = StringCol()
nameIndex = DatabaseIndex({'expression': 'lower(name)'})
def test_1():
setupClass(SOIndex1)
n = 0
for name in 'blah blech boring yep yort snort'.split():
n += 1
SOIndex1(name=name, number=n)
mod = SOIndex1._connection.module
try:
SOIndex1(name='blah', number=0)
except (mod.ProgrammingError, mod.IntegrityError, mod.OperationalError, mod.DatabaseError):
# expected
pass
else:
assert 0, "Exception expected."
def test_2():
if not supports('expressionIndex'):
return
setupClass(SOIndex2)
SOIndex2(name='')
|
gpl-2.0
| 103,308,581,540,977,470
| 24.904762
| 95
| 0.581801
| false
| 3.942029
| false
| false
| false
|
fuziontech/svb
|
svb/test/test_http_client.py
|
1
|
13560
|
import sys
import unittest2
from mock import MagicMock, Mock, patch
import svb
from svb.test.helper import SvbUnitTestCase
VALID_API_METHODS = ('get', 'post', 'delete')
class HttpClientTests(SvbUnitTestCase):
def setUp(self):
super(HttpClientTests, self).setUp()
self.original_filters = svb.http_client.warnings.filters[:]
svb.http_client.warnings.simplefilter('ignore')
def tearDown(self):
svb.http_client.warnings.filters = self.original_filters
super(HttpClientTests, self).tearDown()
def check_default(self, none_libs, expected):
for lib in none_libs:
setattr(svb.http_client, lib, None)
inst = svb.http_client.new_default_http_client()
self.assertTrue(isinstance(inst, expected))
def test_new_default_http_client_urlfetch(self):
self.check_default((),
svb.http_client.UrlFetchClient)
def test_new_default_http_client_requests(self):
self.check_default(('urlfetch',),
svb.http_client.RequestsClient)
def test_new_default_http_client_pycurl(self):
self.check_default(('urlfetch', 'requests',),
svb.http_client.PycurlClient)
def test_new_default_http_client_urllib2(self):
self.check_default(('urlfetch', 'requests', 'pycurl'),
svb.http_client.Urllib2Client)
class ClientTestBase():
@property
def request_mock(self):
return self.request_mocks[self.request_client.name]
@property
def valid_url(self, path='/foo'):
return 'https://api.svb.com%s' % (path,)
def make_request(self, method, url, headers, post_data):
client = self.request_client(verify_ssl_certs=True)
return client.request(method, url, headers, post_data)
def mock_response(self, body, code):
raise NotImplementedError(
'You must implement this in your test subclass')
def mock_error(self, error):
raise NotImplementedError(
'You must implement this in your test subclass')
def check_call(self, meth, abs_url, headers, params):
raise NotImplementedError(
'You must implement this in your test subclass')
def test_request(self):
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
for meth in VALID_API_METHODS:
abs_url = self.valid_url
data = ''
if meth != 'post':
abs_url = '%s?%s' % (abs_url, data)
data = None
headers = {'my-header': 'header val'}
body, code, _ = self.make_request(
meth, abs_url, headers, data)
self.assertEqual(200, code)
self.assertEqual('{"foo": "baz"}', body)
self.check_call(self.request_mock, meth, abs_url,
data, headers)
def test_exception(self):
self.mock_error(self.request_mock)
self.assertRaises(svb.error.APIConnectionError,
self.make_request,
'get', self.valid_url, {}, None)
class RequestsVerify(object):
def __eq__(self, other):
return other and other.endswith('svb/data/ca-certificates.crt')
class RequestsClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.RequestsClient
def setUp(self):
super(RequestsClientTests, self).setUp()
self.session = MagicMock()
def test_timeout(self):
headers = {'my-header': 'header val'}
data = ''
self.mock_response(self.request_mock, '{"foo": "baz"}', 200)
self.make_request('POST', self.valid_url,
headers, data, timeout=5)
self.check_call(None, 'POST', self.valid_url,
data, headers, timeout=5)
def make_request(self, method, url, headers, post_data, timeout=80):
client = self.request_client(verify_ssl_certs=True,
timeout=timeout,
proxy='http://slap/')
return client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
self.session.request = MagicMock(return_value=result)
mock.Session = MagicMock(return_value=self.session)
def mock_error(self, mock):
mock.exceptions.RequestException = Exception
self.session.request.side_effect = mock.exceptions.RequestException()
mock.Session = MagicMock(return_value=self.session)
# Note that unlike other modules, we don't use the "mock" argument here
# because we need to run the request call against the internal mock
# session.
def check_call(self, mock, meth, url, post_data, headers, timeout=80):
self.session.request. \
assert_called_with(meth, url,
headers=headers,
data=post_data,
verify=RequestsVerify(),
proxies={"http": "http://slap/",
"https": "http://slap/"},
timeout=timeout)
class UrlFetchClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.UrlFetchClient
def mock_response(self, mock, body, code):
result = Mock()
result.content = body
result.status_code = code
mock.fetch = Mock(return_value=result)
def mock_error(self, mock):
mock.Error = mock.InvalidURLError = Exception
mock.fetch.side_effect = mock.InvalidURLError()
def check_call(self, mock, meth, url, post_data, headers):
mock.fetch.assert_called_with(
url=url,
method=meth,
headers=headers,
validate_certificate=True,
deadline=55,
payload=post_data
)
class Urllib2ClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.Urllib2Client
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
def mock_response(self, mock, body, code):
response = Mock
response.read = Mock(return_value=body)
response.code = code
response.info = Mock(return_value={})
self.request_object = Mock()
mock.Request = Mock(return_value=self.request_object)
mock.urlopen = Mock(return_value=response)
opener = Mock
opener.open = Mock(return_value=response)
mock.build_opener = Mock(return_value=opener)
mock.build_opener.open = opener.open
mock.ProxyHandler = Mock(return_value=opener)
mock.urlopen = Mock(return_value=response)
def mock_error(self, mock):
mock.urlopen.side_effect = ValueError
mock.build_opener().open.side_effect = ValueError
mock.build_opener.reset_mock()
def check_call(self, mock, meth, url, post_data, headers):
        if sys.version_info >= (3, 0) and isinstance(post_data, str):
post_data = post_data.encode('utf-8')
mock.Request.assert_called_with(url, post_data, headers)
if (self.client._proxy):
self.assertTrue(type(self.client._proxy) is dict)
mock.ProxyHandler.assert_called_with(self.client._proxy)
mock.build_opener.open.assert_called_with(self.request_object)
self.assertTrue(not mock.urlopen.called)
if (not self.client._proxy):
mock.urlopen.assert_called_with(self.request_object)
self.assertTrue(not mock.build_opener.called)
self.assertTrue(not mock.build_opener.open.called)
class Urllib2ClientHttpsProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap/",
"https": "http://slap/"})
class Urllib2ClientHttpProxyTests(Urllib2ClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(Urllib2ClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://slap/")
class PycurlClientTests(SvbUnitTestCase, ClientTestBase):
request_client = svb.http_client.PycurlClient
def make_request(self, method, url, headers, post_data, proxy=None):
self.client = self.request_client(verify_ssl_certs=True,
proxy=proxy)
self.proxy = proxy
return self.client.request(method, url, headers, post_data)
@property
def request_mock(self):
if not hasattr(self, 'curl_mock'):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock = Mock()
lib_mock.Curl = Mock(return_value=self.curl_mock)
return self.curl_mock
def setUp(self):
super(PycurlClientTests, self).setUp()
self.bio_patcher = patch('svb.util.io.BytesIO')
bio_mock = Mock()
self.bio_patcher.start().return_value = bio_mock
self.bio_getvalue = bio_mock.getvalue
def tearDown(self):
super(PycurlClientTests, self).tearDown()
self.bio_patcher.stop()
def mock_response(self, mock, body, code):
self.bio_getvalue.return_value = body.encode('utf-8')
mock.getinfo.return_value = code
def mock_error(self, mock):
class FakeException(BaseException):
@property
def args(self):
return ('foo', 'bar')
svb.http_client.pycurl.error = FakeException
mock.perform.side_effect = svb.http_client.pycurl.error
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
# A note on methodology here: we don't necessarily need to verify
# _every_ call to setopt, but check a few of them to make sure the
# right thing is happening. Keep an eye specifically on conditional
# statements where things are more likely to go wrong.
self.curl_mock.setopt.assert_any_call(lib_mock.NOSIGNAL, 1)
self.curl_mock.setopt.assert_any_call(lib_mock.URL,
svb.util.utf8(url))
if meth == 'get':
self.curl_mock.setopt.assert_any_call(lib_mock.HTTPGET, 1)
elif meth == 'post':
self.curl_mock.setopt.assert_any_call(lib_mock.POST, 1)
else:
self.curl_mock.setopt.assert_any_call(lib_mock.CUSTOMREQUEST,
meth.upper())
self.curl_mock.perform.assert_any_call()
class PycurlClientHttpProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpProxyTests, self).make_request(
method, url, headers, post_data,
"http://user:withPwd@slap:8888/")
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 8888)
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYUSERPWD,
"user:withPwd")
super(PycurlClientHttpProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class PycurlClientHttpsProxyTests(PycurlClientTests):
def make_request(self, method, url, headers, post_data, proxy=None):
return super(PycurlClientHttpsProxyTests, self).make_request(
method, url, headers, post_data,
{"http": "http://slap:8888/",
"https": "http://slap2:444/"})
def check_call(self, mock, meth, url, post_data, headers):
lib_mock = self.request_mocks[self.request_client.name]
self.curl_mock.setopt.assert_any_call(lib_mock.PROXY, "slap2")
self.curl_mock.setopt.assert_any_call(lib_mock.PROXYPORT, 444)
super(PycurlClientHttpsProxyTests, self).check_call(
mock, meth, url, post_data, headers)
class APIEncodeTest(SvbUnitTestCase):
def test_encode_dict(self):
body = {
'foo': {
'dob': {
'month': 1,
},
'name': 'bat'
},
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[dob][month]', 1) in values)
self.assertTrue(('foo[name]', 'bat') in values)
def test_encode_array(self):
body = {
'foo': [{
'dob': {
'month': 1,
},
'name': 'bat'
}],
}
values = [t for t in svb.api_requestor._api_encode(body)]
self.assertTrue(('foo[][dob][month]', 1) in values)
self.assertTrue(('foo[][name]', 'bat') in values)
if __name__ == '__main__':
unittest2.main()
|
mit
| -1,805,596,033,668,120,800
| 33.416244
| 77
| 0.591667
| false
| 3.911162
| true
| false
| false
|
beeftornado/sentry
|
tests/sentry/integrations/slack/test_integration.py
|
1
|
6673
|
from __future__ import absolute_import
import responses
import six
from six.moves.urllib.parse import parse_qs, urlencode, urlparse
from sentry.integrations.slack import SlackIntegrationProvider, SlackIntegration
from sentry.models import (
AuditLogEntry,
AuditLogEntryEvent,
Identity,
IdentityProvider,
IdentityStatus,
Integration,
OrganizationIntegration,
)
from sentry.testutils import IntegrationTestCase, TestCase
from sentry.testutils.helpers import override_options
class SlackIntegrationTest(IntegrationTestCase):
provider = SlackIntegrationProvider
def assert_setup_flow(
self,
team_id="TXXXXXXX1",
authorizing_user_id="UXXXXXXX1",
expected_client_id="slack-client-id",
expected_client_secret="slack-client-secret",
):
responses.reset()
resp = self.client.get(self.init_path)
assert resp.status_code == 302
redirect = urlparse(resp["Location"])
assert redirect.scheme == "https"
assert redirect.netloc == "slack.com"
assert redirect.path == "/oauth/v2/authorize"
params = parse_qs(redirect.query)
scopes = self.provider.identity_oauth_scopes
assert params["scope"] == [" ".join(scopes)]
assert params["state"]
assert params["redirect_uri"] == ["http://testserver/extensions/slack/setup/"]
assert params["response_type"] == ["code"]
assert params["client_id"] == [expected_client_id]
assert params.get("user_scope") == ["links:read"]
        # once we've asserted on it, switch to singular values to make life
        # easier
authorize_params = {k: v[0] for k, v in six.iteritems(params)}
access_json = {
"ok": True,
"access_token": "xoxb-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
"team": {"id": team_id, "name": "Example"},
"authed_user": {"id": authorizing_user_id},
}
responses.add(responses.POST, "https://slack.com/api/oauth.v2.access", json=access_json)
responses.add(
responses.GET,
"https://slack.com/api/team.info",
json={
"ok": True,
"team": {
"domain": "test-slack-workspace",
"icon": {"image_132": "http://example.com/ws_icon.jpg"},
},
},
)
resp = self.client.get(
u"{}?{}".format(
self.setup_path,
urlencode({"code": "oauth-code", "state": authorize_params["state"]}),
)
)
mock_request = responses.calls[0].request
req_params = parse_qs(mock_request.body)
assert req_params["grant_type"] == ["authorization_code"]
assert req_params["code"] == ["oauth-code"]
assert req_params["redirect_uri"] == ["http://testserver/extensions/slack/setup/"]
assert req_params["client_id"] == [expected_client_id]
assert req_params["client_secret"] == [expected_client_secret]
assert resp.status_code == 200
self.assertDialogSuccess(resp)
@responses.activate
def test_bot_flow(self):
self.assert_setup_flow()
integration = Integration.objects.get(provider=self.provider.key)
assert integration.external_id == "TXXXXXXX1"
assert integration.name == "Example"
assert integration.metadata == {
"access_token": "xoxb-xxxxxxxxx-xxxxxxxxxx-xxxxxxxxxxxx",
"scopes": sorted(self.provider.identity_oauth_scopes),
"icon": "http://example.com/ws_icon.jpg",
"domain_name": "test-slack-workspace.slack.com",
"installation_type": "born_as_bot",
}
oi = OrganizationIntegration.objects.get(
integration=integration, organization=self.organization
)
assert oi.config == {}
idp = IdentityProvider.objects.get(type="slack", external_id="TXXXXXXX1")
identity = Identity.objects.get(idp=idp, user=self.user, external_id="UXXXXXXX1")
assert identity.status == IdentityStatus.VALID
audit_entry = AuditLogEntry.objects.get(event=AuditLogEntryEvent.INTEGRATION_ADD)
assert audit_entry.get_note() == "installed Example for the slack integration"
@responses.activate
def test_multiple_integrations(self):
self.assert_setup_flow()
self.assert_setup_flow(team_id="TXXXXXXX2", authorizing_user_id="UXXXXXXX2")
integrations = Integration.objects.filter(provider=self.provider.key).order_by(
"external_id"
)
assert integrations.count() == 2
assert integrations[0].external_id == "TXXXXXXX1"
assert integrations[1].external_id == "TXXXXXXX2"
oi = OrganizationIntegration.objects.get(
integration=integrations[1], organization=self.organization
)
assert oi.config == {}
idps = IdentityProvider.objects.filter(type="slack")
assert idps.count() == 2
identities = Identity.objects.all()
assert identities.count() == 2
assert identities[0].external_id != identities[1].external_id
assert identities[0].idp != identities[1].idp
@responses.activate
def test_reassign_user(self):
self.assert_setup_flow()
identity = Identity.objects.get()
assert identity.external_id == "UXXXXXXX1"
self.assert_setup_flow(authorizing_user_id="UXXXXXXX2")
identity = Identity.objects.get()
assert identity.external_id == "UXXXXXXX2"
@responses.activate
def test_install_v2(self):
with override_options(
{"slack-v2.client-id": "other-id", "slack-v2.client-secret": "other-secret"}
):
self.assert_setup_flow(
expected_client_id="other-id", expected_client_secret="other-secret",
)
class SlackIntegrationConfigTest(TestCase):
def setUp(self):
self.integration = Integration.objects.create(provider="slack", name="Slack", metadata={})
self.installation = SlackIntegration(self.integration, self.organization.id)
def test_config_data_workspace_app(self):
self.installation.get_config_data()["installationType"] = "workspace_app"
def test_config_data_user_token(self):
self.integration.metadata["user_access_token"] = "token"
self.installation.get_config_data()["installationType"] = "classic_bot"
def test_config_data_born_as_bot(self):
self.integration.metadata["installation_type"] = "born_as_bot"
self.installation.get_config_data()["installationType"] = "born_as_bot"
|
bsd-3-clause
| -5,141,144,303,626,453,000
| 36.27933
| 98
| 0.624157
| false
| 3.927604
| true
| false
| false
|
radez/python-heatclient
|
setup.py
|
1
|
1880
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import setuptools
from heatclient.openstack.common import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setuptools.setup(
name="python-heatclient",
version=setup.get_post_version('heatclient'),
author='Heat API Developers',
author_email='discuss@heat-api.org',
description="Client library for Heat orchestration API",
long_description=read('README.md'),
license='Apache',
url='https://github.com/heat-api/python-heatclient',
packages=setuptools.find_packages(exclude=['tests', 'tests.*']),
include_package_data=True,
install_requires=setup.parse_requirements(),
test_suite="nose.collector",
cmdclass=setup.get_cmdclass(),
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
entry_points={
'console_scripts': ['heat = heatclient.shell:main']
},
dependency_links=setup.parse_dependency_links(),
tests_require=setup.parse_requirements(['tools/test-requires']),
setup_requires=['setuptools-git>=0.4'],
)
|
apache-2.0
| 5,432,592,217,602,906,000
| 35.862745
| 74
| 0.697872
| false
| 4.095861
| false
| false
| false
|
mchrzanowski/ProjectEuler
|
src/python/Problem105.py
|
1
|
3645
|
'''
Created on Aug 18, 2012
@author: mchrzanowski
'''
import itertools
import os.path
import time
def do_two_subsets_equal_each_other(numbers, value_to_equal):
'''
exact subset problem.
return true if we have a subset that equals value_to_equal.
http://www.cs.dartmouth.edu/~ac/Teach/CS105-Winter05/Notes/nanda-scribe-3.pdf
'''
def merge_lists(first, second):
return first + second
def add_to_every_element(value_to_add, elements):
return map(lambda x: x + value_to_add, elements)
L = [[0]]
numbers = list(numbers) # we need to preserve position.
numbers.insert(0, 0) # we need a header for the below algo.
for i in xrange(1, len(numbers)):
L.append(list())
raw_list = merge_lists(L[i - 1],
add_to_every_element(numbers[i], L[i - 1]))
for element in raw_list:
if value_to_equal == element:
return True
elif element < value_to_equal:
L[i].append(element)
return False
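# Hedged example (added for illustration, not part of the original solution):
# a tiny sanity check of the subset-sum helper above. Never called at runtime.
def _example_subset_sum():
    assert do_two_subsets_equal_each_other({1, 2, 5}, 7)      # 2 + 5 == 7
    assert not do_two_subsets_equal_each_other({1, 2, 5}, 4)  # nothing sums to 4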
def does_larger_subset_sum_to_a_larger_number(B, C):
'''
    check that for the two subsets B & C, the longer subset also has the
    larger sum. return False if this property is violated, True otherwise
'''
if len(B) > len(C) and sum(B) <= sum(C):
return False
if len(C) > len(B) and sum(C) <= sum(B):
return False
return True
def all_subsets(numbers):
'''
return a set of sets, each containing
two subsets of this number collection
'''
subsets = set()
for first_length in xrange(1, len(numbers)):
for first_combo in itertools.combinations(numbers, first_length):
disjoint_numbers = [number for number in numbers if number not in first_combo]
for second_length in xrange(1, len(disjoint_numbers) + 1):
for second_combo in itertools.combinations(disjoint_numbers, second_length):
subsets.add(frozenset((first_combo, second_combo,)))
return subsets
def all_partitions(numbers):
'''
    return a list of tuples, each one a partition of this number
    collection into two disjoint parts
'''
partitions = list()
for length in xrange(1, len(numbers)):
for combination in itertools.combinations(numbers, length):
numbers_sans_combination = [element for element in numbers if element not in combination]
partitions.append((numbers_sans_combination, combination))
return partitions
def is_group_acceptable(numbers):
'''
verify the properties of equality and
of larger sets summing to larger numbers
for this given group of numbers
'''
for partition in all_partitions(numbers):
first, second = partition
if do_two_subsets_equal_each_other(first, sum(second)):
return False
for subset in all_subsets(numbers):
first, second = subset
if not does_larger_subset_sum_to_a_larger_number(first, second):
return False
return True
def main():
with open(os.path.join(os.curdir,
'./requiredFiles/Problem105Sets.txt')) as f:
special_sets = list()
for row in f:
numbers = set()
for number in row.split(","):
numbers.add(int(number))
if is_group_acceptable(numbers):
special_sets.append(numbers)
total = sum(sum(special_set) for special_set in special_sets)
print "Total: %d" % total
if __name__ == '__main__':
begin = time.time()
main()
end = time.time()
print "Runtime: %f seconds." % (end - begin)
|
mit
| 7,038,937,051,428,532,000
| 26.201493
| 101
| 0.601646
| false
| 3.881789
| false
| false
| false
|
erix5son/Tennis-Modelling
|
ranking_systems/tests/test_glicko2_ranking.py
|
1
|
1307
|
# -*- coding: utf-8 -*-
__author__ = 'Heungsub Lee'
from glicko2 import Glicko2, WIN, DRAW, LOSS
class almost(object):
def __init__(self, val, precision=3):
self.val = val
self.precision = precision
def almost_equals(self, val1, val2):
if round(val1, self.precision) == round(val2, self.precision):
return True
fmt = '%.{0}f'.format(self.precision)
mantissa = lambda f: int((fmt % f).replace('.', ''))
return abs(mantissa(val1) - mantissa(val2)) <= 1
def __eq__(self, other):
try:
if not self.almost_equals(self.val.volatility, other.volatility):
return False
except AttributeError:
pass
return (self.almost_equals(self.val.mu, other.mu) and
self.almost_equals(self.val.sigma, other.sigma))
def __repr__(self):
return repr(self.val)
def test_glickman_example():
env = Glicko2(tau=0.5)
r1 = env.create_rating(1500, 200, 0.06)
r2 = env.create_rating(1400, 30)
r3 = env.create_rating(1550, 100)
r4 = env.create_rating(1700, 300)
rated = env.rate(r1, [(WIN, r2), (LOSS, r3), (LOSS, r4)])
# env.create_rating2(1464.06, 151.52, 0.05999)
assert almost(rated) == env.create_rating(1464.051, 151.515, 0.05999)
|
mit
| -5,102,957,927,354,244,000
| 30.119048
| 77
| 0.58684
| false
| 3.068075
| false
| false
| false
|
NUKnightLab/TimelineJS3
|
website/app.py
|
1
|
2893
|
'''
Main entrypoint file. To run:
$ python serve.py
'''
from flask import Flask
from flask import request
from flask import render_template
from flask import json
from flask import send_from_directory
import importlib
import traceback
import sys
import os
# Add current directory to sys.path
site_dir = os.path.dirname(os.path.abspath(__file__))
examples_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'examples.json')
faq_json = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'faq.json')
if site_dir not in sys.path:
sys.path.append(site_dir)
# Set default FLASK_SETTINGS_MODULE for debug mode
if not os.environ.get('FLASK_SETTINGS_MODULE', ''):
os.environ['FLASK_SETTINGS_MODULE'] = 'core.settings.loc'
# Import settings module for the inject_static_url context processor.
settings_module = os.environ.get('FLASK_SETTINGS_MODULE')
try:
importlib.import_module(settings_module)
except ImportError, e:
raise ImportError(
"Could not import settings '%s' (Is it on sys.path?): %s" \
% (settings_module, e))
settings = sys.modules[settings_module]
app = Flask(__name__)
dist_dir = os.path.join(settings.PROJECT_ROOT, 'dist')
@app.context_processor
def inject_static_url():
"""
Inject the variables 'static_url' and 'STATIC_URL' into the templates to
avoid hard-coded paths to static files. Grab it from the environment
variable STATIC_URL, or use the default. Never has a trailing slash.
"""
static_url = settings.STATIC_URL or app.static_url_path
if static_url.endswith('/'):
static_url = static_url.rstrip('/')
return dict(static_url=static_url, STATIC_URL=static_url)
@app.context_processor
def inject_index_data():
return dict(examples=json.load(open(examples_json)),faqs=json.load(open(faq_json)))
@app.route('/dist/<path:path>')
def catch_build(path):
"""
Serve /dist/... urls from the build directory
"""
return send_from_directory(dist_dir, path)
@app.route('/')
@app.route('/<path:path>')
def catch_all(path='index.html', context=None):
"""Catch-all function which serves every URL."""
context = context or {}
if not os.path.splitext(path)[1]:
path = os.path.join(path, 'index.html')
return render_template(path, **context)
if __name__ == "__main__":
import getopt
ssl_context = None
port = 5000
try:
opts, args = getopt.getopt(sys.argv[1:], "sp:", ["port="])
for opt, arg in opts:
if opt == '-s':
ssl_context = 'adhoc'
elif opt in ('-p', '--port'):
port = int(arg)
else:
print 'Usage: app.py [-s]'
sys.exit(1)
except getopt.GetoptError:
print 'Usage: app.py [-s] [-p port]'
sys.exit(1)
app.run(host='0.0.0.0', port=port, debug=True, ssl_context=ssl_context)
|
mpl-2.0
| 8,731,970,829,104,129,000
| 27.643564
| 91
| 0.645351
| false
| 3.411557
| false
| false
| false
|
siemens/django-dingos-authoring
|
dingos_authoring/read_settings.py
|
1
|
2558
|
# Copyright (c) Siemens AG, 2014
#
# This file is part of MANTIS. MANTIS is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation; either version 2
# of the License, or(at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
from django.conf import settings
from django.core.files.storage import FileSystemStorage
import dingos_authoring
if settings.configured and 'DINGOS_AUTHORING' in dir(settings):
dingos_authoring.DINGOS_AUTHORING_IMPORTER_REGISTRY = settings.DINGOS_AUTHORING.get('IMPORTER_REGISTRY', dingos_authoring.DINGOS_AUTHORING_IMPORTER_REGISTRY)
if settings.configured and 'DINGOS_AUTHORING' in dir(settings):
dingos_authoring.DINGOS_AUTHORING_CELERY_BUG_WORKAROUND = settings.DINGOS_AUTHORING.get('CELERY_BUG_WORKAROUND', dingos_authoring.DINGOS_AUTHORING_CELERY_BUG_WORKAROUND)
if settings.configured and 'DINGOS_AUTHORING' in dir(settings):
    if "DATA_FILESYSTEM_ROOT" not in settings.DINGOS_AUTHORING:
raise NotImplementedError("Please configure a DATA_FILESYSTEM_ROOT directory in the DINGOS_AUTHORING settings (look "
"at how the MEDIA directory is defined and define an appropriate directory "
"for storing authored data (usually imported XMLs) on the filesystem. "
"Example setting : root('authoring','imports')")
else:
dingos_authoring.DINGOS_AUTHORING_DATA_FILESYSTEM_ROOT = settings.DINGOS_AUTHORING['DATA_FILESYSTEM_ROOT']
dingos_authoring.DINGOS_AUTHORING_DATA_STORAGE = FileSystemStorage(location=dingos_authoring.DINGOS_AUTHORING_DATA_FILESYSTEM_ROOT)
# We do not want the blobs to be directly available via URL.
# Reading the code it seems that setting 'base_url=None' in
# the __init__ arguments does not help, because __init__
        # then chooses the media URL as default url. So we have
# to set it explicitly after __init__ is done.
dingos_authoring.DINGOS_AUTHORING_DATA_STORAGE.base_url=None
|
gpl-2.0
| -708,127,362,642,221,300
| 53.446809
| 173
| 0.731431
| false
| 3.707246
| false
| false
| false
|
rob-earwaker/rail
|
rail.py
|
1
|
7049
|
import functools
import inspect
def identity(value):
return value
def not_(value):
return not value
def raise_(exception=None):
if exception is None:
raise
else:
raise exception
def try_(func, handle):
def try_func(arg):
try:
return func(arg)
except Exception as exception:
return handle(exception)
return try_func
class UnmatchedValueError(Exception):
def __init__(self, value):
self.value = value
super().__init__(str(value))
def match(*args):
return lambda value: pipe(
next(
(map_func for is_match, map_func in args if is_match(value)),
lambda _: pipe(value, UnmatchedValueError, raise_)
),
call_with(value)
)
def match_type(*args):
return match(*[
(lambda value, types=types: isinstance(value, types), map_func)
for types, map_func in args
])
def match_length(*args):
return match(*[
(
lambda value, match_len=match_len: pipe(value, len, match_len),
map_func
)
for match_len, map_func in args
])
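# Hedged example (added for illustration, not part of the original module):
# match_type dispatches on the value's type and calls the first matching
# branch. The helper below is a sketch and is never called at import time.
def _example_match_type():
    describe = match_type(
        (int, lambda n: 'int:%d' % n),
        (str, lambda s: 'str:%s' % s),
    )
    assert describe(3) == 'int:3'
    assert describe('x') == 'str:x'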
class NamedArg:
NO_VALUE = object()
NO_DEFAULT = object()
def __init__(self, name, default=NO_DEFAULT, value=NO_VALUE):
self.name = name
self.default = default
self.value = value
def has_value(self):
return self.value != NamedArg.NO_VALUE
def has_value_or_default(self):
return self.has_value() or self.default != NamedArg.NO_DEFAULT
def value_or_default(self):
return self.value if self.has_value() else self.default
def with_value(self, value):
return NamedArg(self.name, self.default, value)
class Args:
def __init__(self, named_args, list_args, keyword_args):
self.named_args = named_args
self.list_args = list_args
self.keyword_args = keyword_args
@classmethod
def from_func(cls, func):
return pipe(
inspect.getargspec(func),
lambda argspec: pipe(
argspec.defaults if argspec.defaults is not None else (),
reversed,
list,
lambda rdefaults: pipe(
argspec.args,
reversed,
lambda rargs: [
NamedArg(name, rdefaults[index])
if len(rdefaults) > index else NamedArg(name)
for index, name in enumerate(rargs)
]
)
),
reversed,
list,
lambda named_args: cls(named_args, list_args=(), keyword_args={})
)
def get_named_arg_index(self, is_match):
return pipe(
self.named_args,
lambda args:
(index for index, arg in enumerate(args) if is_match(arg)),
lambda iterator: next(iterator, None)
)
def apply_named_arg(self, index, value):
return pipe(
self.named_args.copy(),
tee(
lambda named_args: pipe(
named_args.pop(index),
lambda arg: named_args.insert(index, arg.with_value(value))
)
),
lambda named_args: Args(
named_args, self.list_args, self.keyword_args.copy()
)
)
def apply_list_arg(self, value):
return pipe(
self.list_args + (value,),
lambda list_args: Args(
self.named_args.copy(), list_args, self.keyword_args.copy()
)
)
def apply_keyword_arg(self, name, value):
return pipe(
self.keyword_args.copy(),
tee(lambda keyword_args: keyword_args.update({name: value})),
lambda keyword_args: Args(
self.named_args.copy(), self.list_args, keyword_args
)
)
def apply_arg(self, value):
return pipe(
self.get_named_arg_index(lambda arg: not arg.has_value()),
lambda index: (
self.apply_named_arg(index, value) if index is not None
else self.apply_list_arg(value)
)
)
def apply_kwarg(self, name, value):
return pipe(
self.get_named_arg_index(lambda arg: arg.name == name),
lambda index: (
self.apply_named_arg(index, value) if index is not None
else self.apply_keyword_arg(name, value)
)
)
def apply_args(self, *args):
return functools.reduce(
lambda args, value: args.apply_arg(value), args, self
)
def apply_kwargs(self, **kwargs):
return functools.reduce(
lambda args, name: args.apply_kwarg(name, kwargs[name]),
kwargs,
self
)
def apply(self, *args, **kwargs):
return self.apply_args(*args).apply_kwargs(**kwargs)
def all_present(self):
return all(arg.has_value_or_default() for arg in self.named_args)
def named_arg_values(self):
return tuple(arg.value_or_default() for arg in self.named_args)
def execute(self, func):
args = self.named_arg_values() + self.list_args
return func(*args, **self.keyword_args)
def partial(func, applied_args=None):
@functools.wraps(func)
def partial_func(*args, **kwargs):
return pipe(
Args.from_func(func) if applied_args is None else applied_args,
lambda existing_args: existing_args.apply(*args, **kwargs),
lambda new_args: (
new_args.execute(func) if new_args.all_present()
else partial(func, new_args)
)
)
return partial_func
def compose(*funcs):
return functools.reduce(
lambda func1, func2: lambda arg: func2(func1(arg)), funcs, identity
)
def pipe(value, *funcs):
func = compose(*funcs)
return func(value)
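# Hedged example (added for illustration, not part of the original module):
# compose/pipe thread a value through functions from left to right. The
# helper below is a sketch and is never called at import time.
def _example_pipe():
    assert compose(str.strip, str.upper)('  ok ') == 'OK'
    assert pipe(3, lambda x: x + 1, str) == '4'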
def tee(*funcs):
return lambda arg: pipe(
arg,
compose(*funcs),
lambda _: arg
)
@partial
def call_with(value, func):
return func(value)
@partial
def lt(value2, value1):
return value1 < value2
@partial
def le(value2, value1):
return value1 <= value2
@partial
def eq(value2, value1):
return value1 == value2
@partial
def ne(value2, value1):
return value1 != value2
@partial
def gt(value2, value1):
return value1 > value2
@partial
def ge(value2, value1):
return value1 >= value2
class Track:
def __init__(self, func=identity):
self.func = func
def __call__(self, arg):
return self.func(arg)
def compose(self, *funcs):
return Track(compose(self.func, *funcs))
def fold(self, success_func, handle_func):
return self.compose(success_func).handle(handle_func)
def handle(self, *funcs):
return Track(try_(self.func, handle=compose(*funcs)))
def tee(self, *funcs):
return self.compose(tee(*funcs))
|
mit
| -5,303,923,054,830,463,000
| 24.085409
| 79
| 0.555256
| false
| 3.937989
| false
| false
| false
|
mamchecker/mamchecker
|
mamchecker/r/i/__init__.py
|
1
|
1562
|
# -*- coding: utf-8 -*-
import random
from sympy.abc import x
from sympy import log, latex
from mamchecker.hlp import Struct, norm_int as norm
jsFuncs = {'exp': 'return Math.pow(({0}),x-({1}))+({2})',
'log': 'if (x-({0})>0) return Math.log(x-({0}))+({1})',
'pow': 'return ({0})*Math.pow(x-({1}),({2}))+({3})'}
def given():
# r,i,n,m=143,3,5,50
N = 4
rs = lambda r: random.sample(r, 1)[0]
def gete():
e = e0, e1, e2 = rs([0.2, 0.5, 2, 3]), rs(
[-2, -1, 0, 1, 2]), rs([-2, -1, 0, 1, 2])
ee = e0 ** (x - e1) + e2
jse = jsFuncs['exp'].format(*e)
return (latex(ee), jse)
def getl():
l = l0, l1 = rs([-2, -1, 0, 1, 2]), rs([-2, -1, 0, 1, 2])
el = log(x - l0) + l1
jsl = jsFuncs['log'].format(*l)
return (latex(el), jsl)
def getp():
p = (p0, p1, p2, p3) = (
rs([-2, -1, -1.0 / 2, 1.0 / 2, 1, 2]),
rs([-2, -1, 0, 1, 2]),
rs([-0.2, -0.5, -2, -3, 0.2, 0.5, 2, 3]),
rs([-2, -1, 0, 1, 2]))
ep = p0 * (x - p1) ** p2 + p3
jsp = jsFuncs['pow'].format(*p)
return (latex(ep), jsp)
funcs = []
while len(funcs) < N:
f = rs([gete] * 100 + [getl] * 25 + [getp] * 1200)
while True:
nf = f()
if nf not in funcs:
funcs.append(nf)
break
order = range(len(funcs))
random.shuffle(order)
g = Struct(funcs=funcs, order=order)
return g
def calc(g):
return [o + 1 for o in g.order]
|
gpl-3.0
| 5,483,953,586,565,812,000
| 27.4
| 66
| 0.425096
| false
| 2.56486
| false
| false
| false
|
zambreno/RCL
|
sccCyGraph/graphs/check.py
|
1
|
1207
|
#!/usr/bin/env python
# encoding: utf-8
"""
untitled.py
Created by iOsama on 2013-09-24.
Copyright (c) 2013 __MyCompanyName__. All rights reserved.
"""
import sys
import os
inputfile = "reversed.mtx"
outputfile = inputfile + ".out"
x_value = 99999
def main():
# open files
fin = open(inputfile, 'r')
fout = open(outputfile, 'w')
# get graph info
line = fin.readline()
N = line.split(" ")[0]
M = line.split(" ")[1]
nonZeros = line.split(" ")[2]
if x_value > int(N):
print "ERROR: last node exceeds given N!"
exit(0)
# Count updated non-zeros
count = 0
for line in fin:
line = line.split(" ")
u = int(line[0])
v = int(line[1])
if u <= x_value and v <= x_value:
count += 1
# Write updated non-zeros
fout.write(str(x_value) + " " + str(x_value) + " " + str(count) + "\n")
fin.seek(1)
for line in fin:
line = line.split(" ")
u = int(line[0])
v = int(line[1])
if u <= x_value and v <= x_value:
if count > 1:
fout.write(str(u) + " " + str(v) + "\n")
count -= 1
else:
fout.write(str(u) + " " + str(v))
count -= 1
fin.close()
fout.close()
pass
if __name__ == '__main__':
main()
|
apache-2.0
| -5,442,214,776,072,603,000
| 17.467742
| 72
| 0.544325
| false
| 2.557203
| false
| false
| false
|
shawncaojob/LC
|
PY/361_bomb_enemy.py
|
1
|
3162
|
# 361. Bomb Enemy Add to List
# DescriptionHintsSubmissionsSolutions
# Total Accepted: 14111
# Total Submissions: 36526
# Difficulty: Medium
# Contributor: LeetCode
# Given a 2D grid, each cell is either a wall 'W', an enemy 'E' or empty '0' (the number zero), return the maximum enemies you can kill using one bomb.
# The bomb kills all the enemies in the same row and column from the planted point until it hits the wall since the wall is too strong to be destroyed.
# Note that you can only put the bomb at an empty cell.
#
# Example:
# For the given grid
#
# 0 E 0 0
# E 0 W E
# 0 E 0 0
#
# return 3. (Placing a bomb at (1,1) kills 3 enemies)
# Credits:
# Special thanks to @memoryless for adding this problem and creating all test cases.
# 2017.05.21
# When scanning. Memorizing rowhits and colhits
class Solution(object):
def maxKilledEnemies(self, grid):
"""
:type grid: List[List[str]]
:rtype: int
"""
if not grid or not grid[0]: return 0
m, n = len(grid), len(grid[0])
res = 0
rowhits, colhits = 0, [0 for j in xrange(n)]
for i in xrange(m):
for j in xrange(n):
if j == 0 or grid[i][j-1] == 'W': # Update rowhits only at first col and after 'W'
rowhits = 0
for k in xrange(j, n):
if grid[i][k] == 'W': break
if grid[i][k] == 'E': rowhits += 1
if i == 0 or grid[i-1][j] == 'W' : # Update colhits only at first row and after 'W'
colhits[j] = 0
for k in xrange(i, m):
if grid[k][j] =="W": break
if grid[k][j] == 'E': colhits[j] += 1
if grid[i][j] == '0':
res = max(res, rowhits + colhits[j])
return res
# 2017.05.21
# Violence, m * n * (m + n)
class Solution(object):
def maxKilledEnemies(self, grid):
"""
:type grid: List[List[str]]
:rtype: int
"""
if not grid or not grid[0]: return 0
m, n = len(grid), len(grid[0])
res = 0
for i in xrange(m):
for j in xrange(n):
if grid[i][j] == '0':
res = max(res, self.bomb(grid, i, j))
return res
def bomb(self, grid, i, j):
m, n = len(grid), len(grid[0])
cnt = 0
ii, jj = i + 1, j
while ii < m:
if grid[ii][jj] == 'W': break
if grid[ii][jj] == 'E': cnt += 1
ii += 1
ii, jj = i - 1, j
while ii >= 0:
if grid[ii][jj] == 'W': break
if grid[ii][jj] == 'E': cnt += 1
ii -= 1
ii, jj = i, j + 1
while jj < n:
if grid[ii][jj] == 'W': break
if grid[ii][jj] == 'E': cnt += 1
jj += 1
ii, jj = i, j - 1
while jj >= 0:
if grid[ii][jj] == 'W': break
if grid[ii][jj] == 'E': cnt += 1
jj -= 1
return cnt
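# Hedged example (added for illustration, not part of the original solution):
# the grid from the problem statement above, run against the most recently
# defined Solution class. Only executes when the file is run directly.
if __name__ == '__main__':
    example_grid = ["0E00", "E0WE", "0E00"]
    assert Solution().maxKilledEnemies(example_grid) == 3   # bomb at (1, 1)
    print "example grid ok"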
|
gpl-3.0
| -5,668,677,027,563,694,000
| 30.306931
| 151
| 0.463631
| false
| 3.392704
| false
| false
| false
|
knnniggett/weewx
|
bin/weewx/__init__.py
|
1
|
4230
|
#
# Copyright (c) 2009-2015 Tom Keffer <tkeffer@gmail.com>
#
# See the file LICENSE.txt for your full rights.
#
"""Package weewx, containing modules specific to the weewx runtime engine."""
import time
__version__="3.2.0a1"
# Holds the program launch time in unix epoch seconds:
# Useful for calculating 'uptime.'
launchtime_ts = time.time()
# Set to true for extra debug information:
debug = False
# Exit return codes
CMD_ERROR = 2
CONFIG_ERROR = 3
IO_ERROR = 4
DB_ERROR = 5
# Constants used to indicate a unit system:
METRIC = 0x10
METRICWX = 0x11
US = 0x01
#===============================================================================
# Define possible exceptions that could get thrown.
#===============================================================================
class WeeWxIOError(IOError):
"""Base class of exceptions thrown when encountering an I/O error with the console."""
class WakeupError(WeeWxIOError):
"""Exception thrown when unable to wake up or initially connect with the console"""
class CRCError(WeeWxIOError):
"""Exception thrown when unable to pass a CRC check."""
class RetriesExceeded(WeeWxIOError):
"""Exception thrown when max retries exceeded."""
class HardwareError(StandardError):
"""Exception thrown when an error is detected in the hardware."""
class UnknownArchiveType(HardwareError):
"""Exception thrown after reading an unrecognized archive type."""
class UnsupportedFeature(StandardError):
"""Exception thrown when attempting to access a feature that is not supported (yet)."""
class ViolatedPrecondition(StandardError):
"""Exception thrown when a function is called with violated preconditions."""
class StopNow(StandardError):
"""Exception thrown to stop the engine."""
class UninitializedDatabase(StandardError):
"""Exception thrown when attempting to use an uninitialized database."""
class UnknownDatabase(StandardError):
"""Exception thrown when attempting to use an unknown database."""
class UnknownBinding(StandardError):
"""Exception thrown when attempting to use an unknown data binding."""
class UnitError(ValueError):
"""Exception thrown when there is a mismatch in unit systems."""
#===============================================================================
# Possible event types.
#===============================================================================
class STARTUP(object):
"""Event issued when the engine first starts up. Services have not been loaded."""
class PRE_LOOP(object):
"""Event issued just before the main packet loop is started. Services have been loaded."""
class NEW_LOOP_PACKET(object):
"""Event issued when a new LOOP packet is available. The event contains attribute 'packet',
which is the new LOOP packet."""
class CHECK_LOOP(object):
"""Event issued in the main loop, right after a new LOOP packet has been processed. Generally,
it is used to throw an exception, breaking the main loop, so the console can be used
for other things."""
class END_ARCHIVE_PERIOD(object):
"""Event issued at the end of an archive period."""
class NEW_ARCHIVE_RECORD(object):
"""Event issued when a new archive record is available. The event contains attribute 'record',
which is the new archive record."""
class POST_LOOP(object):
"""Event issued right after the main loop has been broken. Services hook into this to
access the console for things other than generating LOOP packet."""
#===============================================================================
# Class Event
#===============================================================================
class Event(object):
"""Represents an event."""
def __init__(self, event_type, **argv):
self.event_type = event_type
for key in argv:
setattr(self, key, argv[key])
def __str__(self):
"""Return a string with a reasonable representation of the event."""
et = "Event type: %s | " % self.event_type
s = "; ".join("%s: %s" %(k, self.__dict__[k]) for k in self.__dict__ if k!="event_type")
return et + s
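# Hedged usage sketch (added for illustration, not part of the original
# module): an event is built with keyword arguments that become attributes,
# matching the event type docstrings above, e.g.
#
#     event = Event(NEW_ARCHIVE_RECORD, record={'outTemp': 20.5})
#     # a service bound to NEW_ARCHIVE_RECORD would then read event.record
#
# The record contents here are a hypothetical example.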
|
gpl-3.0
| -6,725,898,098,226,328,000
| 37.108108
| 98
| 0.613475
| false
| 4.617904
| false
| false
| false
|
tln/tatl
|
tatlrt.py
|
1
|
14490
|
# TATL runtime lib
import json, re
from warnings import warn
try: unicode
except:
# Python 3
unicode = basestring = str
apply = lambda f, args=(), kw={}: f(*args, **kw)
# Define some of the TATL built-ins. Compiler uses __all__ to determine whether name refers to a
# built-in.
null = None
false = False
true = True
len = len
__all__ = ['len', 'true', 'false', 'null']
def public(obj):
"Mark a class or function as public (aka a builtin, available from TATL templates)"
__all__.append(obj.__name__)
return obj
# A namespace of filters.
@apply
@public
class filters:
def _add(self, fn, _alias=re.compile('Alias: (\w+)')):
"""Mark a function as a filter. Include Alias: name in the docstring
to make a shortened alias.
Also add logic such that if used in def="" context (ie, given a function),
it will return a wrapper.
eg filters.trim(" s") -> "s"
filters.trim(func)(...) -> filters.trim(func(...))
"""
def f(arg, *args, **kw):
if callable(arg) and not (args or kw):
return lambda *args, **kw: fn(arg(*args, **kw))
else:
return fn(arg, *args, **kw)
name = f.__name__ = fn.__name__
doc = f.__doc__ = fn.__doc__
setattr(self, name, f)
for alias in _alias.findall(doc or ''):
setattr(self, alias, f)
return fn
# Marker for safe strings
@filters._add
class safe(unicode): "Quoted strings are 'safe' and do not get quoted again."
# Buffer logic, use fastbuf if available or lists if not
# This can be turned off/on at runtime, to enable testing using both paths
def use_fast(flag):
"Turn on fast mode, if possible. Return whether fast mode is in use."
global Buf, join, safejoin, fast
if flag:
try:
from fastbuf import Buf, set_safe_class
set_safe_class(safe)
join = unicode
safejoin = safe
fast = True
return True
except ImportError:
pass
def Buf():
return [].append
def join(b):
return u''.join(b.__self__)
def safejoin(b):
return safe(join(b))
fast = False
return False
use_fast(True)
# Quoting / escaping logic.
# Quote occurs through a type-switch mechanism which is faster than if isinstance chains.
_quote_safe = lambda s: s
def _quote_str(o):
"""Escape a str/unicode object. Note that compiled code never uses ' for attributes and >
    doesn't need to be escaped to form valid HTML. These replace calls are a big cost,
so saving 40% of them is a win.
"""
return o.replace(u'&', u'&')\
.replace(u'<', u'<')\
.replace(u"'", u''')
def _quote_other(o, q=_quote_str):
"""Escape a non-basestring, non-unicode, non-number, non-bool, non-null object.
Lists are space separated, dictionaries are repr-ed
"""
if isinstance(o, (tuple, list)):
return q(' '.join(map(unicode, o)))
return q(unicode(o))
class _Context(object):
"Context object, created for each TATL macro"
# Define type-switches for quoting
q_def = {
int: unicode,
float: '%.16g'.__mod__,
safe: _quote_safe,
}
q = {
'none': (json.dumps, {
str: str,
unicode: unicode,
}),
'attr': (_quote_other, {
str: _quote_str,
unicode: _quote_str,
}),
}
quote = None
def __init__(self, ctxname):
self.qstack = [0] # track whether .quote has been called with an empty value
self.mkquote(ctxname)
def mkquote(self, ctxname):
# Build a quoting function from type switches
from collections import defaultdict
default, typesdict = self.q[ctxname]
d = defaultdict(lambda:default, self.q_def)
d.update(typesdict)
d[None.__class__] = self._none
d[bool] = self._bool
self.quote = lambda obj: d[obj.__class__](obj)
def _none(self, arg):
self.qstack[-1] = 1
return ''
def _bool(self, arg):
if arg: return 'true'
self.qstack[-1] = 1
return ''
def star(self):
#NB broken
return _Star(self.estack[-1], self.quote), + self.push()
def plusplus(self):
#NB broken
return _Plusplus(self.estack[-1]), + self.push()
def elidestart(self):
self.qstack.append(0)
return Buf()
def elidecheck(self, emit):
checkresult = not (getattr(emit, 'blank_flag', 0) or self.qstack.pop())
return checkresult, safejoin(emit)
def load(self, name, path):
o = __import__(name) # TODO we need a whitelist here
o = getattr(o, path.pop(0)) # error if first name not found
return self.get(o, path)
def get(self, o, path):
for p in path:
o = self.get1(o, p)
return o
def applyauto(self, func, locals):
if isinstance(func, (_Star, _Plusplus)):
argnames = ['dot']
else:
co = func.__code__
argnames = co.co_varnames[:co.co_argcount]
args = [locals.get(a) for a in argnames]
result = func(*args)
return result or ''
def applyargs(self, func, *args):
result = func(*args)
return result or ''
def items(self, obj):
if obj is None:
return ()
try:
m = obj.items
except AttributeError:
return enumerate(obj)
else:
return sorted(m())
def itemsUnsorted(self, obj):
if obj is None:
return ()
try:
m = obj.items
except AttributeError:
return enumerate(obj)
else:
return m()
def iter(self, obj):
if obj is None or obj == '':
return []
elif isinstance(obj, basestring):
return [obj]
else:
return obj
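    # e.g. items({'b': 2, 'a': 1}) -> [('a', 1), ('b', 2)]
    #      items([u'x', u'y'])     -> (0, u'x'), (1, u'y')
    #      iter(None) -> [], iter(u'ab') -> [u'ab'], iter([1, 2]) -> [1, 2]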
def search(self, pattern, object):
if isinstance(object, basestring):
return re.search(pattern, object) is not None
return False
def range_incl(self, n, m):
# Implement n...m logic.
return range(n, m+1) if n < m else range(n, m-1, -1)
def range_excl(self, n, m):
# Implement n..m logic.
return range(n, m) if n < m else range(n-1, m-1, -1)
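    # e.g. range_incl(1, 3) -> [1, 2, 3], range_incl(3, 1) -> [3, 2, 1]
    #      range_excl(1, 3) -> [1, 2],    range_excl(3, 1) -> [2, 1]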
def get1(self, o, p):
"Implement path lookup, both {o.p} and {o[p]}"
try:
return o[p]
except (TypeError, KeyError, IndexError, AttributeError):
if not isinstance(p, basestring): return None
try:
return getattr(o, p, None)
            except Exception as e:
                pass
        except Exception as e:
            pass
warn("Unexpected error getting %r[%r]: %s" % (o, p, e))
return None
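    # e.g. get1({'a': 1}, 'a') -> 1; get1(obj, 'name') falls back to getattr;
    # anything that cannot be resolved returns None rather than raising.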
def ctx(name):
c = _Context(name)
return c, c.quote
# Used elsewhere in tatl for quoting
_attr = _Context('attr')
# Used to implement {*:x}
class _Star:
def __init__(self, l, quote):
self._l = l
self._len = len(l)
self._sp = 0
self._quote = quote
def __call__(self, o):
s = self._quote(o)
if s:
if self._sp:
s = ' '+s
self._sp = s[-1:] not in ' \n'
self._l.append(s)
return o
def __unicode__(self):
return ''.join(self._l[self._len:])
def __getitem__(self, i):
return self._l[i + self._len]
def __len__(self):
return len(self._l) - self._len
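# A _Star wraps an output buffer: each call quotes the value, appends it
# (space-separating successive non-empty pieces) and returns the original value,
# while unicode()/indexing/len() re-read only what was appended since creation.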
# Used to implement {++:x}
class _Plusplus:
def __init__(self, l):
self._l = l
self._ix = len(l)
l.append('0')
self.cur = 0
def __call__(self, value=""):
if value or value == "":
self.cur += 1
self._l[self._ix] = str(self.cur)
return ''
def __unicode__(self):
return unicode(self.cur)
def __cmp__(self, other):
return cmp(self.cur, other)
def __int__(self):
return self.cur
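# A _Plusplus reserves one slot in the output buffer (initially '0') and patches
# that slot in place on every call, so the rendered output shows the final count.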
# forloop, swiss army knife of looping
class _Forloop(object):
length = 0
counter0 = None
key = None
value = None
sum = None
pre = False
post = False
prev = None
next = None
counter = property(lambda self: None if self.counter0 is None else self.counter0 + 1)
first = property(lambda self: self.counter0 == 0)
last = property(lambda self: self.counter == self.length)
def __init__(self, length, cycle=[], firstclass='first', lastclass='last', preclass='', postclass='', **opts):
self.length = length
self.cycle = cycle
self.firstclass = firstclass
self.lastclass = lastclass
self.preclass = preclass
self.postclass = postclass
def classes(self):
l = []
if self.preclass and self.pre:
l.append(self.preclass)
if self.firstclass and self.first:
l.append(self.firstclass)
if self.cycle:
l.append(self.cycle[self.counter0 % len(self.cycle)])
if self.lastclass and self.last:
l.append(self.lastclass)
if self.postclass and self.post:
l.append(self.postclass)
return ' '.join(l)
def make_next(self):
next = self.__class__(
self.length,
self.cycle,
self.firstclass,
self.lastclass,
self.preclass,
self.postclass
)
self.next = next
next.prev = self
return next
def __repr__(self):
result = '<forloop:'
for k, v in self.__dict__.items():
if k in ('prev', 'next', 'cycle') or k.endswith('class'): continue
result += ' %s=%r' % (k, v)
return result + ' classes=%r>' % self.classes()
@public
def forloop(obj, opts={}):
"Support forloop.counter, etc"
#forloop [pre] should have counter = counter0 = key = value = null
if obj is None:
return
if isinstance(obj, basestring):
obj = [obj]
agg = opts.pop('total', None)
agg = agg and Aggregator(agg)
result = _Forloop(len(obj), **opts)
if bool(result.preclass):
result.pre = True
lastresult = result
result = result.make_next()
else:
lastresult = None
for result.counter0, (result.key, result.value) in enumerate(_attr.items(obj)):
if agg: agg(result.value)
if lastresult:
yield lastresult
lastresult = result
result = result.make_next()
if lastresult:
lastresult.next = None
yield lastresult
if result.postclass or agg:
result.prev = None
result.post = True
result.key = opts.get('totalkey')
result.value = agg and agg.value()
yield result
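# Illustrative use (hypothetical data):
#   for loop in forloop([u'a', u'b', u'c'], {'cycle': ['odd', 'even']}):
#       loop.counter, loop.value, loop.classes()
#   -> (1, u'a', 'first odd'), (2, u'b', 'even'), (3, u'c', 'odd last')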
@public
def sum(values, _builtin=sum):
try:
values = map(float, values)
    except Exception:
return None
return _builtin(values)
class Aggregator:
def __init__(self, aggregators):
if callable(aggregators):
self.aggfn = aggregators
self.consts = self.aggfns = {}
self.has_aggs = True
self.values = []
else:
l = [{}, {}]
self.aggfn = None
self.aggfns = l[True]
self.consts = l[False]
for k, v in aggregators.items():
l[callable(v)][k] = v
self.has_aggs = bool(self.aggfns or self.consts)
self.values = dict((k, []) for k in self.aggfns)
def __call__(self, value):
if not self.has_aggs: return
if self.aggfn:
self.values.append(value)
else:
for key in self.aggfns:
self.values[key].append(_attr.get1(value, key))
def value(self):
if not self.has_aggs:
return None
if self.aggfn:
return self.aggfn(self.values)
d = self.consts.copy()
for key, fn in self.aggfns.items():
d[key] = fn(self.values[key])
return d
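# Illustrative Aggregator use, as driven by forloop's 'total' option
# (hypothetical data):
#   agg = Aggregator({'price': sum})
#   agg({'price': 1}); agg({'price': 2})
#   agg.value() -> {'price': 3.0}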
# Additional filters
@filters._add
def url(s):
"Alias: u"
import urllib
return urllib.quote(s)
def tostr(s):
"Convert object to string with same semantics as default context"
if s is None:
return ''
if isinstance(s, basestring):
return s
if isinstance(s, float):
return '%.16g' % s
return unicode(s)
@filters._add
def trim(s):
"A filter"
return tostr(s).strip()
TAG = re.compile('(\s*<)([a-zA-Z0-9_.:-]+)(.*?>)', re.DOTALL)
# Tag-oriented filters
def _findtag(s, fn):
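    # If s (ignoring surrounding whitespace) is exactly one balanced element,
    # call fn(s, open_match, close_match); otherwise return s unchanged.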
if not isinstance(s, basestring): return s
start = m = TAG.match(s)
if not m: return s
count = 1
p = re.compile('<(/?)%s\s*' % start.group(2))
while count:
m = p.search(s, m.end())
if not m: return s
count += -1 if m.group(1) else 1
if s[m.end()+1:].strip(): return s
return fn(s, start, m)
@public
def contents(inner):
"""
>>> contents(u' <title>HI</title> ')
u'HI'
>>> contents(u'<p>1</p><p>2</p>')
u'<p>1</p><p>2</p>'
>>> contents(u'<p><p>1</p><p>2</p></p>')
u'<p>1</p><p>2</p>'
"""
return safe(_findtag(inner, lambda s, start, end: s[start.end():end.start()]))
notpassed = object()
@public
def tag(tagname, attrs_or_inner, inner=notpassed):
"""
>>> tag('h1', {}, u'HI')
u'<h1>HI</h1>'
>>> tag('h1', {}, u'H&I')
    u'<h1>H&amp;I</h1>'
>>> tag('h1', None, safe(u'<title>HI</title>'))
u'<h1><title>HI</title></h1>'
>>> tag('h1', {'class': 'large'}, safe(u'foo:<title>HI</title>'))
u'<h1 class="large">foo:<title>HI</title></h1>'
"""
if inner is notpassed:
attstr = ''
inner = attrs_or_inner
else:
attrs = attrs_or_inner or {}
attstr = ''.join(
' %s="%s"' % (k, _attr.quote(v))
for k, v in sorted(attrs.items())
)
return safe(u'<%s>%s</%s>' % (tagname+attstr, _attr.quote(inner), tagname))
@public
def attrs(attrs, inner):
"""
>>> attrs({'id':'id123'}, u'<title>HI</title>')
u'<title id="id123">HI</title>'
"""
def _replace(s, start, end):
attstr = ''.join(' %s="%s"' % (k, _attr.quote(v)) for k, v in attrs.items())
e = start.end(2)
return s[:e]+attstr+s[e:]
return safe(_findtag(inner, _replace))
if __name__ == '__main__':
import doctest
doctest.testmod()
|
bsd-2-clause
| -4,284,716,867,018,305,000
| 26.705545
| 114
| 0.536508
| false
| 3.561947
| false
| false
| false
|