| repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 | prefix stringlengths 0-8.16k | middle stringlengths 3-512 | suffix stringlengths 0-8.17k |
|---|---|---|---|---|---|---|---|---|
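The prefix, middle, and suffix columns of each row below are three consecutive text spans of a single source file. A minimal sketch of how such rows could be reassembled follows; the `rows` iterable and the dict-per-row layout are illustrative assumptions, not part of this dump.

```python
# Minimal sketch: rebuild a full source file from one row of this table.
# Assumption: each row is available as a dict keyed by the column names above;
# how `rows` is loaded is a placeholder and is not specified by this dump.

def reassemble(row: dict) -> str:
    """Concatenate the prefix, middle, and suffix cells into one file."""
    return row["prefix"] + row["middle"] + row["suffix"]

rows = []  # e.g. parsed records with repo_name, path, prefix, middle, suffix

for row in rows:
    source = reassemble(row)
    print(row["repo_name"], row["path"], len(source), "chars")
```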
previtus/MGR-Project-Code | Settings/set1-test_of_models_against_datasets/mix299.py | Python | mit | 1,829 | 0.006014 |
def Setup(Settings,DefaultModel):
# set1-test_of_models_against_datasets/osm299.py
Settings["experiment_name"] = "set1_Mix_model_versus_datasets_299px"
Settings["graph_histories"] = ['together'] #['all','together',[],[1,0],[0,0,0],[]]
# 5556x_minlen30_640px 5556x_minlen20_640px 5556x_reslen20_299px 5556x_reslen30_299px
n=0
Settings["models"][n]["dataset_name"] = "5556x_reslen30_299px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["mod
|
els"][n]["unique_id"] = 'mix_minlen30_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
Settings["models"].append(DefaultModel.copy())
n+=1
Settings["models"][n]["dataset_pointer"] = -1
Settings["models"][n]["dataset_name"] = "5556x_reslen20_299px"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix_minlen20_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
Settings["models"].append(DefaultModel.copy())
n+=1
Settings["models"][n]["dataset_pointer"] = -1
Settings["models"][n]["dataset_name"] = "5556x_mark_res_299x299"
Settings["models"][n]["dump_file_override"] = 'SegmentsData_marked_R100_4Tables.dump'
Settings["models"][n]["pixels"] = 299
Settings["models"][n]["model_type"] = 'img_osm_mix'
Settings["models"][n]["unique_id"] = 'mix_nosplit_299px'
Settings["models"][n]["top_repeat_FC_block"] = 2
Settings["models"][n]["epochs"] = 800
return Settings
|
dragondjf/musicplayer | gui/menus/settingsmenu.py | Python | gpl-2.0 | 3,819 | 0.000262 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
from gui.dwidgets import DMenu
class SettingsMenu(DMenu):
"""docstring for SettingsMenu"""
def __init__(self, parent=None):
super(SettingsMenu, self).__init__(parent)
self.parent = parent
self.menuItems = [
{
'name': self.tr('Login'),
'icon': u'',
'shortcut': u'',
'trigger': 'Login',
},
{
'name': self.tr('Show suspension window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Suspension',
},
{
'name': self.tr('Show float window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Float',
},
{
'name': self.tr('Show Dock window'),
'icon': u'',
'shortcut': u'',
'trigger': 'Dock',
},
{
'name': self.tr('Language'),
'trigger': 'Language',
'type': 'submenu',
'actions': [
{
'name': 'English',
'icon': u'',
'shortcut': u'',
'trigger': 'English',
"checkable": True
},
{
'name': 'Chinese',
'icon': u'',
'shortcut': u'',
'trigger': 'Chinese',
"checkable": True
},
]
},
{
'name': self.tr('Document'),
'trigger': 'Document',
'type': 'submenu',
'actions': [
{
'name': 'Android developer guide',
'icon': u'',
'shortcut': u'',
'trigger': 'AndroidDeveloper',
"checkable": False
},
{
'name': 'iOS developer guide',
'icon': u'',
'shortcut': u'',
'trigger': 'IOSDeveloper',
"checkable": False
},
{
'name': 'Ford developer center',
'icon': u'',
'shortcut': u'',
'trigger': 'FordDeveloper',
"checkable": False
},
]
},
{
'name': self.tr('ObjectView'),
'icon': u'',
'shortcut': u'',
'trigger': 'ObjectView',
},
{
'name': self.tr('About'),
'icon': u'',
'shortcut': u'Qt.Key_F12',
'trigger': 'About',
},
{
'name': self.tr('Exit'),
'icon': u'',
'shortcut': u'',
'trigger': 'Exit',
},
]
self.creatMenus(self.menuItems)
self.initConnect()
getattr(self, '%sAction' % 'English').setChecked(True)
def initConnect(self):
for item in ['English', 'Chinese']:
getattr(self, '%sAction' % item).triggered.connect(self.updateChecked)
def updateChecked(self):
for item in ['English', 'Chinese']:
action = getattr(self, '%sAction' % item)
if self.sender() is action:
action.setChecked(True)
else:
action.setChecked(False)
|
pranjan77/kb_go_express | scripts/prepare_deploy_cfg.py | Python | mit | 2,069 | 0.003867 |
import sys
import os
import os.path
from jinja2 import Template
from ConfigParser import ConfigParser
import StringIO
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: <program> <deploy_cfg_template_file> <file_with_properties>")
print("Properties from <file_with_properties> will be applied to <deploy_cfg_template_file>")
print("template which will be overwritten with .orig copy saved in the same folder first.")
sys.exit(1)
file = open(sys.argv[1], 'r')
text = file.read()
t = Template(text)
config = ConfigParser()
if os.path.isfile(sys.argv[2]):
config.read(sys.argv[2])
elif "KBASE_ENDPOINT" in os.environ:
kbase_endpoint = os.environ.get("KBASE_ENDPOINT")
props = "[global]\n" + \
"kbase_endpoint = " + kbase_endpoint + "\n" + \
"job_service_url = " + kbase_endpoint + "/userandjobstate\n" + \
"workspace_url = " + kbase_endpoint + "/ws\n" + \
"shock_url = " + kbase_endpoint + "/shock-api\n" + \
"handle_url = " + kbase_endpoint + "/handle_service
|
\n" + \
"srv_wiz_url = " + kbase_endpoint + "/service_wizard\n" + \
"njsw_url = " + kbase_endpoint + "/njs_wrapper\n"
if "AUTH_SERVICE_URL" in os.environ:
props += "auth_service_url = " + os.environ.get("AUTH_SERVICE_URL") + "\n"
elif "auth2services" in kbase_endpoint:
props += "auth_service_url = " + kbase_endpoint + "/auth/api/legacy/KBase/Sessions/Login\n"
props += "auth_service_url_allow_insecure = " + \
os.environ.get("AUTH_SERVICE_URL_ALLOW_INSECURE", "false") + "\n"
config.readfp(StringIO.StringIO(props))
else:
raise ValueError('Neither ' + sys.argv[2] + ' file nor KBASE_ENDPOINT env-variable found')
props = dict(config.items("global"))
output = t.render(props)
with open(sys.argv[1] + ".orig", 'w') as f:
f.write(text)
with open(sys.argv[1], 'w') as f:
f.write(output)
|
NicolasT/typeclasses | demo/eq.py | Python | lgpl-2.1 | 1,630 | 0 |
# typeclasses, an educational implementation of Haskell-style type
# classes, in Python
#
# Copyright (C) 2010 Nicolas Trangez <eikke eikke com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, version 2.1
# of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
'''Some demonstrations of the Eq typeclass and its `eq` and `ne` functions'''
from typeclasses.eq import eq, ne
import typeclasses.instances.list
import typeclasses.instances.tuple
from typeclasses.instances.maybe import Just, Nothing
from typeclasses.instances.tree import Branch, Leaf
# List
assert eq([1, 2, 3], [1, 2, 3])
assert ne([0, 1, 2], [1, 2, 3])
# Tuple
assert eq((1, 2, 3, ), (1, 2, 3, ))
assert ne((0, 1, 2, ), (1, 2, 3, ))
# Maybe
assert eq(Nothing, Nothing)
assert eq(Just(1), Just(1))
assert ne(Just(1), Just(2))
assert ne(Just(1), Nothing)
# Tree
assert eq(Branch(Branch(Leaf(0), Leaf(1)), Leaf(2)),
Branch(Branch(Leaf(0), Leaf(1)), Leaf(2)))
assert ne(Branch(Branch(Leaf(0), Leaf(1)), Leaf(2)),
Branch(Branch(Leaf(0), Leaf(1)), Branch(Leaf(2), Leaf(3))))
|
glogiotatidis/mozillians-new | vendor-local/lib/python/kombu/tests/transport/test_amqplib.py | Python | bsd-3-clause | 4,458 | 0.000449 |
from __future__ import absolute_import
import sys
from kombu.transport import amqplib
from kombu.connection import BrokerConnection
from kombu.tests.utils import TestCase
from kombu.tests.utils import mask_modules, Mock
class MockConnection(dict):
def __setattr__(self, key, value):
self[key] = value
class Channel(amqplib.Channel):
wait_returns = []
def _x_open(self, *args, **kwargs):
pass
def wait(self, *args, **kwargs):
return self.wait_returns
def _send_method(self, *args, **kwargs):
pass
class test_Channel(TestCase):
def setUp(self):
self.conn = Mock()
self.conn.channels = {}
self.channel = Channel(self.conn, 0)
def test_init(self):
self.assertFalse(self.channel.no_ack_consumers)
def test_prepare_message(self):
x = self.channel.prepare_message("foobar", 10,
"application/data", "utf-8",
properties={})
self.assertTrue(x)
def test_message_to_python(self):
message = Mock()
message.headers = {}
message.properties = {}
self.assertTrue(self.channel.message_to_python(message))
def test_close_resolves_connection_cycle(self):
self.assertIsNotNone(self.channel.connection)
self.channel.close()
self.assertIsNone(self.channel.connection)
def test_basic_consume_registers_ack_status(self):
self.channel.wait_returns = "my-consumer-tag"
self.channel.basic_consume("foo", no_ack=True)
self.assertIn("my-consumer-tag", self.channel.no_ack_consumers)
self.channel.wait_returns = "other-consumer-tag"
self.channel.basic_consume("bar", no_ack=False)
self.assertNotIn("other-consumer-tag", self.channel.no_ack_consumers)
self.channel.basic_cancel("my-consumer-tag")
self.assertNotIn("my-consumer-tag", self.channel.no_ack_consumers)
class test_Transport(TestCase):
def setUp(self):
self.connection = BrokerConnection("amqplib://")
self.transport = self.connection.transport
def test_create_channel(self):
connection = Mock()
self.transport.create_channel(connection)
connection.channel.assert_called_with()
def test_drain_events(self):
connection = Mock()
self.transport.drain_events(connection, timeout=10.0)
connection.drain_events.assert_called_with(timeout=10.0)
def test_dnspython_localhost_resolve_bug(self):
class Conn(object):
def __init__(self, **kwargs):
vars(self).update(kwargs)
self.transport.Connection = Conn
self.transport.client.hostname = "localhost"
conn1 = self.transport.establish_connection()
self.assertEqual(conn1.host, "127.0.0.1:5672")
self.transport.client.hostname = "example.com"
conn2 = self.transport.establish_connection()
self.assertEqual(conn2.host, "example.com:5672")
def test_close_connection(self):
connection = Mock()
connection.client = Mock()
self.transport.close_connection(connection)
self.assertIsNone(connection.client)
connection.close.assert_called_with()
def test_verify_connection(self):
connection = Mock()
connection.channels = None
self.assertFalse(self.transport.verify_connection(connection))
connection.channels = {1: 1, 2: 2}
self.assertTrue(self.transport.verify_connection(connection))
@mask_modules("ssl")
def test_import_no_ssl(self):
pm = sys.modules.pop("kombu.transport.amqplib")
try:
from kombu.transport.amqplib import SSLError
self.assertEqual(SSLError.__module__, "kombu.transport.amqplib")
finally:
if pm is not None:
sys.modules["kombu.transport.amqplib"] = pm
class test_amqplib(TestCase):
def test_default_port(self):
class Transport(amqplib.Transport):
Connection = MockConnection
c = BrokerConnection(port=None, transport=Transport).connect()
self.assertEqual(c["host"],
"127.0.0.1:%s" % (Transport.default_port, ))
def test_custom_port(self):
class Transport(amqplib.Transport):
Connection = MockConnection
c = BrokerConnection(port=1337, transport=Transport).connect()
self.assertEqual(c["host"], "127.0.0.1:1337")
|
luozhaoyu/TTTT | outputSlaves.py | Python | mit | 313 | 0.00639 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
fabfile.py
~~~~~~~~~~~~~~
A brief description goes here.
"""
try:
from config import MACHINES
except ImportError as e:
print "You should cp config.py.sample config.py, a
|
nd modify it then"
raise e
for node in MACHINES['slave']:
print node
|
gblanchard4/viamics | framework/tools/heatmap.py | Python | gpl-2.0 | 7,248 | 0.009934 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2010 - 2011, University of New Orleans
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# Please read the COPYING file.
#
# --
#
# Heatmap generator.
#
# This application could be used like this, too:
#
# python generate_rpy_heatmap.py -a ABUNDANCE_FILE -p 10 -m 1 --row-text-size=1.5 --column-text-size=1 --margin-right=20 --margin-bottom=10 --width=1000 --height=1200 -l
#
import os
import sys
import math
import numpy
import numpy.core.numeric
from rpy2 import robjects
import rpy2.robjects.numpy2ri
from rpy2.robjects.packages import importr
from optparse import OptionParser
def main(options, analyses_dir = ''):
if len(analyses_dir):
sample_color_map_file = os.path.join(analyses_dir, options.sample_color_map_file)
abundance_file = os.path.join(analyses_dir, options.abundance_file)
options.output_file = os.path.join(analyses_dir, options.output_file)
else:
abundance_file = options.abundance_file
sample_color_map_file = options.sample_color_map_file
#first line of the abundance file to see what sample names are
col_names = open(abundance_file).readline().strip().split("\t")[1:]
#creating an entry for every bacterium in the abundance file
row_names_non_scaled = []
exprs_non_scaled = []
row_names = []
exprs = []
for line in open(abundance_file).readlines()[1:]:
row_names_non_scaled.append(line.strip().split("\t")[0])
exprs_non_scaled.append(map(float, line.strip().split("\t")[1:]))
for i in range(0, len(row_names_non_scaled)):
if sum(exprs_non_scaled[i]) > options.min_percentage and len([x for x in exprs_non_scaled[i] if x > 0.0]) > options.min_present:
if options.log:
exprs.append([math.log10(x + 1) for x in exprs_non_scaled[i]])
row_names.append(row_names_non_scaled[i])
else:
exprs.append(exprs_non_scaled[i])
row_names.append(row_names_non_scaled[i])
else:
print "* Discarding '%s' (total percentage: %f, present in %d sample(s))." % (row_names_non_scaled[i], sum(exprs_non_scaled[i]), len([x for x in exprs_non_scaled[i] if x > 0.0]))
print "\n%i samples, %i bacteria\n" % (len(col_names), len(row_names))
data_matrix = numpy.array(exprs)
sample_color_map = {}
if sample_color_map_file:
for sample, desc, color in [x.strip().split('\t') for x in open(sample_color_map_file).readlines() if len(x.strip().split('\t')) == 3]:
sample_color_map[sample] = {'description': desc, 'color': color}
def sample_colour(sample_id):
if sample_color_map.has_key(sample_id):
return sample_color_map[sample_id]['color']
else:
return '#FFFFFF'
if len(col_names) < 2 or len(row_names) < 2:
raise Exception, "Number of columns or rows can't be smaller than 2 in a heatmap (you might have enetered some criteria that eliminates all OTU's or samples)."
#bioDist = importr('bioDist')
generate_heatmap(options, col_names, row_names, data_matrix, sample_colours = map(sample_colour, col_names))#,dist_func=bioDist.spearman_dist
return
def generate_heatmap(options, col_names, row_names, data_matrix, sample_colours, dist_func=robjects.r.dist):
robjects.r.library('gplots')
grdevices = importr('grDevices')
h = options.height or len(row_names) * 25
if h < 400:
h = 400
w = options.width or len(col_names) * 20
if w < 500:
w = 500
grdevices.png(options.output_file, width=w, height=h)
robjects.r('heatmap.2')(data_matrix,
labRow=row_names,
scale=options.scale,
labCol=col_names,
ColSideColors=robjects.StrVector(sample_colours),
col=robjects.r('redgreen')(100),
distfun=dist_func,
key=True,
symkey=False,
density_info="none",
trace="none",
margins=robjects.r.c(options.margin_bottom, options.margin_right), # margin right and bottom
cexRow=options.cexRow, # Y axis text size
cexCol=options.cexCol) # X axis text size
grdevices.dev_off()
return
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-a", "--abundance-file", dest="abundance_file",
type="string", help="abundance file name", metavar="FILE")
parser.add_option("-c", "--sample-color-map-file", dest="sample_color_map_file",
type="string", help="sample color map file. every line should have three columns: SAMPLE ONE_WORD_SAMPLE_DESCRIPTION COLOR", metavar="FILE")
parser.add_option("-o", "--output-file", dest="output_file", default="heatmap.png",
help="file name for the PNG", metavar="FILE")
parser.add_option("-s", "--scale", dest="scale", default="column",
help="scale either columns or rows", metavar="[row|column]")
parser.add_option("-m", "--min-percentage", dest="min_percentage", type="float", default=0.0,
help="minimum total percentage of a bug in all samples (can be bigger than 100%%)")
parser.add_option("-p", "--min-present", dest="min_present", type="int", default=0,
help="minimum total number of samples have this bug (if it present in less than --min-present samples, bug would be discarded)")
parser.add_option("-l", "--log", dest="log", default=False, action="store_true",
help="apply log10 to abundance percentages (log(abundance percentage + 1))")
parser.add_option("--width", type="int", dest="width", default=0,
help="width of the heatmap image (pixels)")
parser.add_option("--height", type="int", dest="height", default=0,
help="height of the heatmap image (pixels)")
parser.add_option("--margin-right", type="int", dest="margin_right", default=20,
help="text area between the map and the right side of the image")
parser.add_option("--margin-bottom", type="int", dest="margin_bottom", default=10,
help="text area between the map and the bottom of the image")
parser.add_option("--row-text-size", type="float", dest="cexRow", default=1.5,
help="row text size")
parser.add_option("--column-text-size", type="float", dest="cexCol", default=1,
help="column text size")
(options, args) = parser.parse_args()
if options.abundance_file == None:
print "Error: You need to provide an input file (percent or total count abundance file)\n"
parser.print_help()
sys.exit(2)
main(options)
|
adriendelsalle/unsysap | unsysap/components_library.py | Python | bsd-2-clause | 4,949 | 0.001819 |
from .components_generic import System
from .ports_library import FluidPort, MechPort, CustomPort
class Duct(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl_in'] = FluidPort()
self.outputs['fl_out'] = FluidPort()
self.set()
self.cst_loss = 0.99
def run(self):
self.fl_out.Pt = self.cst_loss*self.fl_in.Pt
self.fl_out.W = self.fl_in.W
self.fl_out.Tt = self.fl_in.Tt
class Splitter(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl_in'] = FluidPort()
self.outputs['fl1_out'] = FluidPort()
self.outputs['fl2_out'] = FluidPort()
self.set()
self.split_ratio = 0.99
def run(self):
self.fl1_out.Pt = self.fl_in.Pt
self.fl2_out.Pt = self.fl_in.Pt
self.fl1_out.Tt = self.fl_in.Tt
self.fl2_out.Tt = self.fl_in.Tt
self.fl1_out.W = self.fl_in.W*self.split_ratio
self.fl2_out.W = self.fl_in.W*(1-self.split_ratio)
class Merger(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl1_in'] = FluidPort()
self.params['fl2_in'] = FluidPort()
self.outputs['fl_out'] = FluidPort()
self.set()
def run(self):
self.fl_out.Pt = self.fl1_in.Pt
self.fl_out.Tt = self.fl1_in.Tt
self.fl_out.W = self.fl1_in.W+self.fl2_in.W
class Atm(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.outputs['fl_out'] = FluidPort()
self.outputs['fl_out'].remove('W')
self.set()
def run(self):
self.fl_out.Pt = 101325
self.fl_out.Tt = 273.15
class Inlet(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl_in'] = FluidPort()
self.params['fl_in'].remove('W')
self.outputs['fl_out'] = FluidPort()
self.add_port('W_in', CustomPort({'W': 100}), 'in')
self.set()
def run(self):
self.fl_out.Pt = self.fl_in.Pt*0.995
self.fl_out.Tt = self.fl_in.Tt
self.fl_out.W = self.W_in.W
class Fan(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl_in'] = FluidPort()
self.params['mech_in'] = MechPort()
self.outputs['fl_out'] = FluidPort()
self.add_port('gh_in', CustomPort({'gh': 0.1}), 'in')
self.set()
#self.residuals['Wfan'] = self.fl_out.W / self.fl_in.W - 1
def run(self):
fl_in = self.fl_in
fl_out = self.fl_out
try:
self.PCNR = self.mech_in.XN / (fl_in.Tt/288.15)**0.5
except:
self.PCNR = 0.
fl_out.Pt = fl_in.Pt*(0.01*(self.PCNR+self.gh_in.gh)+1)
fl_out.Tt = fl_in.Tt*(fl_out.Pt/fl_in.Pt)**(1-1/1.4)
try:
fl_out.W = 2*(1-self.gh_in.gh)*self.PCNR/(fl_in.Tt/288.15)**0.5*(fl_in.Pt/101325.)
except:
fl_out.W = 0.
if fl_in.W < 1e-5:
self.residuals['Wfan'] = fl_out.W - fl_in.W
else:
self.residuals['Wfan'] = fl_out.W / fl_in.W - 1
if self.mech_in.PW < 1e-5:
self.residuals['PWfan'] = (fl_out.W*1004*(fl_out.Tt - fl_in.Tt)) - self.mech_in.PW
else:
self.residuals['PWfan'] = (fl_out.W*1004*(fl_out.Tt-fl_in.Tt))/self.mech_in.PW-1
class Nozzle(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.params['fl_in'] = FluidPort()
self.set()
self.residuals = {'WRnozzle': 0.}
self.Acol = 0.4
self.Aexit = 0.5
def run(self):
fl = self.fl_in
try:
Qwr = fl.W*(fl.Tt/288.15)**0.5/(fl.Pt/101325)/241./self.Acol-1
except:
Qwr = fl.W*(fl.Tt/288.15)**0.5/(fl.Pt/101325)/241. - self.Acol
self.residuals['WRnozzle'] = Qwr
class FanComplex(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.add(DuctComplex('duct'))
self.add(Fan('fan'))
self.fan.params['fl_in'] = self.duct.bleed.outputs['fl1_out']
self.set()
self.version = 1.
self.duct.cst_loss = 1.
def run(self):
pass
class DuctComplex(System):
def __init__(self, name='n/a'):
super().__init__()
self.name = name
self.add(Duct('duct'))
self.add(Merger('merger'))
self.add(Splitter('bleed'))
self.duct.params['fl_in'] = self.merger.outputs['fl_out']
self.bleed.params['fl_in'] = self.duct.outputs['fl_out']
self.merger.params['fl2_in'] = self.bleed.outputs['fl2_out']
self.set()
self.version = 1.
self.duct.cst_loss = 1.
def run(self):
pass
|
Hybrid-Cloud/cinder | cinder/volume/drivers/blockbridge.py | Python | apache-2.0 | 21,860 | 0 |
# Copyright 2013-2015 Blockbridge Networks, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Blockbridge EPS iSCSI Volume Driver
"""
import base64
import socket
from oslo_config import cfg
from oslo_log import log as logging
from oslo_serialization import jsonutils
from oslo_utils import units
import six
from six.moves import http_client
from six.moves import urllib
from cinder import context
from cinder import exception
from cinder.i18n import _
from cinder import interface
from cinder.volume import driver
from cinder.volume import utils as volume_utils
LOG = logging.getLogger(__name__)
blockbridge_opts = [
cfg.StrOpt("blockbridge_api_host",
help="IP address/hostname of Blockbridge API."),
cfg.IntOpt("blockbridge_api_port",
help="Override HTTPS port to connect to Blockbridge "
"API server."),
cfg.StrOpt("blockbridge_auth_scheme",
default='token',
choices=['token', 'password'],
help="Blockbridge API authentication scheme (token "
"or password)"),
cfg.StrOpt("blockbridge_auth_token",
help="Blockbridge API token (for auth scheme 'token')",
secret=True),
cfg.StrOpt("blockbridge_auth_user",
help="Blockbridge API user (for auth scheme 'password')"),
cfg.StrOpt("blockbridge_auth_password",
help="Blockbridge API password (for auth scheme 'password')",
secret=True),
cfg.DictOpt("blockbridge_pools",
default={'OpenStack': '+openstack'},
help="Defines the set of exposed pools and their associated "
"backend query strings"),
cfg.StrOpt("blockbridge_default_pool",
help="Default pool name if unspecified."),
]
CONF = cfg.CONF
CONF.register_opts(blockbridge_opts)
class BlockbridgeAPIClient(object):
_api_cfg = None
def __init__(self, configuration=None):
self.configuration = configuration
def _get_api_cfg(self):
if self._api_cfg:
# return cached configuration
return self._api_cfg
if self.configuration.blockbridge_auth_scheme == 'password':
user = self.configuration.safe_get('blockbridge_auth_user')
pw = self.configuration.safe_get('blockbridge_auth_password')
creds = "%s:%s" % (user, pw)
if six.PY3:
creds = creds.encode('utf-8')
b64_creds = base64.encodestring(creds).decode('ascii')
else:
b64_creds = base64.encodestring(creds)
authz = "Basic %s" % b64_creds.replace("\n", "")
elif self.configuration.blockbridge_auth_scheme == 'token':
token = self.configuration.blockbridge_auth_token or ''
authz = "Bearer %s" % token
# set and return cached api cfg
self._api_cfg = {
'host': self.configuration.blockbridge_api_host,
'port': self.configuration.blockbridge_api_port,
'base_url': '/api/cinder',
'default_headers': {
'User-Agent': ("cinder-volume/%s" %
BlockbridgeISCSIDriver.VERSION),
'Accept': 'application/vnd.blockbridge-3+json',
'Authorization': authz,
},
}
return self._api_cfg
def submit(self, rel_url, method='GET', params=None, user_id=None,
project_id=None, req_id=None, action=None, **kwargs):
"""Submit a request to the configured API endpoint."""
cfg = self._get_api_cfg()
if cfg is None:
msg = _("Failed to determine blockbridge API configuration")
LOG.error(msg)
raise exception.VolumeBackendAPIException(data=msg)
# alter the url appropriately if an action is requested
if action:
rel_url += "/actions/%s" % action
headers = cfg['default_headers'].copy()
url = cfg['base_url'] + rel_url
body = None
# include user, project and req-id, if supplied
tsk_ctx = []
if user_id and project_id:
tsk_ctx.append("ext_auth=keystone/%s/%s" % (project_id, user_id))
if req_id:
tsk_ctx.append("id=%s", req_id)
if tsk_ctx:
headers['X-Blockbridge-Task'] = ','.join(tsk_ctx)
# encode params based on request method
if method in ['GET', 'DELETE']:
# For GET method add parameters to the URL
if params:
url += '?' + urllib.parse.urlencode(params)
elif method in ['POST', 'PUT', 'PATCH']:
body = jsonutils.dumps(params)
headers['Content-Type'] = 'application/json'
else:
raise exception.UnknownCmd(cmd=method)
# connect and execute the request
connection = http_client.HTTPSConnection(cfg['host'], cfg['port'])
connection.request(method, url, body, headers)
response = connection.getresponse()
# read response data
rsp_body = response.read()
rsp_data = jsonutils.loads(rsp_body)
connection.close()
code = response.status
if code in [200, 201, 202, 204]:
pass
elif code == 401:
raise exception.NotAuthorized(_("Invalid credentials"))
elif code == 403:
raise exception.NotAuthorized(_("Insufficient privileges"))
else:
raise exception.VolumeBackendAPIException(data=rsp_data['message'])
return rsp_data
@interface.volumedriver
class BlockbridgeISCSIDriver(driver.ISCSIDriver):
"""Manages volumes hosted on Blockbridge EPS."""
VERSION = '1.3.0'
# ThirdPartySystems wiki page
CI_WIKI_NAME = "Blockbridge_EPS_CI"
def __init__(self, *args, **kwargs):
super(BlockbridgeISCSIDriver, self).__init__(*args, **kwargs)
self.client = kwargs.get('client', None) or (
BlockbridgeAPIClient(configuration=self.configuration))
self.configuration.append_config_values(blockbridge_opts)
self.hostname = socket.gethostname()
def do_setup(self, context):
"""Set up the Blockbridge volume driver."""
pass
def check_for_setup_error(self):
"""Verify configuration is valid."""
# ensure the host is configured
if self.configuration.safe_get('blockbridge_api_host') is None:
raise exception.InvalidInput(
reason=_("Blockbridge api host not configured"))
# ensure the auth scheme is valid and has the necessary configuration.
auth_scheme = self.configuration.safe_get("blockbridge_auth_scheme")
if auth_scheme == 'password':
auth_user = self.configuration.safe_get('blockbridge_auth_user')
auth_pw = self.configuration.safe_get('blockbridge_auth_password')
if auth_user is None:
raise exception.InvalidInput(
reason=_("Blockbridge user not configured (required for "
"auth scheme 'password')"))
if auth_pw is None:
raise exception.InvalidInput(
reason=_("Blockbridge password not configured (required "
"for auth scheme 'password')"))
elif auth_scheme == 'token':
token = self.configuration.safe_get('blockbridge_auth_token')
if token is None:
raise exception.InvalidInput(
reason=_("Blockbridge token not configured (required "
"for
|
alexanderfefelov/nav | python/nav/web/navlets/status.py | Python | gpl-2.0 | 3,566 | 0 |
#
# Copyright (C) 2013 UNINETT AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 2 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details. You should have received a copy of the GNU General Public
# License along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
# pylint: disable=E1101
"""Status navlet"""
import simplejson
from datetime import datetime
from django.http import HttpResponse
from nav.django.utils import get_account
from nav.models.manage import Netbox
from nav.models.profiles import AccountNavlet
from nav.web.navlets import (Navlet, REFRESH_INTERVAL, NAVLET_MODE_VIEW,
NAVLET_MODE_EDIT)
from nav.web.webfront.utils import boxes_down
from nav.web.status.sections import get_user_sections
class StatusNavlet(Navlet):
"""Navlet for displaying status"""
title = "Status"
description = "Shows status for your ip-devices and services"
refresh_interval = 1000 * 60 * 10 # Refresh every 10 minutes
is_editable = True
def get_template_basename(self):
return "status"
def get(self, request, *args, **kwargs):
"""Fetch all status and display it to user"""
sections = get_user_sections(request.account)
problems = 0
for section in sections:
if section.history and section.devicehistory_type != 'a_boxDown':
problems += len(section.history)
context = self.get_context_data(**kwargs)
context['problems'] = problems
return self.render_to_response(context)
def get_context_data(self, **kwargs):
context = super(StatusNavlet, self).get_context_data(**kwargs)
if self.mode == NAVLET_MODE_VIEW:
down = boxes_down()
num_shadow = 0
for box in down:
if box.netbox.up == Netbox.UP_SHADOW:
num_shadow += 1
context['boxes_down'] = down
context['num_shadow'] = num_shadow
context['date_now'] = datetime.today()
elif self.mode == NAVLET_MODE_EDIT:
navlet = AccountNavlet.objects.get(pk=self.navlet_id)
if not navlet.preferences:
# This happens when navlet is added directly in sql and no
# preference is set
navlet.preferences = {REFRESH_INTERVAL: self.refresh_interval}
navlet.save()
context['interval'] = navlet.preferences.get(
REFRESH_INTERVAL, self.refresh_interval) / 1000
return context
def post(self, request):
"""Save refresh interval for this widget"""
account = get_account(request)
try:
interval = int(request.POST.get('interval')) * 1000
except ValueError:
return HttpResponse(status=400)
try:
navlet = AccountNavlet.objects.get(pk=self.navlet_id,
account=account)
except AccountNavlet.DoesNotExist:
return HttpResponse(status=404)
else:
navlet.preferences[REFRESH_INTERVAL] = interval
navlet.save()
return HttpResponse(simplejson.dumps(navlet.preferences))
|
yugangw-msft/azure-cli | src/azure-cli/azure/cli/command_modules/eventhubs/tests/latest/test_eventhub_commands_consumergroup_test.py | Python | mit | 3,921 | 0.003826 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# AZURE CLI EventHub - NAMESPACE TEST DEFINITIONS
import time
from azure.cli.testsdk import (ScenarioTest, ResourceGroupPreparer)
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
class EHConsumerGroupCURDScenarioTest(ScenarioTest):
@ResourceGroupPreparer(name_prefix='cli_test_eh_consumergroup')
def test_eh_consumergroup(self, resource_group):
self.kwargs.update({
'loc': 'westus2',
'rg': resource_group,
'namespacename': self.create_random_name(prefix='eventhubs-nscli', length=20),
'tags': {'tag1: value1', 'tag2: value2'},
'sku': 'Standard',
'tier': 'Standard',
'eventhubname': self.create_random_name(prefix='eventhubs-eventhubcli', length=25),
'isautoinflateenabled': 'True',
'maximumthroughputunits': 4,
'consumergroupname': self.create_random_name(prefix='clicg', length=20),
'usermetadata1': 'usermetadata',
'usermetadata2': 'usermetadata-updated'
})
# Create Namespace
self.cmd('eventhubs namespace create --resource-group {rg} --name {namespacename} --location {loc} --tags {tags} --sku {sku} --enable-auto-inflate {isautoinflateenabled} --maximum-throughput-units {maximumthroughputunits}',
checks=[self.check('sku.name', self.kwargs['sku'])])
# Get Created Namespace
self.cmd('eventhubs namespace show --resource-group {rg} --name {namespacename}', checks=[self.check('sku.name', self.kwargs['sku'])])
# Create Eventhub
self.cmd('eventhubs eventhub create --resource-group {rg} --namespace-name {namespacename} --name {eventhubname}', checks=[self.check('name', self.kwargs['eventhubname'])])
# Get Eventhub
self.cmd('eventhubs eventhub show --resource-group {rg} --namespace-name {namespacename} --name {eventhubname}', checks=[self.check('name', self.kwargs['eventhubname'])])
# Create ConsumerGroup
self.cmd('eventhubs eventhub consumer-group create --resource-group {rg} --namespace-name {namespacename} --eventhub-name {eventhubname} --name {consumergroupname} --user-metadata {usermetadata1}', checks=[self.check('name', self.kwargs['consumergroupname'])])
# Get Consumer Group
self.cmd('eventhubs eventhub consumer-group show --resource-group {rg} --namespace-name {namespacename} --eventhub-name {eventhubname} --name {consumergroupname}', checks=[self.check('name', self.kwargs['consumergroupname'])])
# Update ConsumerGroup
self.cmd('eventhubs eventhub consumer-group update --resource-group {rg} --namespace-name {namespacename} --eventhub-name {eventhubname} --name {consumergroupname} --user-metadata {usermetadata2}', checks=[self.check('userMetadata', self.kwargs['usermetadata2'])])
# Get ConsumerGroup List
listconsumergroup = self.cmd('eventhubs eventhub consumer-group list --resource-group {rg} --namespace-name {namespacename} --eventhub-name {eventhubname}').output
self.assertGreater(len(listconsumergroup), 0)
# Delete ConsumerGroup
self.cmd('eventhubs eventhub consumer-group delete --resource-group {rg} --namespace-name {namespacename} --eventhub-name {eventhubname} --name {consumergroupname}')
# Delete Eventhub
self.cmd('eventhubs eventhub delete --resource-group {rg} --namespace-name {namespacename} --name {eventhubname}')
# Delete Namespace
self.cmd('eventhubs namespace delete --resource-group {rg} --name {namespacename}')
|
djangocali/blog-api | blog-api/config/production.py | Python | bsd-3-clause | 4,350 | 0.001839 |
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
# TODO: Fix this where even if in Dev this class is called.
pass
from .common import Common
class Production(Common):
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
# END INSTALLED_APPS
# SECRET KEY
SECRET_KEY = values.SecretValue()
# END SECRET KEY
# django-secure
INSTALLED_APPS += ("djangosecure", )
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
# end django-secure
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += ('collectfast', )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY)
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# END STORAGE CONFIGURATION
# EMAIL
DEFAULT_FROM_EMAIL = values.Value('Blog API <noreply@djangocali.com>')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[Blog API] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# CACHING
# Only do this here because thanks to django-pylibmc-sasl and pylibmc
# memcacheify is painful to install on windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
# END CACHING
# Your production stuff: Below this line define 3rd party libary settings
|
ymap/aioredis | tests/conftest.py | Python | mit | 22,698 | 0 |
import asyncio
import pytest
import socket
import subprocess
import sys
import contextlib
import os
import ssl
import time
import logging
import tempfile
import atexit
from collections import namedtuple
from urllib.parse import urlencode, urlunparse
from async_timeout import timeout as async_timeout
import aioredis
import aioredis.sentinel
TCPAddress = namedtuple('TCPAddress', 'host port')
RedisServer = namedtuple('RedisServer',
'name tcp_address unixsocket version password')
SentinelServer = namedtuple('SentinelServer',
'name tcp_address unixsocket version masters')
# Public fixtures
@pytest.yield_fixture
def loop():
"""Creates new event loop."""
loop = asyncio.new_event_loop()
asyncio.set_event_loop(None)
try:
yield loop
finally:
if hasattr(loop, 'is_closed'):
closed = loop.is_closed()
else:
closed = loop._closed # XXX
if not closed:
loop.call_soon(loop.stop)
loop.run_forever()
loop.close()
@pytest.fixture(scope='session')
def unused_port():
"""Gets random free port."""
def fun():
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind(('127.0.0.1', 0))
return s.getsockname()[1]
return fun
@pytest.fixture
def create_connection(_closable, loop):
"""Wrapper around aioredis.create_connection."""
async def f(*args, **kw):
kw.setdefault('loop', loop)
conn = await aioredis.create_connection(*args, **kw)
_closable(conn)
return conn
return f
@pytest.fixture(params=[
aioredis.create_redis,
aioredis.create_redis_pool],
ids=['single', 'pool'])
def create_redis(_closable, loop, request):
"""Wrapper around aioredis.create_redis."""
factory = request.param
async def f(*args, **kw):
kw.setdefault('loop', loop)
redis = await factory(*args, **kw)
_closable(redis)
return redis
return f
@pytest.fixture
def create_pool(_closable, loop):
"""Wrapper around aioredis.create_pool."""
async def f(*args, **kw):
kw.setdefault('loop', loop)
redis = await aioredis.create_pool(*args, **kw)
_closable(redis)
return redis
return f
@pytest.fixture
def create_sentinel(_closable, loop):
"""Helper instantiating RedisSentinel client."""
async def f(*args, **kw):
kw.setdefault('loop', loop)
# make it fail fast on slow CIs (if timeout argument is ommitted)
kw.setdefault('timeout', .001)
client = await aioredis.sentinel.create_sentinel(*args, **kw)
_closable(client)
return client
return f
@pytest.fixture
def pool(create_pool, server, loop):
"""Returns RedisPool instance."""
pool = loop.run_until_complete(
create_pool(server.tcp_address, loop=loop))
return pool
@pytest.fixture
def redis(create_redis, server, loop):
"""Returns Redis client instance."""
redis = loop.run_until_complete(
create_redis(server.tcp_address, loop=loop))
loop.run_until_complete(redis.flushall())
return redis
@pytest.fixture
def redis_sentinel(create_sentinel, sentinel, loop):
"""Returns Redis Sentinel client instance."""
redis_sentinel = loop.run_until_complete(
create_sentinel([sentinel.tcp_address], timeout=2, loop=loop))
assert loop.run_until_complete(redis_sentinel.ping()) == b'PONG'
return redis_sentinel
@pytest.yield_fixture
def _closable(loop):
conns = []
try:
yield conns.append
finally:
waiters = []
while conns:
conn = conns.pop(0)
conn.close()
waiters.append(conn.wait_closed())
if waiters:
loop.run_until_complete(asyncio.gather(*waiters, loop=loop))
@pytest.fixture(scope='session')
def server(start_server):
"""Starts redis-server instance."""
return start_server('A')
@pytest.fixture(scope='session')
def serverB(start_server):
"""Starts redis-server instance."""
return start_server('B')
@pytest.fixture(scope='session')
def sentinel(start_sentinel, request, start_server):
"""Starts redis-sentinel instance with one master -- masterA."""
# Adding master+slave for normal (no failover) tests:
master_no_fail = start_server('master-no-fail')
start_server('slave-no-fail', slaveof=master_no_fail)
# Adding master+slave for failover test;
masterA = start_server('masterA')
start_server('slaveA', slaveof=masterA)
return start_sentinel('main', masterA, master_no_fail)
@pytest.fixture(params=['path', 'query'])
def server_tcp_url(server, request):
def make(**kwargs):
netloc = '{0.host}:{0.port}'.format(server.tcp_address)
path = ''
if request.param == 'path':
if 'password' in kwargs:
netloc = ':{0}@{1.host}:{1.port}'.format(
kwargs.pop('password'), server.tcp_address)
if 'db' in kwargs:
path = '/{}'.format(kwargs.pop('db'))
query = urlencode(kwargs)
return urlunparse(('redis', netloc, path, '', query, ''))
return make
@pytest.fixture
def server_unix_url(server):
def make(**kwargs):
query = urlencode(kwargs)
return urlunparse(('unix', '', server.unixsocket, '', query, ''))
return make
# Internal stuff #
def pytest_addoption(parser):
parser.addoption('--redis-server', default=[],
action="append",
help="Path to redis-server executable,"
" defaults to `%(default)s`")
parser.addoption('--ssl-cafile', default='tests/ssl/cafile.crt',
help="Path to testing SSL CA file")
parser.addoption('--ssl-dhparam', default='tests/ssl/dhparam.pem',
help="Path to testing SSL DH params file")
parser.addoption('--ssl-cert', default='tests/ssl/cert.pem',
help="Path to testing SSL CERT file")
parser.addoption('--uvloop', default=False,
action='store_true',
help="Run tests with uvloop")
def _read_server_version(redis_bin):
args = [redis_bin, '--version']
with subprocess.Popen(args, stdout=subprocess.PIPE) as proc:
version = proc.stdout.readline().decode('utf-8')
for part in version.split():
if part.startswith('v='):
break
else:
raise RuntimeError(
"No version info can be found in {}".format(version))
return tuple(map(int, part[2:].split('.')))
@contextlib.contextmanager
def config_writer(path):
with open(path, 'wt') as f:
def write(*args):
print(*args, file=f)
yield write
REDIS_SERVERS = []
VERSIONS = {}
def format_version(srv):
return 'redis_v{}'.format('.'.join(map(str, VERSIONS[srv])))
@pytest.fixture(scope='session')
def start_server(_proc, request, unused_port, server_bin):
"""Starts Redis server instance.
Caches instances by name.
``name`` param -- instance alias
``config_lines`` -- optional list of config directives to put in config
(if no config_lines passed -- no config will be generated,
for backward compatibility).
"""
version = _read_server_version(server_bin)
verbose = request.config.getoption('-v') > 3
servers = {}
def timeout(t):
end = time.time() + t
while time.time() <= end:
yield True
raise RuntimeError("Redis startup timeout expired")
def maker(name, config_lines=None, *, slaveof=None, password=None):
assert slaveof is None or isinstance(slaveof, RedisServer), slaveof
if name in servers:
return servers[name]
port = unused_port()
tcp_address = TCPAddress('localhost', port)
if sys.platform == 'win32':
unixsocket = None
else:
unixsocket = '/tmp/aioredis.{}.sock'.format(port)
dumpfile = 'dump-{}.rdb'.format(port)
data_dir = tempfile.gettempdir()
dumpfile_path = os.path.join(data_dir, dump
|
jwlockhart/concept-networks | examples/draw_tripartite.py | Python | gpl-3.0 | 3,581 | 0.021502 |
# @author Jeff Lockhart <jwlock@umich.edu>
# Script for drawing the tripartite network underlying analysis.
# version 1.0
import pandas as pd
import networkx as nx
import matplotlib.pyplot as plt
import sys
#add the parent directory to the current session's path
sys.path.insert(0, '../')
from network_utils import *
#read our cleaned up data
df = pd.read_csv('../data/sgm_stud/merged.tsv', sep='\t')
#The list of codes we're interested in.
code_cols = ['culture_problem',
#'culture_absent',
'culture_solution',
'culture_helpless',
'culture_victim',
'cishet_problem',
'cishet_victim',
'cishet_solution',
#'cishet_absent',
'cishet_helpless',
'sgm_victim',
'sgm_problem',
'sgm_helpless',
#'sgm_absent',
'sgm_solution',
'school_problem',
'school_solution',
#'school_absent',
'school_victim',
'school_helpless',
'community_problem',
'community_solution',
'community_helpless',
#'community_absent',
'community_victim']
#generate unique ID keys for each student and excerpt
def s_id(row):
return row['uni'] + str(row['Participant'])
def e_id(row):
return row['s_id'] + '-' + str(row['Start'])
df['s_id'] = df.apply(s_id, axis=1)
df['e_id'] = df.apply(e_id, axis=1)
#make a graph
g = nx.Graph()
#add all of our codes as nodes
for c in code_cols:
g.add_node(c, t='code')
#add each excerpt of text as a node. Connect it with relevant
#students and codes.
st = []
ex = []
last = ''
for row in df.iterrows():
#add the student node
g.add_node(row[1]['s_id'], t='student')
#if we haven't seen this student before, save the order we saw them in
if last != row[1]['s_id']:
last = row[1]['s_id']
st.append(last)
#add this excerpt node. Save its order to our list.
g.add_node(row[1]['e_id'], t='excerpt')
ex.append(row[1]['e_id'])
#add the edge joining this student and excerpt.
g.add_edge(row[1]['s_id'], row[1]['e_id'])
#for each code this excerpt has, draw an edge to it
for c in code_cols:
if row[1][c]:
g.add_edge(row[1]['e_id'], c)
#get a dictionary of our code nodes' labels
l = {}
for c in code_cols:
l[c] = c
#fix the positions of each node type in columns
pos = dict()
#space out the student and code nodes to align with excerpt column height
pos.update( (n, (1, i*5.57)) for i, n in enumerate(st) )
pos.update( (n, (2, i)) for i, n in enumerate(ex) )
pos.update( (n, (3, i*90)) for i, n in enumerate(code_cols) )
#make our figure big so we can see
plt.figure(figsize=(20,20))
#draw our nodes
nx.draw_networkx_nodes(g, pos, nodelist=st, node_color='r',
node_shape='^')
nx.draw_networkx_nodes(g, pos, nodelist=ex, node_color='b',
node_shape='o', alpha=0.5)
#draw our edges with low alpha so we can see
nx.draw_networkx_edges(g, pos, alpha=0.2)
#axes look silly
plt.axis('off')
#save the edges and nodes as one image
plt.savefig('../data/tripartite_unlabeled.png')
#save the labels for the codes as a different image
#this lets me edit them in with GIMP so that they're better positioned.
plt.figure(figsize=(20,20))
nx.draw_networkx_labels(g, pos, labels=l, font_size=20)
nx.draw_networkx_edges(g, pos, alpha=0)
plt.axis('off')
plt.savefig('../data/tripartite_labeles.png')
|
sputnick-dev/weboob | modules/gdfsuez/pages/homepage.py | Python | agpl-3.0 | 2,508 | 0.001595 |
# -*- coding: utf-8 -*-
# Copyright(C) 2013 Mathieu Jourdan
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from datetime import date
from weboob.deprecated.browser import Page
from weboob.capabilities.bill import Subscription
class LoginPage(Page):
def login(self, login, password):
self.browser.select_form('symConnexionForm')
self.browser["portlet_login_plein_page_3{pageFlow.mForm.login}"] = unicode(login)
self.browser["portlet_login_plein_page_3{pageFlow.mForm.password}"] = unicode(password)
self.browser.submit()
class HomePage(Page):
def on_loaded(self):
pass
class AccountPage(Page):
def get_subscription_list(self):
table = self.document.xpath('//table[@id="ensemble_contrat_N0"]')[0]
if len(table) > 0:
# some clients may have subscriptions to gas and electricity,
# but they receive a single bill
# to avoid "boobill details" and "boobill bills" returning the same
# table twice, we could return only one subscription for both.
# We do not, and "boobill details" will take care of parsing only the
# relevant section in the bill files.
for line in table[0].xpath('//tbody/tr'):
cells = line.xpath('td')
snumber = cells[2].attrib['id'].replace('Contrat_', '')
slabel = cells[0].xpath('a')[0].text.replace('offre', '').strip()
d = unicode(cells[3].xpath('strong')[0].text.strip())
sdate = date(*reversed([int(x) for x in d.split("/")]))
sub = Subscription(snumber)
sub._id = snumber
sub.label = slabel
sub.subscriber = unicode(cells[1])
sub.renewdate = sdate
yield sub
class TimeoutPage(Page):
def on_loaded(self):
pass
|
intel-analytics/BigDL | python/orca/test/bigdl/orca/learn/test_metrics.py | Python | apache-2.0 | 8,250 | 0.000485 |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import torch
import pytest
def test_torch_Accuracy():
from bigdl.orca.learn.pytorch.pytorch_metrics import Accuracy
pred = torch.tensor([0, 2, 3, 4])
target = torch.tensor([1, 2, 3, 4])
acc = Accuracy()
acc(pred, target)
assert acc.compute() == 0.75
pred = torch.tensor([0, 2, 3, 4])
target = torch.tensor([1, 1, 2, 4])
acc(pred, target)
assert acc.compute() == 0.5
def test_torch_BinaryAccuracy():
from bigdl.orca.learn.pytorch.pytorch_metrics import BinaryAccuracy
target = torch.tensor([1, 1, 0, 0])
pred = torch.tensor([0.98, 1, 0, 0.6])
bac = BinaryAccuracy()
bac(pred, target)
assert bac.compute() == 0.75
target = torch.tensor([1, 1, 0, 0])
pred = torch.tensor([0.98, 1, 0, 0.6])
bac(pred, target, threshold=0.7)
assert bac.compute() == 0.875
def test_torch_CategoricalAccuracy():
from bigdl.orca.learn.pytorch.pytorch_metrics import CategoricalAccuracy
pred = torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
target = torch.tensor([[0, 0, 1], [0, 1, 0]])
cacc = CategoricalAccuracy()
cacc(pred, target)
assert cacc.compute() == 0.5
pred = torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
target = torch.tensor([[0, 1, 0], [0, 1, 0]])
cacc(pred, target)
assert cacc.compute() == 0.75
def test_torch_SparseCategoricalAccuracy():
from bigdl.orca.learn.pytorch.pytorch_metrics import SparseCategoricalAccuracy
pred = torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
target = torch.tensor([[2], [1]])
scacc = SparseCategoricalAccuracy()
scacc(pred, target)
assert scacc.compute() == 0.5
pred = torch.tensor([[0.1, 0.9, 0.8], [0.05, 0.95, 0]])
target = torch.tensor([2, 0])
scacc(pred, target)
assert scacc.compute() == 0.25
def test_torch_Top5Accuracy():
from bigdl.orca.learn.pytorch.pytorch_metrics import Top5Accuracy
pred = torch.tensor([[0.1, 0.9, 0.8, 0.4, 0.5, 0.2],
[0.05, 0.95, 0, 0.4, 0.5, 0.2]])
target = torch.tensor([2, 2])
top5acc = Top5Accuracy()
top5acc(pred, target)
assert top5acc.compute() == 0.5
pred = torch.tensor([[0.1, 0.9, 0.8, 0.4, 0.5, 0.2],
[0.05, 0.95, 0, 0.4, 0.5, 0.2]])
target = torch.tensor([[2], [1]])
top5acc(pred, target)
assert top5acc.compute() == 0.75
def test_torch_MAE():
from bigdl.orca.learn.pytorch.pytorch_metrics import MAE
pred = torch.tensor([[1, -2], [1, 1]])
target = torch.tensor([[0, 1], [0, 1]])
m = MAE()
m(pred, target)
assert m.compute() == 1.25
pred = torch.tensor([[1, 1], [1, 1]])
target = torch.tensor([[0, 1], [0, 1]])
m(pred, target)
assert m.compute() == 0.875
pred = torch.tensor([[1.5, 2.5], [1.0, 1.0]])
target = torch.tensor([[0.2, 1.1], [0.5, 1.0]])
m(pred, target)
assert abs(m.compute() - 0.85) < 1e-7 # add fault tolerance for floating point precision
pred = torch.tensor([[1.5, 2.5, 1.5, 2.5], [1.8, 2.0, 0.5, 4.5]])
target = torch.tensor([[0, 1, 0, 0], [0, 1, 2, 2]])
m(pred, target)
assert abs(m.compute() - 1.2) < 1e-7
def test_torch_MSE():
from bigdl.orca.learn.pytorch.pytorch_metrics import MSE
pred = torch.tensor([[1, -2], [1, 1]])
target = torch.tensor([[1, 1], [1, 1]])
m = MSE()
m(pred, target)
assert m.compute() == 2.25
pred = torch.tensor([[1, 1], [1, 1]])
target = torch.tensor([[1, 1], [0, 1]])
m(pred, target)
assert m.compute() == 1.25
pred = torch.tensor([[1.3, 1.0], [0.2, 1.0]])
target = torch.tensor([[1.1, 1.0], [0.0, 1.0]])
m(pred, target)
assert abs(m.compute() - 0.84) < 1e-7
pred = torch.tensor([[1.2, 1.2, 1.2, 1.8], [0.2, 0.8, 0.9, 1.1]])
target = torch.tensor([[1, 1, 1, 2], [0, 1, 1, 1]])
m(pred, target)
assert abs(m.compute() - 0.517) < 1e-7
def test_torch_BinaryCrossEntropy():
from bigdl.orca.learn.pytorch.pytorch_metrics import BinaryCrossEntropy
pred = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
target = torch.tensor([[0, 1], [0, 0]])
entropy = BinaryCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 0.81492424) < 1e-6
pred = torch.tensor([0.6, 0.4, 0.4, 0.6])
target = torch.tensor([0, 1, 0, 0])
entropy(pred, target)
assert abs(entropy.compute() - 0.81492424) < 1e-6
def test_torch_CategoricalCrossEntropy():
from bigdl.orca.learn.pytorch.pytorch_metrics import CategoricalCrossEntropy
pred = torch.tensor([[0.05, 0.95, 0], [0.1, 0.8, 0.1]])
target = torch.tensor([[0, 1, 0], [0, 0, 1]])
entropy = CategoricalCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 1.1769392) < 1e-6
def test_torch_SparseCategoricalCrossEntropy():
from bigdl.orca.learn.pytorch.pytorch_metrics import SparseCategoricalCrossEntropy
pred = torch.tensor([[0.05, 0.95, 0], [0.1, 0.8, 0.1]])
target = torch.tensor([1, 2])
entropy = SparseCategoricalCrossEntropy()
entropy(pred, target)
assert abs(entropy.compute() - 1.1769392) < 1e-6
def test_torch_KLDivergence():
from bigdl.orca.learn.pytorch.pytorch_metrics import KLDivergence
pred = torch.tensor([[0.6, 0.4], [0.4, 0.6]])
target = torch.tensor([[0, 1], [0, 0]])
div = KLDivergence()
div(pred, target)
assert abs(div.compute() - 0.45814) < 1e-5
def test_torch_Poisson():
from bigdl.orca.learn.pytorch.pytorch_metrics import Poisson
pred = torch.tensor([[1, 1], [0, 0]])
target = torch.tensor([[0, 1], [0, 0]])
poisson = Poisson()
poisson(pred, target)
assert abs(poisson.compute() - 0.49999997) < 1e-6
def test_torch_AUC():
from bigdl.orca.learn.pytorch.pytorch_metrics import AUROC
pred = torch.tensor([0.3, 0.4, 0.2, 0.5, 0.6, 0.7, 0.8])
target = torch.tensor([0, 1, 0, 1, 1, 1, 1.0])
auc = AUROC()
auc(pred, target)
print(auc.compute())
assert (auc.compute() - 1.0) < 1e-6
def test_torch_ROC():
from bigdl.orca.learn.pytorch.pytorch_metrics import ROC
pred = torch.tensor([0.3, 0.6, 0.7, 0.8])
target = torch.tensor([0, 1, 1, 1.0])
auc = ROC()
auc(pred, target)
x, y, z = auc.compute()
assert (x[4] == 1.)
assert (y[4] == 1.)
assert (z[4] - 0.3 < 10e-6)
def test_torch_F1Score():
from bigdl.orca.learn.pytorch.pytorch_metrics import F1Score
target = torch.tensor([0, 1, 2, 0, 1, 2])
preds = torch.tensor([0, 2, 1, 0, 0, 1])
f1 = F1Score()
f1(preds, target)
score = f1.compute()
assert (score - 0.3332 < 1e-3)
def test_torch_Precision():
from bigdl.orca.learn.pytorch.pytorch_metrics import Precision
target = torch.tensor([0, 1, 1, 0, 1, 1])
preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
precision = Precision()
precision(preds, target)
assert (precision.compute() - 0.75 < 10e-6)
def test_torch_Recall():
from bigdl.orca.learn.pytorch.pytorch_metrics import Recall
target = torch.tensor([0, 1, 1, 0, 1, 1])
preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
recall = Recall()
recall(preds, target)
assert (recall.compute() - 0.75 < 10e-6)
def test_torch_PrecisionRecallCurve():
from bigdl.orca.learn.pytorch.pytorch_metrics import PrecisionRecallCurve
target = torch.tensor([0, 1, 1, 0, 1, 1])
preds = torch.tensor([0, 0.2, 1.0, 0.8, 0.6, 0.5])
curve = PrecisionRecallCurve()
curve(preds, target)
print(curve.compute())
precision, recall, thresholds = curve.compute()
assert (precision[0] - 0.8 < 10e-6)
assert (recall[0] - 1.0 < 10e-6)
assert (thresholds[0] -
|
priyankamandikal/arowf
|
backlog.py
|
Python
|
apache-2.0
| 3,939
| 0.030972
|
# -*- coding: utf-8 -*-
### denote lines that need to be changed for different categories
import sys
reload(sys)
sys.setdefaultencoding("utf-8") # to handle UnicodeDecode errors
from math import ceil # top 20% of rankings
from traceback import format_exc # to handle errors
import pickle # to store article rankings
import json # for parsing the json response
from urllib2 import urlopen # to load urls
from os import path, listdir
from operator import itemgetter # to rank articles in the order of decreasing pageviews in a list
# from collections import OrderedDict # to store articles in the order of decreasing pageviews in a dict
from pageviews import format_date, article_views # to get pageviews
# cmlimit to specify number of articles to extract, max can be 500 (5000 for bots)
# cmtitle for name of Category to look in
# cmstartsortkeyprefix for starting the article listing from a particular alphabet or set of alphabets,
# 'b' for PA outdated
category_api_url = 'https://en.wikipedia.org/w/api.php?action=query&list=categorymembers&cmlimit=500&format=json&cmstartsortkeyprefix=m' ###
recdir = 'TL_records' + path.sep ###
def nextrecord():
try:
records = listdir(recdir)
record = 1+int(max(records)[:9])
### todo: check for improperly named files
return format(record, '09')
except:
return format(1, '09')
if __name__ == '__main__':
#category_list = ['Category:All_Wikipedia_articles_in_need_of_updating',
# 'Category:All_NPOV_disputes']
try:
category_url = '&cmtitle='.join([category_api_url, 'Category:All_NPOV_disputes']) ###
json_obj = urlopen(category_url).read()
except:
print "Error while obtaining articles from Category API"
print format_exc()
readable_json = json.loads(json_obj)
cnt = 0
d = [] # list of lists of rankings to be stored in a pickle file
for ele in readable_json['query']['categorymembers']:
title = ele['title']
link = '/'.join(['https://en.wikipedia.org/wiki', title.replace(' ', '_')])
categ = 'Category:All_NPOV_disputes' ###
pageviews = article_views(title)
print cnt+1, title, pageviews
d.append([title, link, pageviews, categ])
cnt = cnt+1
# od = OrderedDict(sorted(d.items(), key=lambda t:t[1][1], reverse=True)) # ordered dict in descending order of final score
od = sorted(d, key=itemgetter(2), reverse=True) # ordered list in descending order of pageviews
print '\n\nArticle rankings based on pageviews:\n'
for item in od:
print item
#with open('npov_b_ranking.pkl', 'wb') as f:
with open('TL_pickles/npov_m_ranking.pkl', 'wb') as f: ###
pickle.dump(od, f)
# if __name__ == '__main__':
# with open('PA_pickles/npov_m_ranking.pkl', 'rb') as f: ### use when od has already been created; comment above stuff
#         od = pickle.load(f)
cnt = 0
counter = int(ceil(0.2*len(od))) # top 20% of rankings
#url = 'http://127.0.0.1:5000/ask' # url for POSTing to ask. Replace with Labs/PythonAnywhere instance if needed
for i in od:
# POSTing to ask
# data = {'question':'The article '+i[1]+' is in https://en.wikipedia.org/wiki/'+i[3]+'.\nHow would you resolve it?\n'+i[3],
# 'iframeurl':i[1]}
# r = requests.post(url, data=data)
|
fn = recdir + nextrecord() + 'q'
print fn
if path.exists(fn):
print('A billion questions reached! Start answering!')
exit()
f = open(fn, 'w')
# use 'How would you resolve it?' for NPOV and 'How would you update it?' for outdated
f.write('The article <a target="_blank" href="' + i[1] + '">' + i[0] +
'</a> is in <a target="_blank" href = "https://en.wikipedia.org/wiki/'+ i[3] + '">' + i[3] +
'</a>. How would you resolve it?<br/><a style="float:right;" href="' +
i[1] + '">'+i[1]+'</a><iframe src="' + i[1] +
'" style="height: 40%; width: 100%;">[Can not display <a target="_blank" href="' + i[1] + '">'
+ i[1] + '</a> inline as an iframe here.]</iframe>') ###
f.close()
cnt += 1
if (cnt == counter):
exit()
|
egabancho/invenio-knowledge
|
invenio_knowledge/models.py
|
Python
|
gpl-2.0
| 12,573
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Knowledge database models."""
import os
from invenio_base.globals import cfg
from invenio.ext.sqlalchemy import db
from invenio.ext.sqlalchemy.utils import session_manager
from invenio_collections.models import Collection
from invenio.utils.text import slugify
from sqlalchemy.dialects import mysql
from sqlalchemy.event import listens_for
from sqlalchemy.orm.collections import attribute_mapped_collection
from sqlalchemy.schema import Index
class KnwKB(db.Model):
"""Represent a KnwKB record."""
KNWKB_TYPES = {
'written_as': 'w',
'dynamic': 'd',
'taxonomy': 't',
}
__tablename__ = 'knwKB'
id = db.Column(db.MediumInteger(8, unsigned=True), nullable=False,
primary_key=True, autoincrement=True)
_name = db.Column(db.String(255), server_default='',
unique=True, name="name")
_description = db.Column(db.Text, nullable=False,
name="description", default="")
_kbtype = db.Column(db.Char(1), nullable=True, default='w', name="kbtype")
slug = db.Column(db.String(255), unique=True, nullable=False, default="")
# Enable or disable the access from REST API
is_api_accessible = db.Column(db.Boolean, default=True, nullable=False)
@db.hybrid_property
def name(self):
"""Get name."""
return self._name
@name.setter
def name(self, value):
"""Set name and generate the slug."""
self._name = value
# generate slug
if not self.slug:
self.slug = KnwKB.generate_slug(value)
@db.hybrid_property
def description(self):
"""Get description."""
return self._description
@description.setter
def description(self, value):
"""Set description."""
# TEXT in mysql don't support default value
# @see http://bugs.mysql.com/bug.php?id=21532
self._description = value or ''
@db.hybrid_property
def kbtype(self):
"""Get kbtype."""
return self._kbtype
@kbtype.setter
def kbtype(self, value):
"""Set kbtype."""
if value is None:
# set the default value
return
# or set one of the available values
kbtype = value[0] if len(value) > 0 else 'w'
if kbtype not in ['t', 'd', 'w']:
raise ValueError('unknown type "{value}", please use one of \
following values: "taxonomy", "dynamic" or \
"written_as"'.format(value=value))
self._kbtype = kbtype
def is_dynamic(self):
"""Return true if the type is dynamic."""
return self._kbtype == 'd'
def to_dict(self):
"""Return a dict representation of KnwKB."""
mydict = {'id': self.id, 'name': self.name,
'description': self.description,
'kbtype': self.kbtype}
if self.kbtype == 'd':
mydict.update((self.kbdefs.to_dict() if self.kbdefs else {}) or {})
return mydict
def get_kbr_items(self, searchkey="", searchvalue="", searchtype='s'):
"""
Return dicts of 'key' and 'value' from a knowledge base.
:param kb_name the name of the knowledge base
:param searchkey search using this key
:param searchvalue search using this value
:param searchtype s=substring, e=exact, sw=startswith
:return a list of dictionaries [{'key'=>x, 'value'=>y},..]
"""
import warnings
warnings.warn("The function is deprecated. Please use the "
"`KnwKBRVAL.query_kb_mappings()` instead. "
"E.g. [kval.to_dict() for kval in "
"KnwKBRVAL.query_kb_mappings(kb_id).all()]")
if searchtype == 's' and searchkey:
searchkey = '%' + searchkey + '%'
if searchtype == 's' and searchvalue:
searchvalue = '%' + searchvalue + '%'
if searchtype == 'sw' and searchvalue: # startswith
searchvalue = searchvalue + '%'
if not searchvalue:
searchvalue = '%'
if not searchkey:
searchkey = '%'
kvals = KnwKBRVAL.query.filter(
KnwKBRVAL.id_knwKB.like(self.id),
KnwKBRVAL.m_value.like(searchvalue),
KnwKBRVAL.m_key.like(searchkey)).all()
return [kval.to_dict() for kval in kvals]
def get_kbr_values(self, searchkey="", searchvalue="", searchtype='s'):
"""
Return dicts of 'key' and 'value' from a knowledge base.
:param kb_name the name of the knowledge base
:param searchkey search using this key
:param searchvalue search using this value
:param searchtype s=substring, e=exact, sw=startswith
:return a list of dictionaries [{'key'=>x, 'value'=>y},..]
"""
import warnings
warnings.warn("The function is deprecated. Please use the "
"`KnwKBRVAL.query_kb_mappings()` instead. "
"E.g. [(kval.m_value,) for kval in "
"KnwKBRVAL.query_kb_mappings(kb_id).all()]")
# prepare filters
if searchtype == 's':
searchkey = '%' + searchkey + '%'
if searchtype == 's' and searchvalue:
searchvalue = '%' + searchvalue + '%'
if searchtype == 'sw' and searchvalue: # startswith
searchvalue = searchvalue + '%'
if not searchvalue:
searchvalue = '%'
# execute query
return db.session.execute(
db.select([KnwKBRVAL.m_value],
db.and_(KnwKBRVAL.id_knwKB.like(self.id),
KnwKBRVAL.m_value.like(searchvalue),
KnwKBRVAL.m_key.like(searchkey))))
@session_manager
def set_dyn_config(self, field, expression, collection=None):
"""Set dynamic configuration."""
if self.kbdefs:
# update
self.kbdefs.output_tag = field
self.kbdefs.search_expression = expression
self.kbdefs.collection = collection
db.session.merge(self.kbdefs)
else:
# insert
self.kbdefs = KnwKBDDEF(output_tag=field,
                                    search_expression=expression,
                                    collection=collection)
@staticmethod
def generate_slug(name):
"""Generate a slug for
|
the knowledge.
:param name: text to slugify
:return: slugified text
"""
slug = slugify(name)
i = KnwKB.query.filter(db.or_(
KnwKB.slug.like(slug),
KnwKB.slug.like(slug + '-%'),
)).count()
return slug + ('-{0}'.format(i) if i > 0 else '')
@staticmethod
def exists(kb_name):
"""Return True if a kb with the given name exists.
:param kb_name: the name of the knowledge base
:return: True if kb exists
"""
return KnwKB.query_exists(KnwKB.name.like(kb_name))
@staticmethod
def query_exists(filters):
"""Return True if a kb with the given filters exists.
E.g: KnwKB.query_exists(KnwKB.name.like('FAQ'))
:param filters: filter for sqlalchemy
:return: True if kb exists
"""
        return db.session.query(
            KnwKB.query.filter(filters).exists()).scalar()
|
EduPepperPDTesting/pepper2013-testing
|
lms/djangoapps/reportlab/graphics/barcode/__init__.py
|
Python
|
agpl-3.0
| 5,911
| 0.011673
|
#
# Copyright (c) 1996-2000 Tyler C. Sarna <tsarna@sarna.org>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. All advertising materials mentioning features or use of this software
# must display the following acknowledgement:
# This product includes software developed by Tyler C. Sarna.
# 4. Neither the name of the author nor the names of contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS
# BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
__all__ = tuple('''registerWidget getCodes getCodeNames createBarcodeDrawing createBarcodeImageInMemory'''.split())
__version__ = '0.9'
__doc__='''Popular barcodes available as reusable widgets'''
_widgets = []
def registerWidget(widget):
_widgets.append(widget)
def _reset():
_widgets[:] = []
|
from reportlab.graphics.barcode.widgets import BarcodeI2of5, BarcodeCode128, BarcodeStandard93,\
BarcodeExtended93, BarcodeStandard39, BarcodeExtended39,\
BarcodeMSI, BarcodeCodabar, BarcodeCode11, BarcodeFIM,\
BarcodePOSTNET, BarcodeUSPS_4State, BarcodeCode128Auto, BarcodeECC200DataMatrix
#newer codes will typically get their own module
from reportlab.graphics.barcode.eanbc import Ean13BarcodeWidget, Ean8BarcodeWidget, UPCA, Ean5BarcodeWidget, ISBNBarcodeWidget
from reportlab.graphics.barcode.qr import QrCodeWidget
for widget in (BarcodeI2of5,
BarcodeCode128,
BarcodeCode128Auto,
BarcodeStandard93,
BarcodeExtended93,
BarcodeStandard39,
BarcodeExtended39,
BarcodeMSI,
BarcodeCodabar,
BarcodeCode11,
BarcodeFIM,
BarcodePOSTNET,
BarcodeUSPS_4State,
Ean13BarcodeWidget,
Ean8BarcodeWidget,
UPCA,
Ean5BarcodeWidget,
ISBNBarcodeWidget,
QrCodeWidget,
BarcodeECC200DataMatrix,
):
registerWidget(widget)
_reset()
from reportlab.rl_config import register_reset
register_reset(_reset)
def getCodes():
"""Returns a dict mapping code names to widgets"""
#the module exports a dictionary of names to widgets, to make it easy for
#apps and doc tools to display information about them.
codes = {}
for widget in _widgets:
codeName = widget.codeName
codes[codeName] = widget
return codes
def getCodeNames():
"""Returns sorted list of supported bar code names"""
return sorted(getCodes().keys())
def createBarcodeDrawing(codeName, **options):
"""This creates and returns a drawing with a barcode.
"""
from reportlab.graphics.shapes import Drawing, Group
codes = getCodes()
bcc = codes[codeName]
width = options.pop('width',None)
height = options.pop('height',None)
isoScale = options.pop('isoScale',0)
kw = {}
for k,v in options.items():
if k.startswith('_') or k in bcc._attrMap: kw[k] = v
bc = bcc(**kw)
#Robin's new ones validate when setting the value property.
#Ty Sarna's old ones do not. We need to test.
if hasattr(bc, 'validate'):
bc.validate() #raise exception if bad value
if not bc.valid:
raise ValueError("Illegal barcode with value '%s' in code '%s'" % (options.get('value',None), codeName))
#size it after setting the data
x1, y1, x2, y2 = bc.getBounds()
w = float(x2 - x1)
h = float(y2 - y1)
sx = width not in ('auto',None)
sy = height not in ('auto',None)
if sx or sy:
sx = sx and width/w or 1.0
sy = sy and height/h or 1.0
if isoScale:
if sx<1.0 and sy<1.0:
sx = sy = max(sx,sy)
else:
sx = sy = min(sx,sy)
w *= sx
h *= sy
else:
sx = sy = 1
#bc.x = -sx*x1
#bc.y = -sy*y1
d = Drawing(width=w,height=h,transform=[sx,0,0,sy,-sx*x1,-sy*y1])
d.add(bc, "_bc")
return d
def createBarcodeImageInMemory(codeName,**options):
"""This creates and returns barcode as an image in memory.
Takes same arguments as createBarcodeDrawing and also an
optional format keyword which can be anything acceptable
to Drawing.asString eg gif, pdf, tiff, py ......
"""
format = options.pop('format','png')
d = createBarcodeDrawing(codeName, **options)
return d.asString(format)
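# --- Illustrative usage sketch (editorial addition, not part of the original module) ---
# A minimal, hedged example of driving the helpers defined above: getCodeNames() lists
# the registered symbologies, createBarcodeDrawing() returns a Drawing sized to the
# rendered symbol, and createBarcodeImageInMemory() renders one straight to bytes.
# The value '0123456789' and the choice of 'Code128' are arbitrary sample inputs.
if __name__ == '__main__':
    print(getCodeNames())                                  # e.g. ['Codabar', 'Code11', ...]
    drawing = createBarcodeDrawing('Code128', value='0123456789')
    print(drawing.width, drawing.height)                   # sized from the widget's bounds
    pdf_bytes = createBarcodeImageInMemory('Code128', value='0123456789', format='pdf')
    print('%d bytes of PDF data' % len(pdf_bytes))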
|
snehasi/servo
|
tests/wpt/web-platform-tests/referrer-policy/generic/subresource/subresource.py
|
Python
|
mpl-2.0
| 3,167
| 0.011683
|
import os, sys, json, urlparse, urllib
def get_template(template_basename):
script_directory = os.path.dirname(os.path.abspath(__file__))
template_directory = os.path.abspath(os.path.join(script_directory,
"..",
"template"))
template_filename = os.path.join(template_directory, template_basename);
with open(template_filename, "r") as f:
return f.read()
# TODO(kristijanburnik): subdomain_prefix is a hardcoded value aligned with
# referrer-policy-test-case.js. The prefix should be configured in one place.
def get_swapped_origin_netloc(netloc, subdomain_prefix = "www1."):
if netloc.startswith(subdomain_prefix):
return netloc[len(subdomain_prefix):]
else:
return subdomain_prefix + netloc
def create_redirect_url(request, cross_origin = False):
parsed = urlparse.urlsplit(request.url)
destination_netloc = parsed.netloc
if cross_origin:
        destination_netloc = get_swapped_origin_netloc(parsed.netloc)
destination_url = urlparse.urlunsplit(urlparse.SplitResult(
scheme = parsed.scheme,
netloc = destination_netloc,
path = parsed.path,
query = None,
fragment = None))
return destination_url
def redirect(url, response):
response.add_required_headers = False
response.writer.write_status(301)
response.writer.write_header("access-control-al
|
low-origin", "*")
response.writer.write_header("location", url)
response.writer.end_headers()
response.writer.write("")
def preprocess_redirection(request, response):
if "redirection" not in request.GET:
return False
redirection = request.GET["redirection"]
if redirection == "no-redirect":
return False
elif redirection == "keep-origin-redirect":
redirect_url = create_redirect_url(request, cross_origin = False)
elif redirection == "swap-origin-redirect":
redirect_url = create_redirect_url(request, cross_origin = True)
else:
raise ValueError("Invalid redirection type '%s'" % redirection)
redirect(redirect_url, response)
return True
def __noop(request, response):
return ""
def respond(request,
response,
status_code = 200,
content_type = "text/html",
payload_generator = __noop,
cache_control = "no-cache; must-revalidate",
access_control_allow_origin = "*"):
if preprocess_redirection(request, response):
return
response.add_required_headers = False
response.writer.write_status(status_code)
if access_control_allow_origin != None:
response.writer.write_header("access-control-allow-origin",
access_control_allow_origin)
response.writer.write_header("content-type", content_type)
response.writer.write_header("cache-control", cache_control)
response.writer.end_headers()
server_data = {"headers": json.dumps(request.headers, indent = 4)}
payload = payload_generator(server_data)
response.writer.write(payload)
|
peteboyd/lammps_interface
|
lammps_interface/Molecules.py
|
Python
|
mit
| 18,379
| 0.005985
|
"""
Molecule class.
"""
import numpy as np
from .water_models import SPC_E_atoms, TIP3P_atoms, TIP4P_atoms, TIP5P_atoms
from .gas_models import EPM2_atoms
from .structure_data import MolecularGraph
import networkx as nx
class Molecule(MolecularGraph):
#TODO(pboyd):add bonding calculations for the atoms in each molecular template.
# so we can add bond/angle/dihedral/improper potentials later on.
def rotation_from_vectors(self, v1, v2):
"""Obtain rotation matrix from sets of vectors.
the original set is v1 and the vectors to rotate
to are v2.
"""
# v2 = transformed, v1 = neutral
ua = np.array([np.mean(v1.T[0]), np.mean(v1.T[1]), np.mean(v1.T[2])])
ub = np.array([np.mean(v2.T[0]), np.mean(v2.T[1]), np.mean(v2.T[2])])
Covar = np.dot((v2 - ub).T, (v1 - ua))
try:
u, s, v = np.linalg.svd(Covar)
uv = np.dot(u,v[:3])
d = np.identity(3)
d[2,2] = np.linalg.det(uv) # ensures non-reflected solution
M = np.dot(np.dot(u,d), v)
return M
except np.linalg.linalg.LinAlgError:
return np.identity(3)
def rotation_matrix(self, axis, angle):
"""
returns a 3x3 rotation matrix based on the
provided axis and angle
"""
axis = np.array(axis)
axis = axis / np.linalg.norm(axis)
a = np.cos(angle / 2.)
b, c, d = -axis*np.sin(angle / 2.)
R = np.array([[a*a + b*b - c*c - d*d, 2*(b*c - a*d), 2*(b*d + a*c)],
[2*(b*c + a*d), a*a + c*c - b*b - d*d, 2*(c*d - a*b)],
[2*(b*d - a*c), 2*(c*d + a*b), a*a + d*d - b*b - c*c]])
return R
def compute_all_angles(self):
self.compute_angles()
self.compute_dihedrals()
self.compute_improper_dihedrals()
def str(self, atom_types={}, bond_types={}, angle_types={}, dihedral_types={}, improper_types={}):
""" Create a molecule template string for writing to a file.
Ideal for using fix gcmc or fix deposit in LAMMPS.
"""
line = "# %s\n\n"%(self._type_)
line += "%6i atoms\n"%len(self)
if(self.number_of_edges()):
line += "%6i bonds\n"%self.number_of_edges()
if(self.count_angles()):
line += "%6i angles\n"%(self.count_angles())
if(self.count_dihedrals()):
line += "%6i dihedrals\n"%(self.count_dihedrals())
if(self.count_impropers()):
line += "%6i impropers\n"%(self.count_impropers())
#line += "%12.5f mass"%()
#line += "%12.5f %12.5f %12.5f com"%()
line += "\nCoords\n\n"
for node, data in self.nodes_iter2(data=True):
line += "%6i %12.5f %12.5f %12.5f\n"%(tuple ([node]+data['cartesian_coordinates'].tolist()))
line += "\nTypes\n\n"
for node, data in self.nodes_iter2(data=True):
|
line += "%6i %6i # %s\n"%(node, data['ff_type_index'], data['force_field_type'])
line += "\nCharges\n\n"
for node, data in self.nodes_iter2(data=True):
line += "%6i %12.5f\n"%(node, data['charge'])
        #TODO(pboyd): add bonding, angles, dihedrals, impropers, etc.
if self.number_of_edges():
line += "\nBonds\n\n"
count = 0
for n1, n2, data in self.edges_iter2(data=True):
count += 1
line += "%6i %6i %6i %6i # %s %s\n"%(count, data['ff_type_index'], n1, n2,
self.node[n1]['force_field_type'],
self.node[n2]['force_field_type'])
if self.count_angles():
line += "\nAngles\n\n"
count = 0
for b, data in self.nodes_iter2(data=True):
try:
ang_data = data['angles']
for (a, c), val in ang_data.items():
count += 1
line += "%6i %6i %6i %6i %6i # %s %s(c) %s\n"%(count,
val['ff_type_index'], a, b, c,
self.node[a]['force_field_type'],
self.node[b]['force_field_type'],
self.node[c]['force_field_type'])
except KeyError:
pass
if self.count_dihedrals():
line += "\nDihedrals\n\n"
count = 0
for b, c, data in self.edges_iter2(data=True):
try:
dihed_data = data['dihedrals']
for (a, d), val in dihed_data.items():
count += 1
line += "%6i %6i %6i %6i %6i %6i # %s %s(c) %s(c) %s\n"%(count,
val['ff_type_index'], a, b, c, d,
self.node[a]['force_field_type'],
self.node[b]['force_field_type'],
self.node[c]['force_field_type'],
self.node[d]['force_field_type'])
except KeyError:
pass
if self.count_impropers():
line += "\nImpropers\n\n"
count = 0
for b, data in self.nodes_iter2(data=True):
try:
imp_data = data['impropers']
for (a, c, d), val in imp_data.items():
count += 1
line += "%6i %6i %6i %6i %6i %6i # %s %s (c) %s %s\n"%(count,
val['ff_type_index'], a, b, c, d,
self.node[a]['force_field_type'],
self.node[b]['force_field_type'],
self.node[c]['force_field_type'],
self.node[d]['force_field_type'])
except KeyError:
pass
return line
@property
def _type_(self):
return self.__class__.__name__
class CO2(Molecule):
"""Carbon dioxide parent class, containing functions applicable
to all CO2 models.
"""
@property
def O_coord(self):
"""Define the oxygen coordinates assuming carbon is centered at '0'.
angle gives the two oxygen atoms an orientation that deviates randomly
from the default (lying along the x-axis).
"""
try:
return self._O_coord
except AttributeError:
self._O_coord = self.RCO*np.array([[-1., 0., 0.],[1., 0., 0.]])
#if angle == 0.:
# return self._O_coord
#else:
# generate a random axis for rotation.
axis = np.random.rand(3)
angle = 180.*np.random.rand()
# rotate using the angle provided.
R = self.rotation_matrix(axis, np.radians(angle))
self._O_coord = np.dot(self._O_coord, R.T)
return self._O_coord
def approximate_positions(self, C_pos=None, O_pos1=None, O_pos2=None):
"""Input a set of approximate positions for the carbon
and oxygens of CO2, and determine the lowest RMSD
that would give the idealized model.
"""
C = self.C_coord
O1 = self.O_coord[0]
O2 = self.O_coord[1]
v1 = np.array([C, O1, O2])
v2 = np.array([C_pos, O_pos1, O_pos2])
R = self.rotation_from_vectors(v1, v2)
self.C_coord = C_pos
self._O_coord = np.dot(self._O_coord, R.T) + C_pos
for n in self.nodes_iter2():
if n == 1:
self.node[n]['cartesian_coordinates'] = self.C_coord
elif n == 2:
self.node[n]['cartesian_coordinates'] = self.O_coord[0]
elif n == 3:
self.node[n]['cartesian_coordinates'] = self.O_coord[1]
class Water(Molecule):
"""Water pa
|
pkuhad/django-student
|
students/urls.py
|
Python
|
mit
| 684
| 0.010234
|
from django.conf.urls import patterns, include, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
urlpatterns = patterns('',
    # Examples:
# url(r'^$', 'students.views.home', name='home'),
# url(r'^students/', include('students.foo.urls')),
# App 'Index Classic'
url(r"^index_classic/", include('students.index_classic.urls'), name="index_classic"),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
# url(r'^admin/', include(admin.site.urls)),
)
|
jeffamstutz/rfManta
|
scenes/csafe/CMake/make2cmake.py
|
Python
|
mit
| 768
| 0.011719
|
#!/usr/bin/python
import os,sys,string
if __name__ == "__main__":
# Check the argument list size
if (len(sys.argv) < 3):
sys.stderr.write("USAGE: " + sys.argv[0] + " <input file> <output file>\n")
sys.exit(1)
infilename = sys.argv[1]
outfilename = sys.argv[2]
# U is the mode to open the file with universal newline support.
infile = open(infilename, "rU")
    outfile = open(outfilename, "w")
# Now read the lines
lines = map( lambda x: string.strip(x, string.whitespace+"\\"),
infile.readlines() )
    infile.close()
# Now write them
outfile.write("SET( MANTA_SWIG_DEPEND\n")
for l in lines[1:]:
outfile.write(l + "\n")
outfile.write(")\n")
outfile.close()
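# --- Illustrative invocation (editorial addition, not part of the original script) ---
# The script turns a make-style dependency listing into a CMake SET() block, so a
# typical call (filenames here are hypothetical) looks like:
#
#   python make2cmake.py swig_dependencies.make swig_dependencies.cmake
#
# The first line of the input file is skipped and every remaining line is copied into
# the MANTA_SWIG_DEPEND list with trailing backslashes and whitespace stripped.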
|
yunstanford/sanic
|
examples/request_stream/server.py
|
Python
|
mit
| 1,683
| 0
|
from sanic import Sanic
from sanic.views import CompositionView
from sanic.views import HTTPMethodView
from sanic.views import stream as stream_decorator
from sanic.blueprints import Blueprint
from sanic.response import stream, text
bp = Blueprint('blueprint_request_stream')
app = Sanic('request_stream')
class SimpleView(HTTPMethodView):
@stream_decorator
async def post(self, request):
result = ''
while True:
            body = await request.stream.get()
if body is None:
break
result += body.decode('utf-8')
return text(result)
@app.post('/stream', stream=True)
async def handler(request):
async def streaming(response):
while True:
body = await request.stream.get()
if body is None:
break
body = body.decode('utf-8').replace('1', 'A')
await response.write(body)
return stream(streaming)
@bp.put('/bp_stream', stream=True)
async def bp_handler(request):
result = ''
while True:
body = await request.stream.get()
if body is None:
break
result += body.decode('utf-8').replace('1', 'A')
return text(result)
async def post_handler(request):
result = ''
while True:
body = await request.stream.get()
if body is None:
break
result += body.decode('utf-8')
return text(result)
app.blueprint(bp)
app.add_route(SimpleView.as_view(), '/method_view')
view = CompositionView()
view.add(['POST'], post_handler, stream=True)
app.add_route(view, '/composition_view')
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8000)
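# --- Illustrative client sketch (editorial addition, not part of the example server) ---
# One way to exercise the streaming endpoints above: the requests library sends a
# chunked request body when given a generator, so each yielded chunk arrives at
# request.stream.get() separately.  The URL assumes the server above is running
# locally on port 8000.
#
# import requests
#
# def chunks():
#     for part in (b'1111', b'2222', b'3333'):
#         yield part
#
# print(requests.post('http://localhost:8000/stream', data=chunks()).text)
# # expected output: 'AAAA22223333' (the /stream handler replaces '1' with 'A')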
|
jinankjain/zamboni
|
mkt/collections/tests/test_authorization.py
|
Python
|
bsd-3-clause
| 8,106
| 0.00037
|
import json
from urllib import urlencode
from nose.tools import ok_
from rest_framework.generics import GenericAPIView
from rest_framework.request import Request
from rest_framework.settings import api_settings
from access.middleware import ACLMiddleware
from amo.tests import TestCase
from users.models import UserProfile
from mkt.collections.authorization import (CanBeHeroAuthorization,
CuratorAuthorization,
StrictCuratorAuthorization)
from mkt.collections.tests import CollectionTestMixin
from mkt.site.fixtures import fixture
from test_utils import RequestFactory
class TestCuratorAuthorization(CollectionTestMixin, TestCase):
auth_class = CuratorAuthorization
fixtures = fixture('user_2519')
def setUp(self):
super(TestCuratorAuthorization, self).setUp()
self.collection = self.make_collection()
self.auth = self.auth_class()
self.user = UserProfile.objects.get(pk=2519)
self.profile = self.user
self.view = GenericAPIView()
def give_permission(self):
self.grant_permission(self.profile, 'Collections:Curate')
def make_curator(self):
self.collection.add_curator(self.profile)
def request(self, verb):
request = getattr(RequestFactory(), verb.lower())('/')
request.user = self.user
ACLMiddleware().process_request(request)
return request
def is_authorized(self, request):
return self.auth.has_permission(request, self.view)
def is_authorized_object(self, request):
return self.auth.has_object_permission(request, self.view,
self.collection)
def test_get_list(self):
ok_(self.is_authorized(self.request('GET')))
def test_get_list_permission(self):
self.give_permission()
ok_(self.is_authorized(self.request('GET')))
def test_post_list(self):
ok_(not self.is_authorized(self.request('POST')))
def test_post_list_permission(self):
self.give_permission()
ok_(self.is_authorized(self.request('POST')))
def test_delete_list(self):
ok_(not self.is_authorized(self.request('DELETE')))
def test_delete_list_permission(self):
self.give_permission()
ok_(self.is_authorized(self.request('DELETE')))
def test_get_detail(self):
ok_(self.is_authorized_object(self.request('GET')))
def test_get_detail_permission(self):
self.give_permission()
ok_(self.is_authorized_object(self.request('GET')))
def test_get_detail_curator(self):
self.make_curator()
ok_(self.is_authorized_object(self.request('GET')))
def test_get_detail_permission_curator(self):
self.give_permission()
self.make_curator()
ok_(self.is_authorized_object(self.request('GET')))
def test_post_detail(self):
ok_(not self.is_authorized_object(self.request('POST')))
def test_post_detail_permission(self):
self.give_permission()
ok_(self.is_authorized_object(self.request('POST')))
def test_post_detail_curator(self):
self.make_curator()
ok_(self.is_authorized_object(self.request('POST')))
def test_post_detail_permission_curator(self):
self.give_permission()
self.make_curator()
ok_(self.is_authorized_object(self.request('POST')))
def test_delete_detail(self):
ok_(not self.is_authorized_object(self.request('DELETE')))
def test_delete_detail_permission(self):
self.give_permission()
ok_(self.is_authorized_object(self.request('DELETE')))
def test_delete_detail_curator(self):
        self.make_curator()
ok_(not self.is_authorized_object(self.request('DELETE')))
def test_delete_detail_permission_curator(self):
self.give_permission()
self.make_curator()
        ok_(self.is_authorized_object(self.request('DELETE')))
class TestStrictCuratorAuthorization(TestCuratorAuthorization):
auth_class = StrictCuratorAuthorization
def test_get_list(self):
ok_(not self.is_authorized(self.request('GET')))
def test_get_detail(self):
ok_(not self.is_authorized_object(self.request('GET')))
class TestCanBeHeroAuthorization(CollectionTestMixin, TestCase):
enforced_verbs = ['POST', 'PUT']
fixtures = fixture('user_2519')
def setUp(self):
super(TestCanBeHeroAuthorization, self).setUp()
self.collection = self.make_collection()
self.auth = CanBeHeroAuthorization()
self.user = UserProfile.objects.get(pk=2519)
self.profile = self.user
self.view = GenericAPIView()
def give_permission(self):
self.grant_permission(self.profile, 'Collections:Curate')
def is_authorized_object(self, request):
return self.auth.has_object_permission(request, self.view,
self.collection)
def request(self, verb, qs=None, content_type='application/json',
encoder=json.dumps, **data):
if not qs:
qs = ''
request = getattr(RequestFactory(), verb.lower())
request = request('/?' + qs, content_type=content_type,
data=encoder(data) if data else '')
request.user = self.user
ACLMiddleware().process_request(request)
return Request(request, parsers=[parser_cls() for parser_cls in
api_settings.DEFAULT_PARSER_CLASSES])
def test_unenforced(self):
"""
Should always pass for GET requests.
"""
ok_(self.is_authorized_object(self.request('GET')))
def test_no_qs_modification(self):
"""
Non-GET requests should not be rejected if there is a can_be_true
querystring param (which hypothetically shouldn't do anything).
We're effectively testing that request.GET doesn't bleed into
request.POST.
"""
self.give_permission()
for verb in self.enforced_verbs:
request = self.request(verb, qs='can_be_hero=1')
ok_(not self.auth.hero_field_modified(request), verb)
def test_change_permission(self):
"""
Should pass if the user is attempting to modify the can_be_hero field
and has the permission.
"""
self.give_permission()
for verb in self.enforced_verbs:
request = self.request(verb, can_be_hero=True)
ok_(self.auth.hero_field_modified(request), verb)
def test_change_permission_urlencode(self):
"""
Should pass if the user is attempting to modify the can_be_hero field
and has the permission.
"""
self.give_permission()
for verb in self.enforced_verbs:
request = self.request(verb, encoder=urlencode,
content_type='application/x-www-form-urlencoded',
can_be_hero=True)
ok_(self.auth.hero_field_modified(request), verb)
def test_no_change_no_permission(self):
"""
Should pass if the user does not have the permission and is not
attempting to modify the can_be_hero field.
"""
for verb in self.enforced_verbs:
request = self.request(verb)
ok_(self.is_authorized_object(request), verb)
def test_no_change(self):
"""
Should pass if the user does have the permission and is not attempting
to modify the can_be_hero field.
"""
self.give_permission()
for verb in self.enforced_verbs:
request = self.request(verb)
ok_(self.is_authorized_object(request), verb)
def test_post_change_no_permission(self):
"""
Should not pass if the user is attempting to modify the can_be_hero
field without the permission.
"""
for verb in self.enforced_verbs:
request = self.request(verb, can_be_hero=True)
ok_(not self.is_authorized_object(request), verb)
|
mrmookie/btcrecover
|
make-unicode.py
|
Python
|
gpl-2.0
| 4,149
| 0.002651
|
#!/usr/bin/python
# make-unicode.py -- build the Unicode version of btcrecover from the ASCII version
# Copyright (C) 2014, 2015 Christopher Gurnee
#
# This file is part of btcrecover.
#
# btcrecover is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version
# 2 of the License, or (at your option) any later version.
#
# btcrecover is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/
# If you find this program helpful, please consider a small
# donation to the developer at the following Bitcoin address:
#
# 17LGpN2z62zp7RS825jXwYtE7zZ19Mxxu8
#
# Thank You!
from __future__ import print_function
import os.path as path
install_dir = path.dirname(__file__)
# This is a bit fragile, but it's probably good enough. It simply looks
# for certain strings, and comments or uncomments code between them.
def make_unicode_version(ascii_name, unicode_name):
ascii_version_path = path.join(install_dir, ascii_name)
unicode_version_path = path.join(install_dir, unicode_name)
if not path.isfile(ascii_version_path):
exit("can't find " + ascii_version_path)
if path.isfile (unicode_version_path) and \
path.getmtime(unicode_version_path) >= path.getmtime(ascii_version_path):
print("existing Unicode version "+unicode_name+" is up-to-date")
return False
print("making "+unicode_name)
with open(ascii_version_path, "rb") as ascii_version:
with open(unicode_version_path, "wb") as unicode_version:
# Search for the first "key" string
for line in ascii_version:
unicode_version.write(line)
if line.startswith("# Uncomment for Unicode support"):
break
# Uncomment the block of code up until the next "key" string
for line in ascii_version:
if line.startswith("# Uncomment for ASCII-only support"):
unicode_version.write(line)
break
unicode_version.write(line[1:] if line.startswith("#") else line)
# Comment out the next block of code up until the first empty line
for line in ascii_version:
if line.strip() == "":
unicode_version.write(line)
break
unicode_version.write("#")
unicode_version.write(line)
# Copy the rest of the file
for line in ascii_version:
unicode_version.write(line)
return True
if __name__ == '__main__':
import argparse, atexit, unittest
parser = argparse.ArgumentParser()
parser.add_argument("--no-quicktests", action="store_true", help="don't run the QuickTests suite")
parser.add_argument("--no-pause", action="store_true", help="don't prompt 'Press Enter to exit'")
args = parser.parse_args()
# By default, pause before exiting
    if not args.no_pause:
        atexit.register(lambda: raw_input("\nPress Enter to exit ..."))
# Build the Unicode versions of btcrecover and the test-btcr test suite
modified1 = make_unicode_version("btcrecover.py", "btcrecoveru.py")
modified2 = make_unicode_version("test-btcr.py", "test-btcru.py")
if not modified1 and not modified2:
print("nothing left to do, exiting")
# If at least one of the files were updated, by default run the QuickTests suite
elif not args.no_quicktests:
print("\nRunning quick tests\n")
test_btcr = __import__("test-btcru")
if unittest.TextTestRunner(buffer=True).run(test_btcr.QuickTests()).wasSuccessful():
print("\nStart test-btcru.py to run the full test suite.")
else:
exit(1)
|
jgrasser/pyRussianSquare
|
russianSquare.py
|
Python
|
gpl-2.0
| 27,299
| 0.0211
|
# RussianSquare - Tetris like block game
# Copyright (C) 2011 Joseph Grasser
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os, pygame, random, time
from pygame.locals import *
if not pygame.font: print('Warning, fonts disabled')
if not pygame.mixer: print('Warning, sound disabled')
#------------------------------------------------------------------------------------------#
# Loading scripts Joseph Grasser #
#------------------------------------------------------------------------------------------#
# This section is where all the all the functions for loading the alphabet file and #
# Hollow Square images is defined #
# -----------------------------------------------------------------------------------------#
def load_alphabet(alphabet_file, charwidth, charheight):
letterImage = pygame.image.load( alphabet_file )
letters = []
height = letterImage.get_height()
width = letterImage.get_width()
x = 0
y = 0
while y+charheight < height and x < width:
|
letters.append( letterImage.subsurface(pygame.Rect(x, y, charwidth, charheight) ) )
y = y + charheight
return letters
def big_Alpha():
char_width = 32
char_height = 40
alpha = load_alphabet(os.path.join('data', 'abcdefghijkl_big.tga' ), char_width, char_height)
alpha.extend( load_alphabet(os.path.join('data', 'mnopqrstuvwx_big.tga' ), char_width, char_height) )
alpha.extend( load_alphabet(os.path.join('data', 'yzplus_big.tga' ), char_width, char_height) )
alpha.extend( load_alphabet(os.path.join('data', 'numeralsBig.tga' ), char_width, char_height) )
return alpha
def small_Alpha():
char_width = 16
char_height = 20
alpha = load_alphabet(os.path.join('data', 'abcdefghijkl.tga' ), char_width, char_height)
alpha.extend( load_alphabet(os.path.join('data', 'mnopqrstuvwx.tga' ), char_width, char_height) )
alpha.extend( load_alphabet(os.path.join('data', 'yzplus.tga' ), char_width, char_height) )
alpha.extend( load_alphabet(os.path.join('data', 'numerals.tga' ), char_width, char_height) )
return alpha
def colorfy(image, color):
for x in range(0, image.get_width()):
for y in range(0, image.get_height()):
pixel = image.get_at((x,y))
image.set_at((x,y) , (color[0], color[1], color[2], pixel[3]))
return image
def shrinkImages(images, scale_x=-1, scale_y=-1):
for x in range(0, len(images)):
images[x] = pygame.transform.scale(images[x], (scale_x, scale_y))
return images
#------------------------------------------------------------------------------------------#
# Global Variables Joseph Grasser #
#------------------------------------------------------------------------------------------#
# This section is where all the global variables are defined and initilizd. #
# -----------------------------------------------------------------------------------------#
windowDimension = (620, 500)
screen = pygame.display.set_mode(windowDimension)
scene = 0
letterKey = ['a','b','c','d','e','f','g','h', 'i', 'j', 'k','l','m','n','o','p','q','r','s','t','u','v','w','x','y','z', '+', ' ', '0','1','2','3','4','5','6','7','8','9','.']
#Sprite Images
black = 0, 0, 0
image1 = pygame.image.load( os.path.join('data', 'blue_indent.png' ) )
image2 = pygame.image.load( os.path.join('data', 'blue_shiny.png' ) )
image3 = pygame.image.load( os.path.join('data', 'green_dot.png' ) )
image4 = pygame.image.load( os.path.join('data', 'yellow_cross.png' ) )
image5 = pygame.image.load( os.path.join('data', 'red_circle.png' ) )
images = [image1, image2, image3, image4, image5]
images = shrinkImages(images, 40, 40)
big_alphabet = big_Alpha()
small_alphabet = small_Alpha()
#------------------------------------------------------------------------------------------#
# GUI Utilities Joseph Grasser #
#------------------------------------------------------------------------------------------#
# Functions for drawing text, and borders on the screen. #
# -----------------------------------------------------------------------------------------#
def init():
pygame.init()
pygame.display.set_caption('Russian Squares v1.0')
pygame.mouse.set_visible(1)
def addText(screen, alpha, text, location):
x = location[0]
y = location[1]
text = text.lower()
for i in range(0, len(text)):
letter = text[i]
key = letterKey.index(letter)
screen.blit(alpha[key], (x+i*alpha[key].get_width()/2,y))
def addParagraph(screen, title, text, position):
addText(screen, big_alphabet, title, position)
text = text.split('\n')
y = 1
for line in text:
y = y + 1
addText(screen, small_alphabet, line, (position[0], position[1] + 16*y))
def drawBorder(Surface):
paneWidth = Surface.get_width()
paneHeight = Surface.get_height()
borderTop = pygame.image.load(os.path.join('data','gridLineTop.tga'))
borderBottom = pygame.image.load(os.path.join('data','gridLineBottom.tga'))
borderLeft = pygame.image.load(os.path.join('data','gridLineLeft.tga'))
borderRight = pygame.image.load(os.path.join('data','gridLineRight.tga'))
for x in range( 0, paneWidth // 40):
for y in range( 0, paneHeight // 40):
if x == 0 and y >= 0:
Surface.blit(borderLeft, (0, y*40))
if x >= 0 and y == 0:
Surface.blit(borderTop, (x*40,0))
if x+1 == paneWidth // 40 and y >= 0:
Surface.blit(borderRight, ((x)*40, y*40))
if x >= 0 and y+1 == paneHeight // 40:
Surface.blit(borderBottom, (x*40, (y)*40))
#------------------------------------------------------------------------------------------#
# GUI Components Joseph Grasser #
#------------------------------------------------------------------------------------------#
# Section contains the following gui components: Menus, Enterboxes, and Scoreboards #
# -----------------------------------------------------------------------------------------#
class GUI_Menu(pygame.Surface):
def __init__(self, commands, width, alphabet, border = 1):
pygame.Surface.__init__(self, (width+40, 20+len(commands)*alphabet[0].get_height()), SRCALPHA)
self.commands = commands
self.alphabet = alphabet
self.border = border
self.printOptions()
self.selectionSquare = self.load_selection_square()
self.angle = 0
self.index = 0
def load_selection_square(self):
image = pygame.image.load(os.path.join('data', 'Hammer_and_sickle.png'))
image = pygame.transform.scale(image, (self.alphabet[0].get_height(), self.alphabet[0].get_height()))
return image
def up(self):
if self.index > 0:
self.index -= 1
def down(self):
if self.index+1 < len(self.commands):
self.index += 1
def printOptions(self):
y = 10
for x in range(0, len(self.commands)):
addText(self, self.alphabet, self.commands[x], (40, y + self.alphabet[0].get_height()*x) )
def update(self):
self.fill(black)
|
Jaymebb/MIE
|
MIE/UTILS/quickumls.py
|
Python
|
mit
| 10,762
| 0.000093
|
# future statements for Python 2 compatibility
from __future__ import (
unicode_literals, division, print_function, absolute_import)
# built in modules
import os
import sys
import datetime
# installed modules
import spacy
from unidecode import unidecode
# project modules
try:
import toolbox
import constants
except ImportError:
from . import toolbox
from . import constants
class QuickUMLS(object):
def __init__(
self, quickumls_fp,
overlapping_criteria='score', threshold=0.7, window=5,
similarity_name='jaccard', min_match_length=3,
accepted_semtypes=constants.ACCEPTED_SEMTYPES,
verbose=False):
self.verbose = verbose
valid_criteria = {'length', 'score'}
err_msg = ('"{}" is not a valid overlapping_criteria. Choose '
'between {}'.format(
overlapping_criteria, ', '.join(valid_criteria)))
assert overlapping_criteria in valid_criteria, err_msg
self.overlapping_criteria = overlapping_criteria
valid_similarities = {'dice', 'jaccard', 'cosine', 'overlap'}
err_msg = ('"{}" is not a valid similarity name. Choose between '
'{}'.format(similarity_name, ', '.join(valid_similarities)))
        assert similarity_name in valid_similarities, err_msg
self.similarity_name = similarity_name
simstring_fp = os.path.join(quickumls_fp, 'umls-simstring.db')
cuisem_fp = os.path.join(quickumls_fp, 'cui-semtypes.db')
self.valid_punct = constants.UNICODE_DASHES
self.negations = constants.NEGATIONS
self.window = window
self.ngram_length = 3
self.threshold = threshold
self.min_match_length = min_match_length
self.to_lowercase_flag = os.path.exists(
os.path.join(quickumls_fp, 'lowercase.flag')
)
self.normalize_unicode_flag = os.path.exists(
os.path.join(quickumls_fp, 'normalize-unicode.flag')
)
self._info = None
self.accepted_semtypes = accepted_semtypes
self.ss_db = toolbox.SimstringDBReader(
simstring_fp, similarity_name, threshold
)
self.cuisem_db = toolbox.CuiSemTypesDB(cuisem_fp)
self.nlp = spacy.load('en')
def get_info(self):
return self.info
def get_accepted_semtypes(self):
return self.accepted_semtypes
@property
def info(self):
        # useful for caching of responses
if self._info is None:
self._info = {
'threshold': self.threshold,
'similarity_name': self.similarity_name,
'window': self.window,
'ngram_length': self.ngram_length,
'min_match_length': self.min_match_length,
'accepted_semtypes': sorted(self.accepted_semtypes),
'negations': sorted(self.negations),
'valid_punct': sorted(self.valid_punct)
}
return self._info
def _is_valid_token(self, tok):
return not(
tok.is_punct or tok.is_space or
tok.pos_ == 'ADP' or tok.pos_ == 'DET' or tok.pos_ == 'CONJ'
)
def _is_valid_start_token(self, tok):
return not(
tok.like_num or
(self._is_stop_term(tok) and tok.lemma_ not in self.negations) or
tok.pos_ == 'ADP' or tok.pos_ == 'DET' or tok.pos_ == 'CONJ'
)
def _is_stop_term(self, tok):
return tok.is_stop or tok.lemma_ == 'time'
def _is_valid_end_token(self, tok):
return not(
tok.is_punct or tok.is_space or self._is_stop_term(tok) or
tok.pos_ == 'ADP' or tok.pos_ == 'DET' or tok.pos_ == 'CONJ'
)
def _is_valid_middle_token(self, tok):
return (
not(tok.is_punct or tok.is_space) or
tok.is_bracket or
tok.text in self.valid_punct
)
def _is_ok_semtype(self, target_semtypes):
if self.accepted_semtypes is None:
ok = True
else:
ok = any(sem in self.accepted_semtypes for sem in target_semtypes)
return ok
def _is_longer_than_min(self, span):
return (span.end_char - span.start_char) >= self.min_match_length
def _make_ngrams(self, sent):
sent_length = len(sent)
        # do not include determiners inside a span
skip_in_span = {token.i for token in sent if token.pos_ == 'DET'}
# invalidate a span if it includes any on these symbols
invalid_mid_tokens = {
token.i for token in sent if not self._is_valid_middle_token(token)
}
for i in toolbox.xrange3(sent_length):
tok = sent[i]
if not self._is_valid_token(tok):
continue
# do not consider this token by itself if it is
# a number or a stopword.
if self._is_valid_start_token(tok):
compensate = False
else:
compensate = True
span_end = min(sent_length, i + self.window) + 1
# we take a shortcut if the token is the last one
# in the sentence
if (
i + 1 == sent_length and # it's the last token
self._is_valid_end_token(tok) and # it's a valid end token
                len(tok) >= self.min_match_length # it's of minimum length
):
yield(tok.idx, tok.idx + len(tok), tok.text)
for j in toolbox.xrange3(i + 1, span_end):
if compensate:
compensate = False
continue
if sent[j - 1] in invalid_mid_tokens:
break
if not self._is_valid_end_token(sent[j - 1]):
continue
span = sent[i:j]
if not self._is_longer_than_min(span):
continue
yield (
span.start_char, span.end_char,
''.join(token.text_with_ws for token in span
if token.i not in skip_in_span).strip()
)
def _get_all_matches(self, ngrams):
matches = []
for start, end, ngram in ngrams:
ngram_normalized = ngram
if self.normalize_unicode_flag:
ngram_normalized = unidecode(ngram_normalized)
# make it lowercase
if self.to_lowercase_flag:
ngram_normalized = ngram_normalized.lower()
# if the term is all uppercase, it might be the case that
# no match is found; so we convert to lowercase;
            # however, this is never needed if the string is lowercased
# in the step above
if not self.to_lowercase_flag and ngram_normalized.isupper():
ngram_normalized = ngram_normalized.lower()
prev_cui = None
|
ngram_cands = list(self.ss_db.get(ngram_normalized))
ngram_matches = []
for match in ngram_cands:
cuisem_match = sorted(self.cuisem_db.get(match))
for cui, semtypes, preferred in cuisem_match:
match_similarity = toolbox.get_similarity(
x=ngram_normalized,
y=match,
n=self.ngram_length,
similarity_name=self.similarity_name
)
if not self._is_ok_semtype(semtypes):
continue
if prev_cui is not None and prev_cui == cui:
if match_similarity > ngram_matches[-1]['similarity']:
ngram_matches.pop(-1)
else:
continue
prev_cui = cui
ngram_matches.append(
{
'start': start,
'end': end,
'ngram': ngram,
'term': toolbox.safe_un
|
Yajo/maintainer-tools
|
template/module/tests/test_something.py
|
Python
|
agpl-3.0
| 1,454
| 0
|
# Copyright <YEAR(S)> <AUTHOR(S)>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo.tests.common import HttpCase, TransactionCase
class SomethingCase(TransactionCase):
def setUp(self, *args, **kwargs):
        super(SomethingCase, self).setUp(*args, **kwargs)
# TODO Replace this for something useful or delete this method
self.do_something_before_all_tests()
def tearDown(self, *args, **kwargs):
# TODO Replace this for something useful or delete this method
self.do_something_after_all_tests()
return super(SomethingCase, self).tearDown(*args, **kwargs)
def test_something(self):
"""First line of docstring appears in test logs.
Other lines do not.
Any method starting with ``test_`` will be tested.
"""
pass
class UICase(HttpCase):
post_install = True
at_install = False
def test_ui_web(self):
"""Test backend tests."""
self.phantom_js(
"/web/tests?debug=assets&module=module_name",
"",
login="admin",
)
def test_ui_website(self):
"""Test frontend tour."""
self.phantom_js(
url_path="/?debug=assets",
code="odoo.__DEBUG__.services['web.Tour']"
".run('test_module_name', 'test')",
ready="odoo.__DEBUG__.services['web.Tour'].tours.test_module_name",
login="admin")
|
StupidTortoise/personal
|
python/oracle_test.py
|
Python
|
gpl-2.0
| 1,079
| 0.016684
|
# -*- coding: utf-8 -*-
import cx_Oracle
db = cx_Oracle.connect("username", "password", "10.17.1.220:1521/db")
cursor = db.cursor()
cursor.execute("select loginid from sys_user")
for loginid in cursor:
print("loginid: ", loginid)
cursor.close()
cursor = db.cursor()
#插入一条记录
cursor.execute("""insert into tb_user values(1,'admin','password')""");
#再插入一条数据
param = {'id': 2, 'n': 'admin', 'p': 'password'}
cursor.execute('insert into tb_user values(:id,:n,:p)', param);
#一次插入多条数据,参数为字典列表形式
param = [{'id': 3, 'n': 'admin', 'p': 'password'}, {'id': 4, 'n': 'admin','p': 'password'}, {'id': 5, 'n': 'admin', 'p': 'password'}];
cursor.executemany('insert into tb_user values(:id,:n,:p)', param);
#再一次插入多条数据
param = [];
#生成5条插入数据,参数为元组列表形式
for i in range(6, 11): # [6,7,8,9,10]
    param.append((i, 'user' + str(i), 'password' + str(i)))
#插入数据
cursor.executemany('insert into tb_user values(:1,:2,:3)', param);
cursor.close()
db.commit()
db.close()
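# --- Illustrative read-back sketch (editorial addition, not part of the original script) ---
# The inserts above can be verified with a plain SELECT; cx_Oracle cursors are iterable
# and also support fetchall().  Connection details repeat the placeholders used earlier.
#
# db = cx_Oracle.connect("username", "password", "10.17.1.220:1521/db")
# cursor = db.cursor()
# cursor.execute("select * from tb_user")
# for row in cursor.fetchall():
#     print(row)
# cursor.close()
# db.close()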
|
pombredanne/neomodel
|
neomodel/cardinality.py
|
Python
|
mit
| 2,569
| 0.001557
|
from .relationship_manager import RelationshipManager, ZeroOrMore # noqa
class ZeroOrOne(RelationshipManager):
description = "zero or one relationship"
def single(self):
nodes = super(ZeroOrOne, self).all()
if len(nodes) == 1:
return nodes[0]
if len(nodes) > 1:
raise CardinalityViolation(self, len(nodes))
def all(self):
node = self.single()
return [node] if node else []
def connect(self, obj, properties=None):
if len(self):
raise AttemptedCardinalityViolation(
"Node already has {0} can't connect more".format(self))
else:
return super(ZeroOrOne, self).connect(obj, properties)
class OneOrMore(RelationshipManager):
description = "one or more relationships"
def single(self):
nodes = super(OneOrMore, self).all()
if nodes:
return nodes[0]
raise CardinalityViolation(self, 'none')
def all(self):
nodes = super(OneOrMore, self).all()
if nodes:
return nodes
raise CardinalityViolation(self, 'none')
def disconnect(self, obj):
if super(OneOrMore, self).__len__() < 2:
raise AttemptedCardinalityViolation("One or more expected")
return super(OneOrMore, self).disconnect(obj)
class One(RelationshipManager):
description = "one relationship"
def single(self):
nodes = super(One, self).all()
if nodes:
if len(nodes) == 1:
return nodes[0]
else:
raise CardinalityViolation(self, len(nodes))
else:
raise CardinalityViolation(self, 'none')
def all(self):
return [self.single()]
def disconnect(self, obj):
        raise AttemptedCardinalityViolation("Cardinality one, cannot disconnect use reconnect")
def connect(self, obj, properties=None):
if not hasattr(self.source, '_id'):
raise ValueError("Node has not been saved cannot connect!")
if len(self):
raise AttemptedCardinalityViolation("Node already has one relationship")
else:
return super(One, self).connect(obj, properties)
class AttemptedCardinalityViolation(Exception):
pass
class CardinalityViolation(Exception):
def __init__(self, rel_manager, actual):
self.rel_manager = str(rel_manager)
self.actual = str(actual)
def __str__(self):
return "CardinalityViolation: Expected {0} got {1}".format(self.rel_manager, self.actual)
|
mountainstorm/urwid
|
urwid/container.py
|
Python
|
lgpl-2.1
| 84,566
| 0.002814
|
#!/usr/bin/python
#
# Urwid container widget classes
# Copyright (C) 2004-2012 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from itertools import chain, repeat
from urwid.util import is_mouse_press
from urwid.widget import (Widget, Divider, FLOW, FIXED, PACK, BOX, WidgetWrap,
GIVEN, WEIGHT, LEFT, RIGHT, RELATIVE, TOP, BOTTOM, CLIP, RELATIVE_100)
from urwid.decoration import (Padding, Filler, calculate_left_right_padding,
calculate_top_bottom_filler, normalize_align, normalize_width,
normalize_valign, normalize_height, simplify_align, simplify_width,
simplify_valign, simplify_height)
from urwid.monitored_list import MonitoredList, MonitoredFocusList
from urwid.canvas import (CompositeCanvas, CanvasOverlay, CanvasCombine,
SolidCanvas, CanvasJoin)
class WidgetContainerMixin(object):
"""
Mixin class for widget containers implementing common container methods
"""
def __getitem__(self, position):
"""
Container short-cut for self.contents[position][0].base_widget
which means "give me the child widget at position without any
widget decorations".
This allows for concise traversal of nested container widgets
such as:
my_widget[position0][position1][position2] ...
"""
return self.contents[position][0].base_widget
def get_focus_path(self):
"""
Return the .focus_position values starting from this container
and proceeding along each child widget until reaching a leaf
(non-container) widget.
"""
out = []
w = self
while True:
try:
p = w.focus_position
except IndexError:
return out
out.append(p)
w = w.focus.base_widget
def set_focus_path(self, positions):
"""
Set the .focus_position property starting from this container
widget and proceeding along newly focused child widgets. Any
        failed assignment due to incompatible position types or invalid
positions will raise an IndexError.
This method may be used to restore a particular widget to the
focus by passing in the value returned from an earlier call to
get_focus_path().
positions -- sequence of positions
"""
w = self
for p in positions:
if p != w.focus_position:
w.focus_position = p # modifies w.focus
w = w.focus.base_widget
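    # Illustrative round-trip (the top_widget name and nested layout are assumed,
    # built elsewhere):
    #     path = top_widget.get_focus_path()   # e.g. [1, 0, 2]
    #     ...user moves the focus elsewhere...
    #     top_widget.set_focus_path(path)      # restore the saved focus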
def get_focus_widgets(self):
"""
Return the .focus values starting from this container
and proceeding along each child widget until reaching a leaf
(non-container) widget.
Note that the list does not contain the topmost container widget
(i.e, on which this method is called), but does include the
lowest leaf widget.
"""
out = []
w = self
while True:
w = w.base_widget.focus
if w is None:
return out
out.append(w)
class WidgetContainerListContentsMixin(object):
"""
Mixin class for widget containers whose positions are indexes into
a list available as self.contents.
"""
def __iter__(self):
"""
Return an iterable of positions for this container from first
to last.
"""
return xrange(len(self.contents))
def __reversed__(self):
"""
Return an iterable of positions for this container from last
to first.
"""
return xrange(len(self.contents) - 1, -1, -1)
class GridFlowError(Exception):
pass
class GridFlow(WidgetWrap, WidgetContainerMixin, WidgetContainerListContentsMixin):
"""
The GridFlow widget is a flow widget that renders all the widgets it
contains the same width and it arranges them from left to right and top to
bottom.
"""
def sizing(self):
return frozenset([FLOW])
def __init__(self, cells, cell_width, h_sep, v_sep, align):
"""
:param cells: list of flow widgets to display
:param cell_width: column width for each cell
:param h_sep: blank columns between each cell horizontally
:param v_sep: blank rows between cells vertically
(if more than one row is required to display all the cells)
:param align: horizontal alignment of cells, one of\:
'left', 'center', 'right', ('relative', percentage 0=left 100=right)
"""
self._contents = MonitoredFocusList([
(w, (GIVEN, cell_width)) for w in cells])
self._contents.set_modified_callback(self._invalidate)
self._contents.set_focus_changed_callback(lambda f: self._invalidate())
self._contents.set_validate_contents_modified(self._contents_modified)
self._cell_width = cell_width
self.h_sep = h_sep
self.v_sep = v_sep
self.align = align
self._cache_maxcol = None
self.__super.__init__(None)
# set self._w to something other than None
self.get_display_widget(((h_sep+cell_width)*len(cells),))
def _invalidate(self):
self._cache_maxcol = None
self.__super._invalidate()
def _contents_modified(self, slc, new_items):
for item in new_items:
try:
w, (t, n) = item
if t != GIVEN:
raise ValueError
except (TypeError, ValueError):
raise GridFlowError("added content invalid %r" % (item,))
def _get_cells(self):
ml = MonitoredList(w for w, t in self.contents)
def user_modified():
self._set_cells(ml)
ml.set_modified_callback(user_modified)
return ml
def _set_cells(self, widgets):
focus_position = self.focus_position
self.contents = [
(new, (GIVEN, self._cell_width)) for new in widgets]
if focus_position < len(widgets):
self.focus_position = focus_position
cells = property(_get_cells, _set_cells, doc="""
A list of the widgets in this GridFlow
    .. note:: only for backwards compatibility. You should use the new
        standard container property :attr:`contents` to modify
GridFlow contents.
""")
def _get_cell_width(self):
return self._cell_width
def _set_cell_width(self, width):
focus_position = self.focus_position
self.contents = [
(w, (GIVEN, width)) for (w, options) in self.contents]
self.focus_position = focus_position
self._cell_width = width
cell_width = property(_get_cell_width, _set_cell_width, doc="""
The width of each cell in the GridFlow. Setting this value affects
all cells.
""")
def _get_contents(self):
return self._contents
def _set_contents(self, c):
self._contents[:] = c
contents = property(_get_contents, _set_contents, doc="""
The contents of this GridFlow as a list of (widget, options)
tuples.
options is currently a tuple in the form `('fixed', number)`.
number is the number of screen columns to allocate to this cell.
'fixed' is the only type accepted at this time.
This list may be modified like a normal list and the GridFlow
widget will update automatically.
|
jihunhamm/Crowd-ML
|
client/python/loss_nndemo1.py
|
Python
|
apache-2.0
| 3,849
| 0.030657
|
import numpy as np
from scipy.optimize import check_grad
## Two-layer NN with ReLU
# Two-layer NN, with 200 units per layer, with ReLU ai = max(0, oi)
# X - (W01) - Layer1 - (W12) - Layer2 - (W23) - Output
# ((D+1)*nh) + ((nh+1)*nh) + ((nh+1)*K)
nh = 200
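# Worked example (assumed configuration, matching self_test1 below): with D = 100
# inputs, K = 2 classes and nh = 200 hidden units, the flat parameter vector w has
# (100+1)*200 + (200+1)*200 + (200+1)*2 = 20200 + 40200 + 402 = 60802 entries,
# which is exactly the size allocated by init() and unpacked by parseParams().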
def getAvgGradient(w, X, y, L, K):
[N,D] = X.shape
W01,b1,W12,b2,W23,b3 = parseParams(w,D,K)
# Forward pass
h1 = np.maximum(0, np.dot(X, W01) + np.tile(b1,(N,1))) # N x nh, ReLU
h2 = np.maximum(0, np.dot(h1, W12) + np.tile(b2,(N,1))) # N x nh, ReLU
scores = np.dot(h2, W23) + np.tile(b3,(N,1)) # N x K
exp_scores = np.exp(scores-np.tile(np.max(scores,axis=1,keepdims=True),(1,K)))
probs = exp_scores / np.tile(exp_scores.sum(axis=1,keepdims=True),(1,K)) # N x K
l = -np.log(probs[range(N),y]).mean() + .5*L*((W01**2).sum()+(W12**2).sum()+(W23**2).sum())
# Backward pass
dscores = probs # N x K
dscores[range(N),y] -= 1
#dscores /= N
dW23 = np.dot(h2.T, dscores)/N + L*W23 # nh x K
db3 = np.sum(dscores, axis=0, keepdims=True)/N # nh x 1
dh2 = np.dot(dscores, W23.T) # N x K x K x nh = N x nh
dh2[h2 <= 0.] = 0.
dW12 = np.dot(h1.T, dh2)/N + L*W12
db2 = np.sum(dh2, axis=0, keepdims=True)/N
dh1 = np.dot(dh2, W12.T)
dh1[h1 <= 0.] = 0.
dW01 = np.dot(X.T, dh1)/N + L*W01
db1 = np.sum(dh1, axis=0, keepdims=True)/N
g = np.concatenate((dW01.flatten(), db1.flatten(), dW12.flatten(), db2.flatten(), dW23.flatten(), db3.flatten()), axis=0)
return (g, l)
def predict(w, X, K):
N,D = X.shape
W01,b1,W12,b2,W23,b3 = parseParams(w,D,K)
# Forward pass
h1 = np.maximum(0, np.dot(X, W01) + np.tile(b1,(N,1))) # N x nh, ReLU
h2 = np.maximum(0, np.dot(h1, W12) + np.tile(b2,(N,1))) # N x nh, ReLU
scores = np.dot(h2, W23) + np.tile(b3,(N,1)) # N x K
#exp_scores = np.exp(scores-np.tile(np.max(scores,axis=1,keepdims=True),(1,K)))
#probs = exp_scores / np.tile(exp_scores.sum(axis=1,keepdims=True),(1,K)) # N x K
#ypred = np.argmax(probs,axis=1)
ypred = np.argmax(scores,axis=1)
return ypred
def parseParams(w,D,K):
cnt = 0
W01 = w[:D*nh].reshape((D,nh))
cnt += D*nh
b1 = w[cnt:cnt+nh].reshape((1,nh))
cnt += nh
W12 = w[cnt:cnt+nh*nh].reshape((nh,nh))
cnt += nh*nh
b2 = w[cnt:cnt+nh].reshape((1,nh))
cnt += nh
W23 = w[cnt:cnt+nh*K].reshape((nh,K))
cnt += nh*K
b3 = w[cnt:cnt+K].reshape((1,K))
cnt += K
if (cnt != w.size):
print 'Error: wrong param size'
exit()
return (W01,b1,W12,b2,W23,b3)
def init(D,K):
d = (D+1)*nh + (nh+1)*nh + (nh+1)*K
w = 1.e-1*np.random.normal(size=(d,))
#w = np.zeros((d,))
return w
def loss(w, X, y, L, K):
_,l = getAvgGradient(w, X, y, L, K)
return l
def grad(w, X, y, L, K):
g,_ = getAvgGradient(w, X, y, L, K)
return g
def self_test1():
D = 100
K = 2
N = 10
L = 1e-6
# check parsing
W01 = np.random.randn(D,nh)
b1 = np.random.randn(1,nh)
W12 = np.random.randn(nh,nh)
b2 = np.random.randn(1,nh)
W23 = np.random.randn(nh,K)
b3 = np.random.randn(1,K)
w = np.concatenate((W01.flatten(), b1.flatten(), W12.flatten(), b2.flatten(), W23.flatten(), b3.flatten()), axis=0)
W01_,b1_,W12_,b2_,W23_,b3_ = parseParams(w,D,K)
print ((W01-W01_)**2).sum()/(W01**2).sum()
print ((b1-b1_)**2).sum()/(b1**2).sum()
print ((W12-W12_)**2).sum()/(W12**2).sum()
print ((b2-b2_)**2).sum()/(b2**2).sum()
print ((W23-W23_)**2).sum()/(W23**2).sum()
print ((b3-b3_)**2).sum()/(b3**2).sum()
w = init(D, K)
w = 1e-0*np.random.normal(size=w.size)
X = np.random.normal(size=(N,D))
y = np.random.randint(K,size=(N,))
err = check_grad(loss, grad, w, X, y, L, K)
print err
|
boskee/simplui
|
setup.py
|
Python
|
bsd-3-clause
| 803
| 0.05604
|
import ez_setup
ez_setup.use_setuptools()
from setuptools import setup, find_packages
setup(
name = 'simplui',
version = '1.0.4',
author = 'Tristam MacDonald',
author_email = 'swiftcoder@gmail.com',
    description = 'Light-weight GUI toolkit for pyglet',
    url = 'http://simplui.googlecode.com/',
platforms = ['all'],
license = 'BSD',
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Topic :: Scientific/Engineering :: Human Machine Interfaces',
'Topic :: Software Development :: User Interfaces',
],
packages = find_packages(),
install_requires = ['simplejson >= 2.0', 'pyglet >= 1.1']
)
|
bobintetley/asm3
|
import/org/org_sb1875.py
|
Python
|
gpl-3.0
| 11,412
| 0.004118
|
#!/usr/bin/python
import asm
"""
Import script for sb1875 csv files
1st March, 2019
"""
START_ID = 1000
ANIMAL_FILENAME = "/home/robin/tmp/asm3_import_data/sb1875_csv/ASM_Animal_Master.csv"
LOG_FILENAME = "/home/robin/tmp/asm3_import_data/sb1875_csv/ASM_Animal_Log.csv"
PERSON_FILENAME = "/home/robin/tmp/asm3_import_data/sb1875_csv/ASM_People.csv"
def getdate(d):
if d == "02/01/1900": return None # Weird quirk of their files
return asm.getdate_ddmmyyyy(d)
# --- START OF CONVERSION ---
owners = []
movements = []
animals = []
animaltests = []
animalmedicals = []
animalvaccinations = []
logs = []
ppa = {}
ppo = {}
asm.setid("animal", START_ID)
asm.setid("animaltest", START_ID)
asm.setid("animalmedical", START_ID)
asm.setid("animalmedicaltreatment", START_ID)
asm.setid("animalvaccination", START_ID)
asm.setid("log", START_ID)
asm.setid("owner", START_ID)
asm.setid("adoption", START_ID)
print "\\set ON_ERROR_STOP\nBEGIN;"
print "DELETE FROM animal WHERE ID >= %s;" % START_ID
print "DELETE FROM animalmedical WHERE ID >= %s;" % START_ID
print "DELETE FROM animalmedicaltreatment WHERE ID >= %s;" % START_ID
print "DELETE FROM animaltest WHERE ID >= %s;" % START_ID
print "DELETE FROM animalvaccination WHERE ID >= %s;" % START_ID
print "DELETE FROM log WHERE ID >= %s;" % START_ID
print "DELETE FROM owner WHERE ID >= %s;" % START_ID
print "DELETE FROM adoption WHERE ID >= %s;" % START_ID
print "DELETE FROM media;" # They want media cleared out, shouldn't be enough to worry about orphans
# Create an unknown owner
uo = asm.Owner()
owners.append(uo)
uo.OwnerSurname = "Unknown Owner"
uo.OwnerName = uo.OwnerSurname
# Deal with people first
for d in asm.csv_to_list(PERSON_FILENAME, remove_non_ascii=True):
# Each row contains a person
o = asm.Owner()
owners.append(o)
ppo[d["People_Ctr"]] = o
o.OwnerForeNames = d["PERSONFIRSTNAME"]
o.OwnerSurname = d["PERSONLASTNAME"]
o.OwnerName = o.OwnerForeNames + " " + o.OwnerSurname
o.OwnerAddress = d["PERSONADDRESS"]
o.OwnerTown = d["PERSONCITY"]
o.OwnerCounty = d["PERSONSTATE"]
o.OwnerPostcode = d["PERSONZIPCODE"]
o.EmailAddress = d["PERSONEMAIL"]
o.WorkTelephone = d["PERSONWORKPHONE"]
o.MobileTelephone = d["PERSONCELLPHONE"]
o.IsBanned = asm.iif(d["PERSONFLAGS"].find("Banned") != -1, 1, 0)
o.IsDonor = asm.iif(d["PERSONDONOR"] == "1", 1, 0)
o.IsFosterer = asm.iif(d["PERSONFOSTERER"] == "1", 1, 0)
    o.Comments = d["PERSONCOMMENTS"]
o.JurisdictionID = asm.jurisdiction_from_db(d["PERSONADDITIONALCOUNCILNAME"])
# Animal intake records
for d in asm.csv_to_list(ANIMAL_FILENAME, remove_non_ascii=True):
# Each row contains an animal with intake info:
a = asm.Animal()
animals.append(a)
ppa[d["Animal_Ident
|
ifier"]] = a
a.AnimalTypeID = asm.type_from_db(d["Pound_Reason"])
a.SpeciesID = asm.species_id_for_name(d["Species"])
a.AnimalName = d["Name"]
if a.AnimalName.strip() == "":
a.AnimalName = "(unknown)"
a.DateBroughtIn = getdate(d["Date_Admitted"]) or asm.today()
if d["Date_Of_Birth"].strip() != "":
a.DateOfBirth = getdate(d["Date_Of_Birth"])
if a.DateOfBirth is None:
a.DateOfBirth = asm.subtract_days(a.DateBroughtIn, 365)
a.CreatedDate = a.DateBroughtIn
a.LastChangedDate = a.DateBroughtIn
#asm.additional_field("Legacy_Tag_No", 0, a.ID, d["Tag_no"])
#asm.additional_field("Legacy_Tag_No_Q", 0, a.ID, d["Tag_no_qualifier"])
a.ShortCode = "%s:%s" % (d["Tag_no"], d["Tag_no_qualifier"])
a.ShelterCode = a.ShortCode
a.BreedID = asm.breed_from_db(d["Breed"], 1)
a.BreedName = d["Breed"]
if d["Cross_Breed"] != "":
a.Breed2ID = asm.breed_from_db(d["Cross_Breed"], 1)
a.CrossBreed = 1
a.BreedName = "%s / %s" % (d["Breed"], d["Cross_Breed"])
#a.BaseColourID = asm.colour_id_for_names(d["Base_Colour"], d["Secondary_Colour"])
a.BaseColourID = asm.colour_from_db(d["Base_"])
a.AnimalComments = d["Notes"]
a.Sex = asm.getsex_mf(d["Sex"])
a.Size = asm.size_id_for_name(d["Size"])
a.NeuteredDate = getdate(d["Date_Desexed"])
if a.NeuteredDate is not None: a.Neutered = 1
a.IsNotForRegistration = 0
a.IsNotAvailableForAdoption = 1
a.IdentichipNumber = d["Microchip_no"]
a.Identichip2Number = d["Alternate_Chip_No"]
asm.additional_field("MChipType", 5, a.ID, d["Microchip_Type"]) # MChipType additional field
if a.IdentichipNumber != "": a.Identichipped = 1
if a.IdentichipNumber == "0":
a.Identichipped = 0
a.IdentichipNumber = ""
a.IdentichipDate = asm.getdate_ddmmyyyy(d["Date_Microchipped"])
a.IsGoodWithCats = 2
a.IsGoodWithDogs = 2
a.IsGoodWithChildren = 2
a.HouseTrained = 0
a.AcceptanceNumber = d["Litter No"]
comments = "Breed: " + d["Breed"] + "/" + d["Cross_Breed"]
comments += "\nSpecies: " + d["Species"]
comments += "\nMicrochip Type: " + d["Microchip_Type"]
comments += "\nSize: " + d["Size"]
comments += "\nCondition: " + d["Animal_Condition"]
a.HiddenAnimalDetails = comments
entrycomments = "Pound Reason: " + d["Pound_Reason"]
entrycomments += "\nWhere Found: " + d["Where_found"]
entrycomments += "\nStreet Found: " + d["Street_Found_In"]
a.ReasonForEntry = entrycomments
a.EntryReasonID = 17 # Surrender
#if d["InShelterSearchFlag"] == "N":
# a.Archived = 1
if d["Location"] != "": a.ShelterLocation = asm.location_from_db(d["Location"])
if d["Unit"] != "": a.ShelterLocationUnit = d["Unit"]
# Animal log, recording medical history and linking adoptions/surrenderers/etc
for d in asm.csv_to_list(LOG_FILENAME, remove_non_ascii=True):
if d["Animal_Identifier"] not in ppa: continue
a = ppa[d["Animal_Identifier"]]
o = uo
if d["People_ctr"] != "": o = ppo[d["People_ctr"]]
ed = getdate(d["Entry_date"])
if not ed: continue
if d["Weight"] != "0" and d["Weight"] != "":
try:
a.Weight = float(d["Weight"])
except ValueError:
pass
l = asm.Log()
logs.append(l)
l.LogTypeID = 4 # Weight
l.LinkID = a.ID
l.LinkType = 0
l.Date = ed
l.Comments = d["Weight"]
if d["Action"] == "Admission" and d["Log_Description"] == "Owner Surrender" and o:
a.OriginalOwnerID = o.ID
a.BroughtInByOwnerID = o.ID
a.DateBroughtIn = ed
a.CreatedBy = d["User_Id"]
elif d["Action"] == "Veterinary" and d["Log_Description"] == "Desexed":
a.Neutered = 1
a.NeuteredDate = ed
animalmedicals.append( asm.animal_regimen_single(a.ID, ed, d["Log_Description"], "N/A", d["Log_Notes"]) )
elif d["Action"] == "Veterinary":
animalmedicals.append( asm.animal_regimen_single(a.ID, ed, d["Log_Description"], "N/A", d["Log_Notes"]) )
elif d["Action"] == "Vaccination":
vacctypes = {
"C3": 16,
"C5": 18,
"F3": 22,
"F4": 23
}
av = asm.AnimalVaccination()
animalvaccinations.append(av)
av.AnimalID = a.ID
av.VaccinationID = 8
for k, i in vacctypes.iteritems():
if d["Log_Description"].find(k) != -1: av.VaccinationID = i
av.DateRequired = ed
av.DateOfVaccination = ed
av.Comments = "Type: %s\n%s" % (d["Log_Description"], d["Log_Notes"])
av.CreatedBy = d["User_Id"]
elif d["Action"] == "Foster Care" and d["Log_Description"] == "Foster Care":
o.IsFosterer = 1
m = asm.Movement()
m.AnimalID = a.ID
m.OwnerID = o.ID
m.MovementType = 2
m.MovementDate = ed
m.Comments = d["Log_Notes"]
a.Archived = 1
a.ActiveMovementID = m.ID
a.ActiveMovementDate = m.MovementDate
a.ActiveMovementType = 2
a.LastChangedDate = ed
movements.append(m)
elif d["Action"] == "Foster Care" and d["Log_Description"] == "Carer Return":
# Return this person's most recent foster
for m in movements:
if m.AnimalID == a.ID and m.ReturnDate is None and m.MovementType == 2 and m.OwnerID == o.ID:
|
dantebarba/docker-media-server
|
plex/Subliminal.bundle/Contents/Code/logger.py
|
Python
|
gpl-3.0
| 1,288
| 0.004658
|
import logging
def registerLoggingHander(dependencies):
plexHandler = PlexLoggerHandler()
for dependency in dependencies:
Log.Debug("Registering LoggerHandler for
|
dependency: %s" % dependency)
log = logging.getLogger(dependency)
log.setLevel('DEBUG')
log.addHandler(plexHandler)
class PlexLoggerHandler(logging.StreamHandler):
def __init__(self, level=0):
super(PlexLoggerHandler, self).__init__(level)
    def getFormattedString(self, record):
return record.name + ": " + record.getMessage()
def emit(self, record):
if record.levelno == logging.DEBUG:
Log.Debug(self.getFormattedString(record))
elif record.levelno == logging.INFO:
Log.Info(self.getFormattedString(record))
elif record.levelno == logging.WARNING:
Log.Warn(self.getFormattedString(record))
elif record.levelno == logging.ERROR:
Log.Error(self.getFormattedString(record))
elif record.levelno == logging.CRITICAL:
Log.Critical(self.getFormattedString(record))
elif record.levelno == logging.FATAL:
Log.Exception(self.getFormattedString(record))
else:
Log.Error("UNKNOWN LEVEL: %s", record.getMessage())
|
Luiti/etl_utils
|
etl_utils/list_utils.py
|
Python
|
mit
| 1,250
| 0.0016
|
# -*- coding: utf-8 -*-
from collections import Counter
from .design_pattern import singleton
@singleton()
class ListUtilsClass(object):
def most_common_inspect(self, list1):
new_list = []
for s1 in list1:
if not isinstance(s1, unicode):
s1 = str(s1).decode("UTF-8")
new_list.append(s1)
cc = Counter(new_list).most_common()
if len(cc) > 0:
max_len = len(max([c1[0] for c1 in cc], key=lambda x1: len(x1))) + 5
for c1 in cc:
print c1[0].ljust(max_len, ' '), ' : ', c1[1]
return cc
    def uniq_seqs(self, seqs, uniq_lambda=None):
if uniq_lambda is None:
return list(set(seqs))
__uniq = set([])
__remove_idxes = []
for idx1, seq1 in enumerate(seqs[:]):
__id = uniq_lambda(seq1)
if __id in __uniq:
__remove_idxes.append(idx1)
else:
                __uniq.add(__id)
new_seqs = []
for idx1, seq1 in enumerate(seqs[:]):
if idx1 not in __remove_idxes:
new_seqs.append(seq1)
seqs = new_seqs
return seqs
ListUtils = ListUtilsClass()
uniq_seqs = ListUtils.uniq_seqs
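A minimal usage sketch (the record dicts are invented for illustration): uniq_seqs keeps the first occurrence for each key returned by uniq_lambda and preserves the original order.
records = [{'id': 1, 'v': 'a'}, {'id': 2, 'v': 'b'}, {'id': 1, 'v': 'c'}]
uniq_seqs(records, uniq_lambda=lambda r: r['id'])
# => [{'id': 1, 'v': 'a'}, {'id': 2, 'v': 'b'}]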
|
awood/orbach
|
orbach/core/urls.py
|
Python
|
gpl-3.0
| 1,005
| 0
|
'''
Copyright 2015
This file is part of Orbach.
Orbach is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Orbach is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Orbach. If not, see <http://www.gnu.org/licenses/>.
'''
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from orbach.core import views
router = DefaultRouter()
router.register(r'galleries', views.GalleryViewSet)
router.register(r'image_files', views.ImageFileViewSet)
router.register(r'users', views.UserViewSet)
urlpatterns = [
url(r'^', include(router.urls)),
]
|
ibobalo/python-for-android
|
pythonforandroid/recipes/twisted/__init__.py
|
Python
|
mit
| 1,038
| 0.000963
|
import glob
from pythonforandroid.toolchain import (
CythonRecipe,
Recipe,
current_directory,
info,
shprint,
)
from os.path import join
import sh
class TwistedRecipe(CythonRecipe):
version = '17.9.0'
url = 'https://github.com/twisted/twisted/archive/twisted-{version}.tar.gz'
    depends = ['setuptools', 'zope_interface', 'incremental', 'constantly']
call_hostpython_via_targetpython = False
    install_in_hostpython = True
def prebuild_arch(self, arch):
super(TwistedRecipe, self).prebuild_arch(arch)
# TODO Need to whitelist tty.pyo and termios.so here
print('Should remove twisted tests etc. here, but skipping for now')
def get_recipe_env(self, arch):
env = super(TwistedRecipe, self).get_recipe_env(arch)
# We add BUILDLIB_PATH to PYTHONPATH so twisted can find _io.so
env['PYTHONPATH'] = ':'.join([
self.ctx.get_site_packages_dir(),
env['BUILDLIB_PATH'],
])
return env
recipe = TwistedRecipe()
|
portnov/sverchok
|
tests/cubic_spline_tests.py
|
Python
|
gpl-3.0
| 1,513
| 0.017845
|
import numpy as np
from sverchok.utils.testing import *
from sverchok.utils.logging import debug, info
from sverchok.utils.geom import CubicSpline
class CubicSplineTests(SverchokTestCase):
def setUp(self):
super().setUp()
vertices = [(-1, -1, 0), (0, 0, 0), (1, 2, 0), (2, 3, 0)]
self.spline = CubicSpline(vertices, metric="DISTANCE")
def test_eval(self):
t_in = np.array([0.0, 0.1, 0.4, 0.5, 0.7, 1.0])
result = self.spline.eval(t_in)
#info(result)
expected_result = np.array(
[[-1.0, -1.0, 0.0 ],
[-0.60984526, -0.66497986, 0.0 ],
[ 0.29660356, 0.5303721, 0.0 ],
[ 0.5, 1.0, 0.0 ],
[ 0.94256655, 1.91347161, 0.0 ],
[ 2.0, 3.0, 0.0 ]])
self.assert_numpy_arrays_equal(result, expected_result, precision=8)
def test_tangent(self):
t_in = np.array([0.0, 0.1, 0.4, 0.5, 0.7, 1.0])
result = self.spline.tangent(t_in)
#info(result)
expected_result = np.array(
[[ 0.00789736, 0.00663246, 0.0 ],
[ 0.00761454, 0.0068363, 0.0 ],
[ 0.00430643, 0.00922065, 0.0 ],
[ 0.0039487, 0.0094785, 0.0 ],
[ 0.00537964, 0.00844713, 0.0 ],
[ 0.00789736, 0.00663246, 0.0 ]])
self.assert_numpy_arrays_equal(result, expected_result, precision=8)
|
PhilippMundhenk/IVNS
|
ECUSimulation/components/base/ecu/types/abst_ecu.py
|
Python
|
mit
| 5,802
| 0.018614
|
from components.base.automotive_component import AutomotiveComponent
from config import project_registration as proj
from tools.ecu_logging import ECULogger as L
import random
class AbstractECU(AutomotiveComponent):
'''
This abstract class defines the interface of
    an ECU as it is found in an automotive network
'''
def __init__(self, sim_env, ecu_id, data_rate):
''' Constructor
            Input: sim_env simpy.Environment environment of this component
ecu_id string id of the corresponding AbstractECU
data_rate float datarate of the ecu
Output: -
'''
AutomotiveComponent.__init__(self, sim_env)
self._ABSTRACT_ECU = True
self._ecu_id = ecu_id # ID of the ECU
self.ecuSW = None # what is done
self.ecuHW = None # what is used to make it happen
self.MessageClass = proj.BUS_MSG_CLASS # what kind of messages are exchanged
self.connected_bus = None # Bus that is connected to the ECU
self.data_rate = proj.BUS_ECU_DATARATE # Datarate with which bits are put on the bus
self._effective_datarate = 0 # Bit per second
self._effective_bittime = 0 # seconds
self._jitter = 1
self.startup_delay = False
def set_startup_delay(self, start_time):
''' this method sets the startup delay. When this delay is set
this ECU is activated after the defined start time
Input: start_time float time when the ECU starts running
Output: -
'''
self.startup_delay = start_time
if start_time:
self.ecuHW.transceiver.ecu_is_active = False
def set_jitter(self, jitter_range):
''' sets the jitter which will be multiplied onto each
timeout value. It will be within jitter_range
e.g. jitter_range of 0.1 means that any random value
between 1.0 and 1.1 will be used
Input: jitter_range: float dispersion from 1.0
Output: -
'''
# determine jitter
self._jitter = 1 + (random.random() * jitter_range)
# apply jitter on layers
try: self.ecuSW.comm_mod.physical_lay.transceiver._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.transp_lay._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.datalink_lay._jitter = self._jitter
except: pass
try: self.ecuSW.comm_mod.physical_lay.transceiver._jitter = self._jitter
except: pass
try: self.ecuSW.app_lay._jitter = self._jitter
except: pass
def _connect_hw_sw(self):
''' connect all hardware components with their
associated software connections
Input: -
Output: -
'''
# application Layer
self.ecuSW.app_lay.microcontroller = self.ecuHW.mic_controller
# physical and data link layer '''
self.ecuSW.comm_mod.datalink_lay.controller = self.ecuHW.controller
self.ecuSW.comm_mod.physical_lay.transceiver = self.ecuHW.transceiver
self.ecuSW.comm_mod.datalink_lay.effective_bittime = self._effective_bittime
def connect_to(self, bus):
''' connects the bus to the ECU
Input: bus CANBus Bus that will be connected
Output: -
'''
self.ecuHW.transceiver.connect_bus(bus)
self.connected_bus = bus
def get_type_id(self):
''' returns the id of this ECU type
Input: -
Output: ecu_type string type of this ECU; e.g.'TLSECU'
'''
raise NotImplementedError(" get_type_id() was not implemented by class %s" % self.__class__)
def get_rec_buffer_items(self):
''' returns the current content of the receiving buffer
Input: -
Output: rec_buffer list list of items in the receiving buffer
'''
return self.ecuHW.controller.receive_buffer.items
def get_trans_buffer_items(self):
''' returns the current content of the transmit buffer
Input: -
Output: trans_buffer list list of items in the transmit buffer
'''
return self.ecuHW.controller.transmit_buffer.items
def install_hw_filter(self, allowed_items_list):
''' installs a hardware filter that filters all
message ids that are not defined in the passed
list. This filter is applied on the transceiver
Input: allowed_items_list list list of message_ids that are let pass by the transceiver
Output: -
'''
try:
self.ecuHW.transceiver.install_filter(allowed_items_list)
except:
L().log_err(300)
def _GET_ABSTRACT_ECU(self):
''' marker that this is a AbstractECU '''
return self._ABSTRACT_ECU
@property
def ecu_id(self):
return self._ecu_id
@ecu_id.setter
def ecu_id(self, value):
self._ecu_id = value
def set_monitor(self, monitor):
self.monitor = monitor
|
tomasy23/evertonkrosnodart
|
tools/export/blender25/io_scene_dae/__init__.py
|
Python
|
mit
| 6,444
| 0.006518
|
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
bl_info = {
"name": "Better Collada Exporter",
"author": "Juan Linietsky",
"blender": (2, 5, 8),
"api": 38691,
"location": "File > Import-Export",
"description": ("Export DAE Scenes, This plugin actually works better! otherwise contact me."),
"warning": "",
"wiki_url": ("http://www.godotengine.org"),
"tracker_url": "",
"support": 'OFFICIAL',
"category": "Import-Export"}
if "bpy" in locals():
import imp
if "export_dae" in locals():
imp.reload(export_dae)
import bpy
from bpy.props import StringProperty, BoolProperty, FloatProperty, EnumProperty
from bpy_extras.io_utils import (ExportHelper,
path_reference_mode,
axis_conversion,
)
class ExportDAE(bpy.types.Operator, ExportHelper):
'''Selection to DAE'''
bl_idname = "export_scene.dae"
bl_label = "Export DAE"
bl_options = {'PRESET'}
filename_ext = ".dae"
    filter_glob = StringProperty(default="*.dae", options={'HIDDEN'})
# List of operator properties, the attributes will be assigned
# to the class instance from the operator settings before calling.
object_types = EnumProperty(
name="Object Types",
options={'ENUM_FLAG'},
items=(('EMPTY', "Empty", ""),
('CAMERA', "Camera", ""),
('LAMP', "Lamp", ""),
('ARMATURE', "Armature", ""),
('MESH', "Mesh", ""),
('CURVE', "Curve", ""),
),
default={'EMPTY', 'CAMERA', 'LAMP', 'ARMATURE', 'MESH','CURVE'},
)
use_export_selected = BoolProperty(
name="Selected Objects",
description="Export only selected objects (and visible in active layers if that applies).",
default=False,
)
use_mesh_modifiers = BoolProperty(
name="Apply Modifiers",
description="Apply modifiers to mesh objects (on a copy!).",
default=True,
)
use_tangent_arrays = BoolProperty(
name="Tangent Arrays",
description="Export Tangent and Binormal arrays (for normalmapping).",
default=False,
)
use_triangles = BoolProperty(
name="Triangulate",
description="Export Triangles instead of Polygons.",
default=False,
)
use_copy_images = BoolProperty(
name="Copy Images",
description="Copy Images (create images/ subfolder)",
default=False,
)
use_active_layers = BoolProperty(
name="Active Layers",
description="Export only objects on the active layers.",
default=True,
)
use_exclude_ctrl_bones = BoolProperty(
name="Exclude Control Bones",
description="Exclude skeleton bones with names that begin with 'ctrl'.",
default=True,
)
use_anim = BoolProperty(
name="Export Animation",
description="Export keyframe animation",
default=False,
)
use_anim_action_all = BoolProperty(
name="All Actions",
description=("Export all actions for the first armature found in separate DAE files"),
default=False,
)
use_anim_skip_noexp = BoolProperty(
name="Skip (-noexp) Actions",
description="Skip exporting of actions whose name end in (-noexp). Useful to skip control animations.",
default=True,
)
use_anim_optimize = BoolProperty(
name="Optimize Keyframes",
description="Remove double keyframes",
default=True,
)
anim_optimize_precision = FloatProperty(
name="Precision",
description=("Tolerence for comparing double keyframes "
"(higher for greater accuracy)"),
min=1, max=16,
soft_min=1, soft_max=16,
default=6.0,
)
use_metadata = BoolProperty(
name="Use Metadata",
default=True,
options={'HIDDEN'},
)
@property
def check_extension(self):
return True#return self.batch_mode == 'OFF'
def check(self, context):
return True
"""
isretur_def_change = super().check(context)
return (is_xna_change or is_def_change)
"""
def execute(self, context):
if not self.filepath:
raise Exception("filepath not set")
""" global_matrix = Matrix()
global_matrix[0][0] = \
global_matrix[1][1] = \
global_matrix[2][2] = self.global_scale
"""
keywords = self.as_keywords(ignore=("axis_forward",
"axis_up",
"global_scale",
"check_existing",
"filter_glob",
"xna_validate",
))
from . import export_dae
return export_dae.save(self, context, **keywords)
def menu_func(self, context):
self.layout.operator(ExportDAE.bl_idname, text="Better Collada (.dae)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
if __name__ == "__main__":
register()
|
pvdheijden/OpenCaster
|
libs/dvbobjects/dvbobjects/PSI/UNT.py
|
Python
|
gpl-2.0
| 2,864
| 0.013268
|
#! /usr/bin/env python
# This file is part of the dvbobjects library.
#
# Copyright 2009-2013 Lorenzo Pallara l.pallara@avalpa.com
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
import string
from dvbobjects.MPEG.Section import Section
from dvbobjects.utils import *
from dvbobjects.DVB.Descriptors import *
######################################################################
class update_notification_section(Section):
table_id = 0x4B
section_max_size = 4096
def pack_section_body(self):
self.table_id_extension = self.action_type << 8 | ((self.OUI >> 16) ^ ((self.OUI >> 8) & 0xFF) ^ (self.OUI & 0xFF))
# pack common_descriptor_loop
common_bytes = string.join(
map(lambda x: x.pack(),
self.common_descriptor_loop),
"")
# pack compatibility_descriptor_loop
compatibility_bytes = string.join(
map(lambda x: x.pack(),
self.compatibility_descriptor_loop),
"")
fmt = "!HBBH%ds%ds" % (len(common_bytes), len(compatibility_bytes))
return pack(fmt,
self.OUI >> 8,
self.OUI & 0xFF,
self.processing_order,
0xF000 | len(common_bytes),
common_bytes,
compatibility_bytes,
)
######################################################################
class unt_compatibility_descriptor_loop_item(DVBobject):
def pack(self):
# pack target_descriptor_loop
tdl_bytes = string.join(
map(lambda x: x.pack(),
self.target_descriptor_loop),
"")
# pack operational descriptor_loop
odl_bytes = string.join(
map(lambda x: x.pack(),
self.operational_descriptor_loop),
"")
fmt = "!%dsHH%dsH%ds" % (len(self.compatibility_descriptor), len(tdl_bytes), len(odl_bytes))
return pack(fmt,
self.compatibility_descriptor,
len(tdl_bytes) + len(odl_bytes),
0xF000 | len(tdl_bytes),
tdl_bytes,
0xF000 | len(odl_bytes),
odl_bytes,
)
|
Kunalpod/codewars
|
is_this_a_triangle.py
|
Python
|
mit
| 163
| 0.042945
|
#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Is this a triangle?
#Problem level: 7 kyu
def is_triangle(a, b, c):
return (a+b>c) and (b+c>a) and (c+a>b)
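# Illustrative checks (not part of the kata submission):
# is_triangle(3, 4, 5)  -> True   (each pair of sides sums to more than the third)
# is_triangle(1, 2, 3)  -> False  (1 + 2 is not strictly greater than 3)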
|
linea-it/dri
|
api/product/admin.py
|
Python
|
gpl-3.0
| 5,841
| 0.002226
|
from django.contrib import admin
from .models import *
class ProductAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_process_id', 'prd_name',
'prd_display_name', 'prd_owner', 'prd_product_id', 'prd_date',
'prd_class', 'prd_filter', 'prd_is_public', 'prd_is_permanent',)
list_display_links = ('id', 'prd_process_id', 'prd_name',)
search_fields = ('prd_process_id', 'prd_name', 'prd_display_name', 'prd_product_id',)
class ProductReleaseAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'release',)
list_display_links = ('id', 'product', 'release',)
search_fields = ('product', 'release',)
class ProductTagAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'tag',)
list_display_links = ('id', 'product', 'tag',)
search_fields = ('product', 'tag',)
class FileAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'fli_base_path', 'fli_name',)
list_display_links = ('id', 'prd_name', 'prd_display_name', 'prd_class',)
search_fields = ('fli_name',)
class TableAdmin(admin.ModelAdmin):
list_display = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'tbl_database', 'tbl_schema', 'tbl_name',)
list_display_links = ('id', 'prd_name', 'prd_display_name',
'prd_class', 'tbl_schema', 'tbl_name',)
search_fields = ('tbl_schema', 'tbl_name',)
class CatalogAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'ctl_num_objects',
)
class MapAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'mpa_nside', 'mpa_ordering', 'prd_filter', 'prd_is_public', 'prd_is_permanent'
)
list_display_links = ('id', 'prd_name')
search_fields = ('prd_name',)
class CutOutJobAdmin(admin.ModelAdmin):
list_display = (
'id', 'cjb_product', 'cjb_display_name', 'cjb_status', 'cjb_tag', 'owner',
)
list_display_links = ('id',)
search_fields = ('cjb_display_name',)
class DesjobAdmin(admin.ModelAdmin):
list_display = (
'id', 'djb_cutout_job', 'djb_jobid', 'djb_status', 'djb_start_time', 'djb_finish_time', 'djb_message',
)
list_display_links = ('id',)
search_fields = ('djb_jobid',)
class CutoutAdmin(admin.ModelAdmin):
list_display = (
'id', 'cjb_cutout_job', 'ctt_object_id', 'ctt_object_ra', 'ctt_object_dec', 'ctt_img_format', 'ctt_filter',
'ctt_file_name', 'ctt_file_path', 'ctt_file_type', 'ctt_file_size', )
list_display_links = ('id',)
search_fields = ('id',)
class MaskAdmin(admin.ModelAdmin):
list_display = (
'id', 'prd_name', 'prd_display_name', 'prd_class', 'msk_filter',
)
list_display_links = ('id', 'prd_name')
search_fields = ('prd_name',)
class ProductContentAdmin(admin.ModelAdmin):
list_display = ('id', 'pcn_product_id', 'pcn_column_name', 'pcn_ucd')
list_display_links = ('pcn_column_name',)
    search_fields = ('pcn_column_name',)
class ProductContentAssociationAdmin(admin.ModelAdmin):
list_display = ('id', 'pca_product', 'pca_class_content', 'pca_product_content',)
    search_fields = ('pca_product__prd_display_name', 'pca_product__prd_name')
class ProductContentSettingAdmin(admin.ModelAdmin):
list_display = ('id', 'pcs_content', 'pcs_setting', 'pcs_is_visible', 'pcs_order')
class ProductSettingAdmin(admin.ModelAdmin):
list_display = (
'id', 'cst_product', 'owner', 'cst_display_name', 'cst_description', 'cst_is_public', 'cst_is_editable',)
search_fields = ('cst_product__prd_display_name', 'cst_display_name', 'cst_description',)
class CurrentSettingAdmin(admin.ModelAdmin):
list_display = ('id', 'cst_product', 'cst_setting', 'owner',)
class WorkgroupAdmin(admin.ModelAdmin):
list_display = ('id', 'wgp_workgroup', 'owner',)
class WorkgroupUserAdmin(admin.ModelAdmin):
list_display = ('id', 'wgu_workgroup', 'wgu_user',)
class PermissionAdmin(admin.ModelAdmin):
list_display = ('id', 'prm_product', 'prm_user', 'prm_workgroup',)
class ProductRelatedAdmin(admin.ModelAdmin):
list_display = ('id', 'prl_product', 'prl_related', 'prl_relation_type', 'prl_cross_identification',)
class FiltersetdAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'owner', 'fst_name',)
class FilterConditionAdmin(admin.ModelAdmin):
list_display = ('id', 'filterset', 'fcd_property', 'fcd_property_name', 'fcd_operation', 'fcd_value')
class BookmarkedAdmin(admin.ModelAdmin):
list_display = ('id', 'product', 'owner', 'is_starred')
admin.site.register(Product, ProductAdmin)
admin.site.register(ProductRelease, ProductReleaseAdmin)
admin.site.register(ProductTag, ProductTagAdmin)
admin.site.register(File, FileAdmin)
admin.site.register(Table, TableAdmin)
admin.site.register(Catalog, CatalogAdmin)
admin.site.register(Map, MapAdmin)
admin.site.register(CutOutJob, CutOutJobAdmin)
admin.site.register(Desjob, DesjobAdmin)
admin.site.register(Cutout, CutoutAdmin)
admin.site.register(Mask, MaskAdmin)
admin.site.register(ProductContent, ProductContentAdmin)
admin.site.register(ProductContentAssociation, ProductContentAssociationAdmin)
admin.site.register(ProductContentSetting, ProductContentSettingAdmin)
admin.site.register(ProductSetting, ProductSettingAdmin)
admin.site.register(CurrentSetting, CurrentSettingAdmin)
admin.site.register(Permission, PermissionAdmin)
admin.site.register(ProductRelated, ProductRelatedAdmin)
admin.site.register(Workgroup, WorkgroupAdmin)
admin.site.register(WorkgroupUser, WorkgroupUserAdmin)
admin.site.register(Filterset, FiltersetdAdmin)
admin.site.register(FilterCondition, FilterConditionAdmin)
admin.site.register(BookmarkProduct, BookmarkedAdmin)
|
pczhaoyun/wolf
|
wolf/spiders/wolves/cnscg.py
|
Python
|
apache-2.0
| 1,092
| 0.003663
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import urlparse
from scrapy import log
from scrapy.http import Request
from base.base_wolf import Base_Wolf
class Wolf(Base_Wolf):
def __init__(self, *args, **kwargs):
super(Wolf, self).__init__(*args, **kwargs)
self.name = 'cnscg'
        self.seed_urls = [
'http://www.cnscg.org/',
]
self.base_url = 'http://www.cnscg.org/'
self.rule['follow'] = re.compile(r'show-')
self.anchor['desc'] = "//*[@class='intro']"
def get_resource(self, item, response, tree):
item = super(Wolf, self).get_resource(item, response, tree)
resource = tree.xpath("//*[@class='original download']//a/@href")
downloads = [urlparse.urljoin(self.base_url, r) for r in resource if re.match(r'down.php', r)]
if len(downloads):
return self.download_bt(item, [Request(d, cookies=self.cookiejar._cookies,) for d in downloads])
else:
self.log("No Resource DropItem %s" % item['source'], level=log.WARNING)
return None
|
michaelBenin/django-oscar
|
setup.py
|
Python
|
bsd-3-clause
| 3,934
| 0.001017
|
#!/usr/bin/env python
"""
Installation script:
To release a new version to PyPi:
- Ensure the version is correctly set in oscar.__init__.py
- Run: python setup.py sdist upload
"""
from setuptools import setup, find_packages
import os
import sys
from oscar import get_version
PROJECT_DIR = os.path.dirname(__file__)
# Change to the current directory to solve an issue installing Oscar on the
# Vagrant machine.
if PROJECT_DIR:
os.chdir(PROJECT_DIR)
setup(name='django-oscar',
version=get_version().replace(' ', '-'),
url='https://github.com/tangentlabs/django-oscar',
author="David Winterbottom",
author_email="david.winterbottom@tangentlabs.co.uk",
description="A domain-driven e-commerce framework for Django",
long_description=open(os.path.join(PROJECT_DIR, 'README.rst')).read(),
keywords="E-commerce, Django, domain-driven",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django>=1.4,<1.6',
# PIL is required for image fields, Pillow is the "friendly" PIL fork
'pillow>=1.7.8,<2.0.0',
# Oscar ships with migrations
'South>=0.7.6,<0.8',
# We use the ModelFormSetView from django-extra-views for the basket
# page
'django-extra-views>=0.2,<0.6',
# We ship a simple Haystack implementation (that needs to be
# improved). We are using the 2.0-beta release from Github and
# eagerly anticipating a stable 2.0 release on PyPI.
'django-haystack==2.0.0-beta',
# Treebeard is used for categories
'django-treebeard==2.0b1',
# Sorl is used as the default thumbnailer
'sorl-thumbnail==11.12',
'python-memcached>=1.48,<1.52',
# Babel is used for currency formatting
'Babel>=0.9,<0.10',
# Oscar's default templates use compressor (but you can override
# this)
'django-compressor>=1.2,<1.4',
# For converting non-ASCII to ASCII when creating slugs
'Unidecode>=0.04.12,<0.05',
# For manipulating search URLs
'purl>=0.7'
],
dependency_links=['https://github.com/toastdriven/django-haystack/tarball/fd83d3f449c2197f93040bb3d7bc6083ea8e48b7#egg=django-haystack-2.0.0-beta'],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Other/Nonlisted Topic']
)
# Show contributing instructions if being installed in 'develop' mode
if len(sys.argv) > 1 and sys.argv[1] == 'develop':
docs_url = 'http://django-oscar.readthedocs.org/en/latest/internals/contributing/index.html'
mailing_list = 'django-oscar@googlegroups.com'
mailing_list_url = 'https://groups.google.com/forum/?fromgroups#!forum/django-oscar'
twitter_url = 'https://twitter.com/django_oscar'
msg = (
"You're installing Oscar in 'develop' mode so I presume you're thinking\n"
"of contributing:\n\n"
"(a) That's brilliant - thank you for your time\n"
"(b) If you have any questions, please use the mailing list:\n %s\n"
" %s\n"
"(c) There are more detailed contributing guidelines that you should "
"have a look at:\n %s\n"
"(d) Consider following @django_oscar on Twitter to stay up-to-date\n"
" %s\n\nHappy hacking!") % (mailing_list, mailing_list_url,
docs_url, twitter_url)
line = '=' * 82
print "\n%s\n%s\n%s" % (line, msg, line)
|
Laurawly/tvm-1
|
python/tvm/relay/op/contrib/ethosu.py
|
Python
|
apache-2.0
| 42,334
| 0.001819
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=ungrouped-imports, import-outside-toplevel
"""Arm(R) Ethos(TM)-U NPU supported operators."""
import functools
from typing import Dict, List, Tuple, Callable, Optional
import numpy as np # type: ignore
import tvm # type: ignore
from tvm import relay
from tvm.relay.expr import Constant, Call # type: ignore
from tvm.relay.op.contrib.register import register_pattern_table # type: ignore
from tvm.relay.dataflow_pattern import wildcard, is_op, is_constant, is_tuple # type: ignore
from tvm.relay.build_module import bind_params_by_name # type: ignore
try:
# As ethos-u-vela package is an optional TVM dependency, we want to lazy load it
# and check whether it is installed or not.
#
# In order to show the appropriate error messages when we try to invoke code that
# rely on imports from ethos-u-vela, we protect them with the decorator @requires_vela
# implemented below.
from ethosu.vela import api as vapi # type: ignore
except ImportError:
vapi = None
def requires_vela(func):
"""Decorator to check whether we have the required dependency ethos-u-vela
installed as a python package"""
@functools.wraps(func)
def wrapper(*args, **kwargs):
if not vapi:
raise ImportError(
"The 'ethos-u-vela' python package is required for the Arm(R) Ethos(TM)-U NPU "
"backend. Please install the d
|
ependency using your Python package manager."
) from None
return func(*args, **kwargs)
return wrapper
class TensorParams:
"""
This class will parse a tvm Expr along with quantization scale
and zero point to populate parameters that are required
for the creation of tensors in Vela.
"""
@requires_vela
def __init__(self, tensor, layout=None, scale=None, zero_point=None):
self.tensor = tensor
if isinstance(tensor, Constant):
self.values = tensor.data.asnumpy()
else:
self.values = None
self.dtype = tensor.checked_type.dtype
self.shape = [int(i) for i in tensor.checked_type.shape]
self.layout = layout
if scale is not None and zero_point is not None:
self.q_params = vapi.NpuQuantization(
scale.data.asnumpy().astype("float32"), zero_point.data.asnumpy().astype(self.dtype)
)
else:
# put default values
self.q_params = vapi.NpuQuantization(1.0, 0)
def check_strides(strides: List[int]) -> bool:
"""This function checks whether strides are within the limits supported by the NPU"""
stride_range = (1, 3)
smin, smax = stride_range
if not smax >= strides[0] >= smin:
return False
if not smax >= strides[1] >= smin:
return False
return True
def check_valid_dtypes(tensor_params: List[TensorParams], supported_dtypes: List[type]) -> bool:
"""This function checks whether dtypes are supported by the NPU"""
for tep in tensor_params:
# Check for dtypes
if np.dtype(tep.dtype) not in supported_dtypes:
return False
# Check for shape sizes
if any(dimlen > 65536 for dimlen in tep.shape):
return False
return True
def check_weights(weights: TensorParams, dilation: List[int]):
"""This function checks whether weight tensor is compatible with the NPU"""
from tvm.relay.backend.contrib.ethosu.util import get_dim_value
dilated_height_range = (1, 64)
dilated_hxw_range = (1, 64 * 64)
weights_limit = 127 * 65536
dilated_width = (weights.shape[get_dim_value(weights.layout, "W")] - 1) * dilation[0] + 1
dilated_height = (weights.shape[get_dim_value(weights.layout, "H")] - 1) * dilation[1] + 1
dh_min, dh_max = dilated_height_range
if not dh_min <= dilated_height <= dh_max:
return False
dilated_hxw = dilated_height * dilated_width
dhxw_min, dhxw_max = dilated_hxw_range
if not dhxw_min <= dilated_hxw <= dhxw_max:
return False
# A saturation upper bound check for accumulators
weights.values = weights.values - weights.q_params.zero_point
axis = (
get_dim_value(weights.layout, "H"),
get_dim_value(weights.layout, "W"),
get_dim_value(weights.layout, "I"),
)
sum_weights = np.amax(np.sum(np.absolute(weights.values), axis=axis))
return sum_weights <= weights_limit
def check_bias(bias: TensorParams):
"""This function checks whether the bias values fit in 40 bits"""
if bias and bias.dtype == np.dtype("int64"):
valid = all(len(bin(bias_value)[2:]) <= 40 for bias_value in bias.values)
return valid
return True
def check_batch_size(ifm: TensorParams):
"""This function checks for the number of batches vela currently supports"""
return ifm.shape[0] == 1
def check_dilation(dilation: List[int]):
"""This function checks whether dilation is within the limits supported by the NPU"""
dilation_range = (1, 2)
dmin, dmax = dilation_range
if not dmin <= dilation[0] <= dmax:
return False
if not dmin <= dilation[1] <= dmax:
return False
return True
def check_padding(padding: List[int], bounds: List[int]):
"""This function checks whether padding is within the limits supported by the NPU"""
if len(padding) != 4 or len(bounds) != 4:
return False
top, left, bottom, right = padding
topb, leftb, bottomb, rightb = bounds
return not (top > topb or left > leftb or bottom > bottomb or right > rightb)
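# Worked example (illustrative values): with the padding_bounds = [31, 31, 32, 32]
# used by QnnConv2DParams below, check_padding([1, 1, 1, 1], [31, 31, 32, 32]) returns
# True, while a top padding of 33 would exceed the 31-row bound and return False.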
def check_pool_shape(pool_shape: tvm.ir.container.Array) -> bool:
if len(pool_shape) != 2:
return False
if pool_shape[1] > 256:
return False
if pool_shape[0] * pool_shape[1] > 256 * 256:
return False
return True
def check_dimensions(tensor: TensorParams):
"""This function checks that the tensor has no more than 4 dimensions"""
return len(tensor.shape) <= 4
class QnnConv2DParams:
"""
This class will parse a Call to a ethosu.qnn_conv2d composite function
and extract quantization information of all the associated tensors.
"""
composite_name = "ethos-u.qnn_conv2d"
# The NPU only supports padding upto the numbers as follows
padding_bounds = [31, 31, 32, 32]
activation_map = {"clip": "CLIP"}
@requires_vela
def __init__(self, func_body: tvm.relay.Function):
from tvm.relay.backend.contrib.ethosu.util import QConv2DArgs # type: ignore
from tvm.relay.backend.contrib.ethosu.util import BiasAddArgs
from tvm.relay.backend.contrib.ethosu.util import RequantArgs
activation = None
if str(func_body.op) in self.activation_map.keys():
activation = func_body
requantize_op = activation.args[0]
else:
requantize_op = func_body
bias_add = requantize_op.args[0]
qnn_conv2d = bias_add.args[0]
data_layout = qnn_conv2d.attrs.data_layout
self.kernel_layout = qnn_conv2d.attrs.kernel_layout
# We consider the weights & biases as params as it should be a Constant
self.weights = TensorParams(
qnn_conv2d.args[QConv2DArgs.WEIGHTS.value],
self.kernel_layout,
qnn_conv2d.args[QConv2DArgs.WEIGHTS_SCALE.value],
qnn_conv2d.args[QConv2DArgs.WEIGHTS_ZERO_POINT.value],
)
self.bi
|
pyGBot/pyGBot
|
pyGBot/Plugins/system/CommandSpec/Seen.py
|
Python
|
gpl-3.0
| 3,387
| 0.004724
|
##
## pyGBot - Versatile IRC Bot
## Copyright (C) 2008 Morgan Lokhorst-Blight, Alex Soborov
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program. If not, see <http://www.gnu.org/licenses/>.
##
from datetime import datetime
from pyGBot import log
from pyGBot.Plugins.system.Commands import BaseCommand
from pyGBot.Plugins.system.Auth import AuthLevels as AL
class Seen(BaseCommand):
level = AL.User
def __init__(self, bot, channel, user, args):
args = args.strip().split()
if not args:
bot.replyout(channel, user, 'Command usage: seen <user> [channel]')
return
searchNick = args[0]
try:
searchChannel = args[1]
except IndexError:
searchChannel = None
try:
            event = bot.plugins['features.Seen'].get_latest(searchNick, searchChannel)
except IndexError, e:
bot.replyout(channel, user, str(e))
return
outmessage = "The user, %s, was last seen " % event.user
if event.channel:
outmessage += "on channel %s " % event.channel
else:
outmessage += "on this network "
lastseen = datetime.now() - event.timestamp
days = lastseen.days
        hours = lastseen.seconds / 3600
minutes = (lastseen.seconds % 3600) / 60
seconds = lastseen.seconds % 60
timemessage = []
if days != 0:
timemessage.append("%i days" % days)
if hours != 0:
timemessage.append("%i hours" % hours)
if minutes != 0:
timemessage.append("%i minutes" % minutes)
if seconds != 0:
timemessage.append("%i seconds" % seconds)
        if len(timemessage) > 0:
outmessage += ", ".join(timemessage) + " ago, "
else:
outmessage += "just now, "
if event.type == "Say":
outmessage += "saying: <%s> %s" % (event.user, event.message)
elif event.type == "Do":
outmessage += "performing the action: * %s %s" % (event.user, event.message)
elif event.type == "Msg":
outmessage += "sending me a private message."
elif event.type == "Part":
outmessage += "parting the channel."
elif event.type == "Join":
outmessage += "joining the channel."
elif event.type == "Quit":
outmessage += "quitting with the message: %s" % event.message
elif event.type == "Kick":
outmessage += "getting kicked %s" % event.message
elif event.type == "NickTo":
outmessage += "changing nick to %s." % event.message
elif event.type == "NickFrom":
outmessage += "changing nick from %s." % event.message
bot.replyout(channel, user, outmessage)
|
behconsci/sniffets-python
|
setup.py
|
Python
|
mit
| 749
| 0.001335
|
try:
from setuptools import setup
except ImportError:
    from distutils.core import setup
try:
with open('README.rst') as file:
long_description = file.read()
except IOError:
long_description = 'Python lib for sniffets.com'
setup(
name='sniffets',
packages=['sniffets'],
version='0.1.8',
long_description=long_description,
description='Python lib for sniffets.com',
author='Doniyor Jurabayev',
author_email='behconsci@gmail.com',
url='https://github.com/behconsci/sniffets-python',
    download_url='https://github.com/behconsci/sniffets-python/archive/0.1.8.tar.gz',
keywords=['track', 'monitor', 'bug'],
classifiers=[],
install_requires=[
'requests', 'grequests'
],
)
|
estin/pomp
|
pomp/contrib/concurrenttools.py
|
Python
|
bsd-3-clause
| 5,193
| 0
|
"""
Concurrent downloaders
"""
import os
import sys
import signal
import logging
import itertools
from functools import partial
from concurrent.futures import ProcessPoolExecutor
from pomp.core.base import (
BaseCrawler, BaseDownloader, BaseCrawlException,
)
from pomp.contrib.urllibtools import UrllibDownloadWorker
from pomp.core.utils import iterator, Planned
log = logging.getLogger('pomp.contrib.concurrent')
def _run_download_worker(params, request):
pid = os.getpid()
log.debug("Download worker pid=%s params=%s", pid, params)
try:
# Initialize worker and call get_one method
return params['worker_class'](
**params.get('worker_kwargs', {})
).process(request)
except Exception:
        log.exception(
"Exception on download worker pid=%s request=%s", pid, request
)
raise
def _run_crawler_worker(params, response):
pid = os.getpid()
log.debug("Crawler worker pid=%s params=%s", pid, params)
try:
# Initialize crawler worker
worker = params['worker_class'](**params.get('worker_kwargs', {}))
# process response
items = worker.extract_items(response)
next_requests = worker.next_requests(response)
if next_requests:
return list(
itertools.chain(
iterator(items),
iterator(next_requests),
)
)
return list(iterator(items))
except Exception:
log.exception(
"Exception on crawler worker pid=%s request=%s", pid, response
)
raise
class ConcurrentMixin(object):
def _done(self, request, done_future, future):
try:
response = future.result()
except Exception as e:
log.exception('Exception on %s', request)
done_future.set_result(BaseCrawlException(
request,
exception=e,
exc_info=sys.exc_info(),
))
else:
done_future.set_result(response)
class ConcurrentDownloader(BaseDownloader, ConcurrentMixin):
"""Concurrent ProcessPoolExecutor downloader
    :param pool_size: size of the ProcessPoolExecutor pool
"""
def __init__(
self, worker_class,
worker_kwargs=None, pool_size=5,):
# configure executor
self.pool_size = pool_size
self.executor = ProcessPoolExecutor(max_workers=self.pool_size)
# prepare worker params
self.worker_params = {
'worker_class': worker_class,
'worker_kwargs': worker_kwargs or {},
}
# ctrl-c support for python2.x
# trap sigint
signal.signal(signal.SIGINT, lambda s, f: s)
super(ConcurrentDownloader, self).__init__()
def process(self, crawler, request):
# delegate request processing to the executor
future = self.executor.submit(
_run_download_worker, self.worker_params, request,
)
# build Planned object
done_future = Planned()
# when executor finish request - fire done_future
future.add_done_callback(
partial(self._done, request, done_future)
)
return done_future
def get_workers_count(self):
return self.pool_size
def stop(self, crawler):
self.executor.shutdown()
class ConcurrentUrllibDownloader(ConcurrentDownloader):
"""Concurrent ProcessPoolExecutor downloader for fetching data with urllib
:class:`pomp.contrib.SimpleDownloader`
:param pool_size: pool size of ProcessPoolExecutor
:param timeout: request timeout in seconds
"""
def __init__(self, pool_size=5, timeout=None):
super(ConcurrentUrllibDownloader, self).__init__(
pool_size=pool_size,
worker_class=UrllibDownloadWorker,
worker_kwargs={
'timeout': timeout
},
)
class ConcurrentCrawler(BaseCrawler, ConcurrentMixin):
"""Concurrent ProcessPoolExecutor crawler
:param pool_size: pool size of ProcessPoolExecutor
"""
def __init__(self, worker_class, worker_kwargs=None, pool_size=5):
# configure executor
self.pool_size = pool_size
self.executor = ProcessPoolExecutor(max_workers=self.pool_size)
# prepare worker params
self.worker_params = {
'worker_class': worker_class,
'worker_kwargs': worker_kwargs or {},
}
# inherit ENTRY_REQUESTS from worker_class
self.ENTRY_REQUESTS = getattr(worker_class, 'ENTRY_REQUESTS', None)
def process(self, response):
# delegate response processing to the executor
future = self.executor.submit(
_run_crawler_worker, self.worker_params, response,
)
# build Planned object
done_future = Planned()
# when executor finish response processing - fire done_future
future.add_done_callback(
partial(self._done, response, done_future)
)
return done_future
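# --- Illustrative sketch (not part of the original module) ---
# The Planned/callback pattern used above, reduced to plain concurrent.futures
# from the standard library; the worker function and values here are made up.
def _square(x):
    return x * x
def _report(future):
    print("result=%s" % future.result())
if __name__ == '__main__':
    pool = ProcessPoolExecutor(max_workers=2)
    fut = pool.submit(_square, 7)
    # same wiring as ConcurrentMixin._done: react when the executor finishes
    fut.add_done_callback(_report)
    pool.shutdown()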
|
Paul-St-Young/solid_hydrogen
|
qharv_db/dpmd_train.py
|
Python
|
mit
| 2,815
| 0.013144
|
def descriptor(rcut=4, desc_type='se_ar', mneiba=150):
smth_frac = 0.85
if desc_type == 'se_ar':
rmult = 1.5
mneibr = 500
ar_smth = rcut*smth_frac
desc = {
'type': 'se_ar',
'a': {
'sel': [mneiba],
'rcut_smth': ar_smth,
'rcut': rcut,
'neuron': [10, 20, 40],
'resnet_dt': False,
'axis_neuron': 4,
'seed': 1,
},
'r': {
'sel': [mneibr],
'rcut_smth': ar_smth*rmult,
'rcut': rcut*rmult,
'neuron': [5, 10, 20],
'resnet_dt': False,
'seed': 1
}
|
}
elif desc_type == 'se_a':
desc = {
'type': 'se_a',
'sel': [mneiba],
'rcut_smth': rcut*smth_frac,
'rcut': rcut,
'neuron': [16, 32, 64],
'resnet_dt': False,
'axis_neuron': 4,
'seed': 1,
}
else:
msg = 'please add inputs for descriptor type %s' % desc_type
raise RuntimeError(msg)
return desc
def fitting_net():
fn = {
'neuron': [240, 240, 240],
        'resnet_dt': True,
'seed': 1
}
return fn
def loss_function():
loss = {
'start_pref_e': 0.02,
'limit_pref_e': 1,
'start_pref_f': 1000,
'limit_pref_f': 1,
'start_pref_v': 1000,
'limit_pref_v': 1
}
return loss
def calc_decay_steps(stop_batch, start_lr, stop_lr, decay_rate):
import numpy as np
decay = np.log(stop_lr/start_lr)/np.log(decay_rate)
decay_steps = int(round(stop_batch/decay))
return decay_steps
def learning_rate(stop_batch, start_lr=5e-3, stop_lr=5e-8,
decay_rate=0.95):
decay_steps = calc_decay_steps(stop_batch, start_lr, stop_lr,
decay_rate)
lr = {
'type': 'exp',
'start_lr': start_lr,
'stop_lr': stop_lr,
'decay_steps': decay_steps,
'decay_rate': decay_rate
}
return lr
def training(stop_batch, batch_size):
tr = {
'seed': 1,
'systems': ['.'],
'set_prefix': 'set',
'batch_size': batch_size,
'stop_batch': stop_batch,
}
display = {
'disp_file': 'lcurve.out',
'disp_freq': 1000,
'numb_test': 64,
'disp_training': True,
'time_training': True,
'profiling': False,
'profiling_file': 'timeline.json',
}
checkpoint = {
'save_ckpt': 'model.ckpt',
'load_ckpt': 'model.ckpt',
'save_freq': 10000,
}
tr.update(display)
tr.update(checkpoint)
return tr
def default_input(stop_batch=100000, batch_size=32,
desc_kws=None,
lr_kws=None,
):
if desc_kws is None:
desc_kws = dict()
if lr_kws is None:
lr_kws = dict()
dpmd_input = {
'model': {
'type_map': ['H'],
'descriptor': descriptor(**desc_kws),
'fitting_net': fitting_net(),
},
'loss': loss_function(),
'learning_rate': learning_rate(stop_batch, **lr_kws),
'training': training(stop_batch, batch_size)
}
return dpmd_input
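# --- Illustrative usage sketch (not part of the original module) ---
# Serialize the assembled DeePMD-kit training input to disk; the file name
# 'input.json' is an assumption.
if __name__ == '__main__':
    import json
    with open('input.json', 'w') as fp:
        json.dump(default_input(stop_batch=200000, batch_size=32), fp, indent=2)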
|
ziwenxie/netease-dl
|
netease/weapi.py
|
Python
|
mit
| 14,637
| 0.000273
|
# -*- coding: utf-8 -*-
"""
netease-dl.weapi
~~~~~~~~~~~~~~~~
This module provides a Crawler class to get NetEase Music API.
"""
import re
import hashlib
import os
import sys
import click
import requests
from requests.exceptions import RequestException, Timeout, ProxyError
from requests.exceptions import ConnectionError as ConnectionException
from .compat import cookielib
from .encrypt import encrypted_request
from .utils import Display
from .config import headers, cookie_path, person_info_path
from .logger import get_logger
from .exceptions import (
SearchNotFound, SongNotAvailable, GetRequestIllegal, PostRequestIllegal)
from .models import Song, Album, Artist, Playlist, User
LOG = get_logger(__name__)
def exception_handle(method):
"""Handle exception raised by requests library."""
def wrapper(*args, **kwargs):
try:
result = method(*args, **kwargs)
return result
except ProxyError:
LOG.exception('ProxyError when try to get %s.', args)
raise ProxyError('A proxy error occurred.')
except ConnectionException:
LOG.exception('ConnectionError when try to get %s.', args)
raise ConnectionException('DNS failure, refused connection, etc.')
except Timeout:
LOG.exception('Timeout when try to get %s', args)
raise Timeout('The request timed out.')
except RequestException:
LOG.exception('RequestException when try to get %s.', args)
raise RequestException('Please check out your network.')
return wrapper
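# Illustrative sketch (not part of the original module): exception_handle can
# wrap any callable that issues a request, turning low-level requests errors
# into the logged, user-facing exceptions above. `fetch_json` is hypothetical.
#     @exception_handle
#     def fetch_json(url):
#         return requests.get(url, timeout=10).json()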
class Crawler(object):
"""NetEase Music API."""
def __init__(self, timeout=60, proxy=None):
self.session = requests.Session()
self.session.headers.update(headers)
self.session.cookies = cookielib.LWPCookieJar(cookie_path)
self.download_session = requests.Session()
self.timeout = timeout
self.proxies = {'http': proxy, 'https': proxy}
self.display = Display()
@exception_handle
def get_request(self, url):
"""Send a get request.
warning: old api.
:return: a dict or raise Exception.
"""
resp = self.session.get(url, timeout=self.timeout,
proxies=self.proxies)
result = resp.json()
if result['code'] != 200:
LOG.error('Return %s when try to get %s', result, url)
raise GetRequestIllegal(result)
else:
return result
@exception_handle
def post_request(self, url, params):
"""Send a post request.
:return: a dict or raise Exception.
"""
data = encrypted_request(params)
resp = self.session.post(url, data=data, timeout=self.timeout,
proxies=self.proxies)
result = resp.json()
if result['code'] != 200:
LOG.error('Return %s when try to post %s => %s',
result, url, params)
raise PostRequestIllegal(result)
else:
return result
def search(self, search_content, search_type, limit=9):
"""Search entrance.
:params search_content: search content.
:params search_type: search type.
:params limit: result count returned by weapi.
:return: a dict.
"""
url = 'http://music.163.com/weapi/cloudsearch/get/web?csrf_token='
params = {'s': search_content, 'type': search_type, 'offset': 0,
'sub': 'false', 'limit': limit}
result = self.post_request(url, params)
return result
def search_song(self, song_name, quiet=False, limit=9):
"""Search song by song name.
:params song_name: song name.
:params quiet: automatically select the best one.
:params limit: song count returned by weapi.
:return: a Song object.
"""
result = self.search(song_name, search_type=1, limit=limit)
if result['result']['songCount'] <= 0:
LOG.warning('Song %s not existed!', song_name)
raise SearchNotFound('Song {} not existed.'.format(song_name))
else:
songs = result['result']['songs']
if quiet:
song_id, song_name = songs[0]['id'], songs[0]['name']
song = Song(song_id, song_name)
return song
else:
return self.display.select_one_song(songs)
def search_album(self, album_name, quiet=False, limit=9):
"""Search album by album name.
:params album_name: album name.
:params quiet: automatically select the best one.
:params limit: album count returned by weapi.
:return: a Album object.
"""
result = self.search(album_name, search_type=10, limit=limit)
if result['result']['albumCount'] <= 0:
LOG.warning('Album %s not existed!', album_name)
raise SearchNotFound('Album {} not existed'.format(album_name))
else:
albums = result['result']['albums']
if quiet:
album_id, album_name = albums[0]['id'], albums[0]['name']
album = Album(album_id, album_name)
return album
else:
return self.display.select_one_album(albums)
def search_artist(self, artist_name, quiet=False, limit=9):
"""Search artist by artist name.
:params artist_name: artist name.
:params quiet: automatically select the best one.
:params limit: artist count returned by weapi.
:return: a Artist object.
"""
result = self.search(artist_name, search_type=100, limit=limit)
if result['result']['artistCount'] <= 0:
LOG.warning('Artist %s not existed!', artist_name)
raise SearchNotFound('Artist {} not existed.'.format(artist_name))
else:
artists = result['result']['artists']
if quiet:
artist_id, artist_name = artists[0]['id'], artists[0]['name']
artist = Artist(artist_id, artist_name)
return artist
else:
return self.display.select_one_artist(artists)
def search_playlist(self, playlist_name, quiet=False, limit=9):
"""Search playlist by playlist name.
:params playlist_name: playlist name.
        :params quiet: automatically select the best one.
:params limit: playlist count returned by weapi.
|
:return: a Playlist object.
"""
result = self.search(playlist_name, search_type=1000, limit=limit)
if result['result']['playlistCount'] <= 0:
LOG.warning('Playlist %s not existed!', playlist_name)
raise SearchNotFound('playlist {} not existed'.format(playlist_name))
else:
playlists = result['result']['playlists']
if quiet:
playlist_id, playlist_name = playlists[0]['id'], playlists[0]['name']
playlist = Playlist(playlist_id, playlist_name)
return playlist
else:
return self.display.select_one_playlist(playlists)
def search_user(self, user_name, quiet=False, limit=9):
"""Search user by user name.
:params user_name: user name.
:params quiet: automatically select the best one.
:params limit: user count returned by weapi.
:return: a User object.
"""
result = self.search(user_name, search_type=1002, limit=limit)
if result['result']['userprofileCount'] <= 0:
LOG.warning('User %s not existed!', user_name)
raise SearchNotFound('user {} not existed'.format(user_name))
else:
users = result['result']['userprofiles']
if quiet:
user_id, user_name = users[0]['userId'], users[0]['nickname']
user = User(user_id, user_name)
return user
else:
return self.display.select_one_user(users)
def get_user_playlists(self, user_id, limit=1000):
"""Get a user's all play
|
FrancescElies/bquery
|
bquery/benchmarks/bench_groupby.py
|
Python
|
bsd-3-clause
| 2,373
| 0.000421
|
from __future__ import print_function
# bench related imports
import numpy as np
import shutil
import bquery
import pandas as pd
import itertools as itt
import cytoolz
import cytoolz.dicttoolz
from toolz import valmap, compose
from cytoolz.curried import pluck
import blaze as blz
# other imports
import contextlib
import tempfile
import os
import time
t_elapsed = 0.0
@contextlib.contextmanager
def ctime(message=None):
"Counts the time spent in some context"
global t_elapsed
t_elapsed = 0.0
print('\n')
t = time.time()
yield
if message:
print(message + ": ", end='')
t_elapsed = time.time() - t
print(round(t_elapsed, 4), "sec")
ga = itt.cycle(['ES', 'NL'])
gb = itt.cycle(['b1', 'b2', 'b3', 'b4', 'b5'])
gx = itt.cycle([1, 2])
gy = itt.cycle([-1, -2])
rootdir = 'bench-data.bcolz'
if os.path.exists(rootdir):
shutil.rmtree(rootdir)
n_rows = 1000000
print('Rows: ', n_rows)
# -- data
z = np.fromiter(((a, b, x, y) for a, b, x, y in itt.izip(ga, gb, gx, gy)),
dtype='S2,S2,i8,i8', count=n_rows)
ct = bquery.ctable(z, rootdir=rootdir, )
print(ct)
# -- pandas --
df = pd.DataFrame(z)
with ctime(message='pandas'):
result = df.groupby(['f0'])['f2'].sum()
print(result)
t_pandas = t_elapsed
# -- cytoolz --
with ctime(message='cytoolz over bcolz'):
# In Memory Split-Apply-Combine
# http://toolz.readthedocs.org/en/latest/streaming-analytics.html?highlight=reduce#split-apply-combine-with-groupby-and-reduceby
r = cytoolz.groupby(lambda row: row.f0, ct)
result = valmap(compose(sum, pluck(2)), r)
    print('x{0} slower than pandas'.format(round(t_elapsed/t_pandas, 2)))
print(result)
# -- blaze + bcolz --
blaze_data = blz.Data(ct.rootdir)
expr = blz.by(blaze_data.f0, sum_f2=blaze_data.f2.sum())
with ctime(message='blaze over bcolz'):
result = blz.compute(expr)
print('x{0} slower than pandas'.format(round(t_elapsed/t_pandas, 2)))
print(result)
# -- bquery --
with ctime(message='bquery over bcolz'):
result = ct.groupby(['f0'], ['f2'])
print('x{0} slower than pandas'.format(round(t_elapsed/t_pandas, 2)))
print(result)
ct.cache_factor(['f0'], refresh=True)
with ctime(message='bquery over bcolz (factorization cached)'):
result = ct.groupby(['f0'], ['f2'])
print('x{0} slower than pandas'.format(round(t_elapsed/t_pandas, 2)))
print(result)
shutil.rmtree(rootdir)
|
sheeshmohsin/shopping_site
|
app/cart/urls.py
|
Python
|
mit
| 546
| 0.007326
|
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'app.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^add/$', 'cart.views.add'),
url(r'^clear/$', 'cart.views.clear'),
url(r'^show/$', 'cart.views.show'),
url(r'^remove/(?P<pk>\d+)/$', 'cart.views.remove'),
    url(r'^checkout/$', 'cart.views.checkout'),
)
|
robotics-silver-surfer/surfer-main
|
lab3/hoverboard/src/hoverboard/msg/_ServoRaw.py
|
Python
|
mit
| 5,572
| 0.018844
|
"""autogenerated by genpy from hoverboard/ServoRaw.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import std_msgs.msg
class ServoRaw(genpy.Message):
_md5sum = "cf1c9d17f7bbedbe8dd2c29cdb7700f8"
_type = "hoverboard/ServoRaw"
_has_header = True #flag to mark the presence of a Header object
_full_text = """Header header
# The servo port 0 through 5 inclusive
int8 port
# The value to send 0 to 100 inclusive
int8 value
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.secs: seconds (stamp_secs) since epoch
# * stamp.nsecs: nanoseconds since stamp_secs
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
"""
  __slots__ = ['header','port','value']
_slot_types = ['std_msgs/Header','int8','int8']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
    set to None will be assigned a default value. The recommended
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,port,value
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(ServoRaw, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.port is None:
self.port = 0
if self.value is None:
self.value = 0
else:
self.header = std_msgs.msg.Header()
self.port = 0
self.value = 0
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_2b.pack(_x.port, _x.value))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 2
(_x.port, _x.value,) = _struct_2b.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self
buff.write(_struct_2b.pack(_x.port, _x.value))
except struct.error as se: self._check_types(se)
except TypeError as te: self._check_types(te)
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.header is None:
self.header = std_msgs.msg.Header()
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8')
else:
self.header.frame_id = str[start:end]
_x = self
start = end
end += 2
(_x.port, _x.value,) = _struct_2b.unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")
_struct_2b = struct.Struct("<2b")
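# Illustrative round-trip sketch (not part of the autogenerated file); assumes
# a ROS environment where genpy and std_msgs are importable.
#     from io import BytesIO
#     msg = ServoRaw(port=3, value=50)
#     buff = BytesIO()
#     msg.serialize(buff)
#     decoded = ServoRaw().deserialize(buff.getvalue())
#     assert (decoded.port, decoded.value) == (3, 50)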
|
mstepniowski/simpledb
|
search.py
|
Python
|
bsd-2-clause
| 2,080
| 0.001923
|
import sys
import struct
ULL_BYTES = struct.calcsize('!Q')
SHORT_BYTES = struct.calcsize('!h')
INDEX_HEADER = 'IDX'
def search(dbfile, prefix):
"""Returns all words having a given prefix using a dbfile."""
idx = Index.from_file(dbfile)
for letter in prefix:
if letter not in idx.nodes:
print 'No completions'
return
else:
idx = Index.from_file(dbfile, idx.nodes[letter])
if idx.nodes:
print 'Completions:'
for completion in idx.nodes:
print ' - ' + completion
if idx.leafs:
        print 'Pattern found in files:'
for path in idx.leafs:
print ' - ' + path
class Index(object):
def __init__(self, data):
self.data = data
        self.nodes = {}
self.leafs = []
self.parse()
@classmethod
def from_file(self, f, offset=0):
f.seek(offset)
size_data = f.read(len(INDEX_HEADER) + ULL_BYTES)
header, index_size = struct.unpack('!%dsQ' % len(INDEX_HEADER), size_data)
if header != INDEX_HEADER:
import ipdb; ipdb.set_trace()
data = f.read(index_size - ULL_BYTES - len(INDEX_HEADER))
return Index(data)
def parse(self):
node_count, = struct.unpack_from('!Q', self.data)
offset = ULL_BYTES
# Read nodes
for n in range(node_count):
letter_bytes, = struct.unpack_from('!h', self.data, offset)
offset += SHORT_BYTES
letter, index_offset = struct.unpack_from('!%dsQ' % letter_bytes, self.data, offset)
offset += letter_bytes + ULL_BYTES
self.nodes[letter] = index_offset
# Read leafs
while offset < len(self.data):
path_bytes, = struct.unpack_from('!h', self.data, offset)
offset += SHORT_BYTES
path, = struct.unpack_from('!%ds' % path_bytes, self.data, offset)
offset += path_bytes
self.leafs.append(path)
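# --- Illustrative sketch (not part of the original module) ---
# Counterpart writer for the on-disk layout that Index.parse expects: an 'IDX'
# header, the whole block size as '!Q', the node count, (letter, child offset)
# entries, then leaf paths. The inputs are illustrative.
def pack_index(nodes, leafs):
    body = struct.pack('!Q', len(nodes))
    for letter, child_offset in nodes.items():
        body += struct.pack('!h', len(letter))
        body += struct.pack('!%dsQ' % len(letter), letter, child_offset)
    for path in leafs:
        body += struct.pack('!h', len(path))
        body += path
    size = len(INDEX_HEADER) + ULL_BYTES + len(body)
    return struct.pack('!%dsQ' % len(INDEX_HEADER), INDEX_HEADER, size) + body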
if __name__ == '__main__':
f = file(sys.argv[1], 'rb')
search(f, sys.argv[2])
|
DavideCanton/Python3
|
audio/freq.py
|
Python
|
gpl-3.0
| 663
| 0.001508
|
import numpy as np
import struct
import wave
from winsound import PlaySound, SND_FILENAME, SND_ASYNC
import matplotlib.pyplot as plt
CHUNK = 1 << 8
def play(filename):
PlaySound(filename, SND_FILENAME | SND_ASYNC)
fn = r"D:\b.wav"
f = wave.open(fn)
print(f.getparams())
ch = f.getnchannels()
|
sw = f.getsampwidth()
n = f.getnframes()
data = bytearray()
while len(data) < n * ch * sw:
data.extend(f.readframes(CHUNK))
data = np.array(struct.unpack('{n}h'.format(n=n * ch), data))
w = np.fft.fft(data)
freqs = np.fft.fftfreq(len(w))
module = np.abs(w)
idmax = module.argmax()
print(abs(freqs[idmax]) * f.getframerate())
plt.specgram(data)
plt.show()
|
dennisobrien/bokeh
|
bokeh/sampledata/tests/test_commits.py
|
Python
|
bsd-3-clause
| 1,961
| 0.010199
|
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2017, Anaconda, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from bokeh._testing.util.api import verify_all
# Module under test
#import bokeh.sampledata.commits as bsc
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
ALL = (
'data',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
Test___all__ = pytest.mark.sampledata(verify_all("bokeh.sampledata.commits", ALL))
@pytest.mark.sampledata
def test_data(pd):
import bokeh.sampledata.commits as bsc
assert isinstance(bsc.data, pd.DataFrame)
# check detail for package data
assert len(bsc.data) == 4916
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
|
khchine5/book
|
lino_book/projects/homeworkschool/__init__.py
|
Python
|
bsd-2-clause
| 254
| 0
|
# -*- coding: UTF-8 -*-
__copyright__ = """\
Copyright (c) 2012-2013 Luc Saffre.
This software comes with ABSOLUTELY NO WARRANTY and is
distributed under the terms of the GNU Lesser General Public License.
See file COPYING.txt for more information."""
|
tikan/rmock
|
src/rmock/runners/http/proxy/handler.py
|
Python
|
lgpl-3.0
| 2,108
| 0.004744
|
# coding=utf8
#
# Copyright 2013 Dreamlab Onet.pl
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 3.0.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, visit
#
# http://www.gnu.org/licenses/lgpl.txt
#
import logging
from rmock.runners.http.handler import MockHttpHandler
from rmock.runners.http.handler import with_exception_handling
from rmock.core.call import Call
from rmock.runners.http.handler import HttpCode
logger = logging.getLogger("rmock.http-proxy")
class ProxyMockHttpHandler(MockHttpHandler):
@with_exception_handling
    def initialize(self,
rmock_data,
protocol_class,
protocol_args,
slug,
childs,
child_chooser):
super(ProxyMockHttpHandler, self).initialize(
rmock_data,
protocol_class,
protocol_args,
slug
)
self.childs = childs
self.child_chooser = child_chooser
def _process_function_call_impl(self, funcname, args, kwargs, headers):
data = Call._make(funcname=funcname,
args=args,
kwargs=kwargs,
headers=headers)
mock = self.child_chooser(data, self.childs)
if mock is None:
logger.info("404: matching mock not found")
return HttpCode(404)
logger.info("proxying request to: %s", mock.name)
return mock._rmock_data.register_call_and_get_result(
funcname, args, kwargs,
headers=headers
)
|
BlackstoneEngineering/yotta
|
yotta/test/ignores.py
|
Python
|
apache-2.0
| 6,131
| 0.008481
|
#!/usr/bin/env python
# Copyright 2015 ARM Limited
#
# Licensed under the Apache License, Version 2.0
# See LICENSE file for details.
# standard library modules, , ,
import unittest
import os
import tempfile
# internal modules:
from yotta.lib.fsutils import mkDirP, rmRf
from yotta.lib.detect import systemDefaultTarget
from yotta.lib import component
from .cli import cli
Test_Files = {
'.yotta_ignore': '''
#comment
/moo
b/c/d
b/c/*.txt
/a/b/test.txt
b/*.c
/source/a/b/test.txt
/test/foo
sometest/a
someothertest
ignoredbyfname.c
''',
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"description": "Module to test test-dependencies and ignoring things",
"author": "autopulated",
"licenses": [
{
"url": "https://spdx.org/licenses/Apache-2.0",
"type": "Apache-2.0"
}
],
"dependencies": {},
"testDependencies": {}
}
''',
'a/b/c/d/e/f/test.txt': '',
'a/b/c/d/e/test.c': '#error should be ignored',
'a/b/c/d/e/test.txt': '',
'a/b/c/d/test.c': '#error should be ignored',
'a/b/c/d/test.txt': '',
'a/b/c/d/z/test.c':'#error should be ignored',
'a/b/c/test.txt': '',
'a/b/test.txt':'',
'a/test.txt':'',
'comment':'# should not be ignored',
'f/f.h':'''
#ifndef __F_H__
#define __F_H__
int f();
#endif
''',
'source/moo/test.txt':'',
'source/a/b/c/d/e/f/test.txt': '',
'source/a/b/c/d/e/test.c': '#error should be ignored',
'source/a/b/c/d/e/test.txt': '',
'source/a/b/c/d/test.c': '#error should be ignored',
'source/a/b/c/d/test.txt': '',
'source/a/b/c/d/z/test.c':'#error should be ignored',
'source/a/b/c/test.txt': '',
'source/a/b/test.txt':'',
'source/a/test.txt':'',
'source/f.c':'''
int f(){
return 6;
}
''',
'test/anothertest/ignoredbyfname.c':'#error should be ignored',
'test/anothertest/ignoredbyfname.c':'''
#include <stdio.h>
#include "f/f.h"
int main(){
int result = f();
printf("%d\n", result);
return !(result == 6);
}
''',
'test/foo/ignored.c':'''
#error should be ignored
''',
'test/someothertest/alsoignored.c':'''
#error should be ignored
''',
'test/sometest/a/ignored.c':'''
#error should be ignored
'''
}
Default_Test_Files = {
'module.json': '''
{
"name": "test-testdep-f",
"version": "0.0.6",
"license": "Apache-2.0"
}'''
}
def isWindows():
# can't run tests that hit github without an authn token
return os.name == 'nt'
def writeTestFiles(files):
test_dir = tempfile.mkdtemp()
for path, contents in files.items():
path_dir, file_name = os.path.split(path)
path_dir = os.path.join(test_dir, path_dir)
mkDirP(path_dir)
with open(os.path.join(path_dir, file_name), 'w') as f:
f.write(contents)
return test_dir
class TestPackIgnores(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.test_dir = writeTestFiles(Test_Files)
@classmethod
def tearDownClass(cls):
rmRf(cls.test_dir)
def test_absolute_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('moo'))
self.assertTrue(c.ignores('test/foo/ignored.c'))
def test_glob_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('a/b/c/test.txt'))
self.assertTrue(c.ignores('a/b/test.txt'))
self.assertTrue(c.ignores('a/b/test.c'))
self.assertTrue(c.ignores('source/a/b/c/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.c'))
def test_relative_ignores(self):
c = component.Component(self.test_dir)
self.assertTrue(c.ignores('a/b/c/d/e/f/test.txt'))
self.assertTrue(c.ignores('a/b/test.txt'))
self.assertTrue(c.ignores('source/a/b/c/d/e/f/test.txt'))
self.assertTrue(c.ignores('source/a/b/test.txt'))
self.assertTrue(c.ignores('test/anothertest/ignoredbyfname.c'))
self.assertTrue(c.ignores('test/someothertest/alsoignored.c'))
def test_default_ignores(self):
default_test_dir = writeTestFiles(Default_Test_Files)
c = component.Component(default_test_dir)
self.assertTrue(c.ignores('.something.c.swp'))
self.assertTrue(c.ignores('.something.c~'))
self.assertTrue(c.ignores('path/to/.something.c.swm'))
self.assertTrue(c.ignores('path/to/.something.c~'))
self.assertTrue(c.ignores('.DS_Store'))
self.assertTrue(c.ignores('.git'))
self.assertTrue(c.ignores('.hg'))
self.assertTrue(c.ignores('.svn'))
self.assertTrue(c.ignores('yotta_modules'))
self.assertTrue(c.ignores('yotta_targets'))
self.assertTrue(c.ignores('build'))
self.assertTrue(c.ignores('.yotta.json'))
rmRf(default_test_dir)
def test_comments(self):
c = component.Component(self.test_dir)
self.assertFalse(c.ignores('comment'))
@unittest.skipIf(isWindows(), "can't build natively on windows yet")
def test_build(self):
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'build'], self.test_dir)
self.assertNotIn('ignoredbyfname', stdout)
self.assertNotIn('someothertest', stdout)
self.assertNotIn('sometest', stdout)
@unittest.skipIf(isWindows(), "can't build natively on windows yet")
def test_test(self):
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'clean'], self.test_dir)
stdout = self.runCheckCommand(['--target', systemDefaultTarget(), 'test'], self.test_dir)
self.assertNotIn('ignoredbyfname', stdout)
self.assertNotIn('someothertest', stdout)
self.assertNotIn('sometest', stdout)
def runCheckCommand(self, args, test_dir):
stdout, stderr, statuscode = cli.run(args, cwd=self.test_dir)
if statuscode != 0:
print('command failed with status %s' % statuscode)
print(stdout)
print(stderr)
self.assertEqual(statuscode, 0)
return stdout or stderr
if __name__ == '__main__':
unittest.main()
|
jobovy/apogee-maps
|
py/define_rgbsample.py
|
Python
|
bsd-3-clause
| 20,638
| 0.02413
|
import math
import numpy
import statsmodels.api as sm
lowess= sm.nonparametric.lowess
import esutil
from galpy.util import bovy_coords, bovy_plot
from scipy.interpolate import interp1d,UnivariateSpline
import apogee.tools.read as apread
import isodist
import numpy as np
import matplotlib.pyplot as plt
import os
import pickle
import apogee.tools.read as apread
from apogee.select import apogeeSelect
from astropy.io import fits
from astropy.table import Table, join
_R0= 8. # kpc
_Z0= 0.025 # kpc
_FEHTAG= 'FE_H'
_AFETAG= 'AVG_ALPHAFE'
_AFELABEL= r'$[\left([\mathrm{O+Mg+Si+S+Ca}]/5\right)/\mathrm{Fe}]$'
catpath = '../catalogues/'
selectFile= '../savs/selfunc-nospdata.sav'
if os.path.exists(selectFile):
with open(selectFile,'rb') as savefile:
apo= pickle.load(savefile)
def get_rgbsample(loggcut = [1.8, 3.0],
teffcut = [0, 10000],
add_ages = False,
agetype='Martig',
apply_corrections=False,
distance_correction=False,
verbose = False):
"""
Get a clean sample of dr12 APOGEE data with Michael Haydens distances
---
INPUT:
None
OUTPUT:
Clean rgb sample with added distances
HISTORY:
Started - Mackereth 02/06/16
"""
#get the allStar catalogue using apogee python (exlude all bad flags etc)
allStar = apread.allStar(rmcommissioning=True,
exclude_star_bad=True,
exclude_star_warn=True,
main=True,
ak=True,
adddist=False)
#cut to a 'sensible' logg range (giants which are not too high on the RGB)
allStar = allStar[(allStar['LOGG'] > loggcut[0])&(allStar['LOGG'] < loggcut[1])&
(allStar['TEFF'] > teffcut[0])&(allStar['TEFF'] < teffcut[1])]
if verbose == True:
print str(len(allStar))+' Stars before Distance catalogue join (after Log(g) cut)'
#load the distance VAC
dists = fits.open(catpath+'DR12_DIST_R-GC.fits')[1].data
#convert to astropy Table
allStar_tab = Table(data=allStar)
dists_tab = Table(data=dists)
#join table
tab = join(allStar_tab, dists_tab, keys='APOGEE_ID', uniq_col_name='{col_name}{table_name}', table_names=['','2'])
data = tab.as_array()
data= esutil.numpy_util.add_fields(data,[('M_J', float),
('M_H', float),
('M_K', float),
('MH50_DIST', float),
('MH50_GALR', float),
('MH50_GALZ', float),
('MH50_GALPHI', float),
('AVG_ALPHAFE', float)])
data['MH50_DIST'] = (10**((data['HAYDEN_DISTMOD_50']+5)/5))/1e3
if distance_correction == True:
data['MH50_DIST'] *= 1.05
XYZ= bovy_coords.lbd_to_XYZ(data['GLON'],
data['GLAT'],
data['MH50_DIST'],
degree=True)
RphiZ= bovy_coords.XYZ_to_galcencyl(XYZ[:,0],
XYZ[:,1],
XYZ[:,2],
Xsun=8.,Zsun=0.025)
data['MH50_GALR']= RphiZ[:,0]
data['MH50_GALPHI']= RphiZ[:,1]
data['MH50_GALZ']= RphiZ[:,2]
data['M_J'] = data['J0']-data['HAYDEN_DISTMOD_50']
data['M_H'] = data['H0']-data['HAYDEN_DISTMOD_50']
data['M_K'] = data['K0']-data['HAYDEN_DISTMOD_50']
data['AVG_ALPHAFE'] = avg_alphafe_dr12(data)
data[_FEHTAG] += -0.1
#remove locations not in the apogee selection function (FIND OUT WHATS UP HERE)
data = data[np.in1d(data['LOCATION_ID'], apo.list_fields())]
# Remove locations outside of the Pan-STARRS dust map
# In the Southern hemisphere
data= data[data['LOCATION_ID'] != 4266] #240,-18
data= data[data['LOCATION_ID'] != 4331] #5.5,-14.2
data= data[data['LOCATION_ID'] != 4381] #5.2,-12.2
data= data[data['LOCATION_ID'] != 4332] #1,-4
data= data[data['LOCATION_ID'] != 4329] #0,-5
data= data[data['LOCATION_ID'] != 4351] #0,-2
data= data[data['LOCATION_ID'] != 4353] #358,0
data= data[data['LOCATION_ID'] != 4385] #358.6,1.4
# Close to the ecliptic pole where there's no data (is it the ecliptic pole?
data= data[data['LOCATION_ID'] != 4528] #120,30
data= data[data['LOCATION_ID'] != 4217] #123,22.4
#remove any non-finite magnitudes
data = data[np.isfinite(data['M_H'])]
if verbose == True:
print str(len(data))+' Stars with distance measures (and in good fields...)'
if add_ages == True:
if agetype == 'Martig':
ages = fits.open(catpath+'DR12_martigages_vizier.fits')[1].data
idtag = '2MASS_ID'
if agetype == 'Cannon':
ages = fits.open(catpath+'RGB_Cannon_Ages.fits')[1].data
ages = esutil.numpy_util.add_fields(ages,[('Age', float)])
ages['Age'] = np.exp(ages['ln_age'])
idtag = 'ID'
ages_tab = Table(data=ages)
ages_tab.rename_column(idtag, 'APOGEE_ID')
tab = join( ages_tab,data, keys='APOGEE_ID', uniq_col_name='{col_name}{table_name}', table_names=['','2'])
allStar_full = tab.as_array()
data = allStar_full
if verbose == True:
print str(len(data))+' Stars with ages'
if apply_corrections == True:
#martig1 = np.genfromtxt(catpath+'martig2016_table1.txt', dtype=None, names=True, skip_header=2)
martig1 = fits.open(catpath+'martig_table1.fits')
fit = lowess(np.log10(martig1['Age_out']),np.log10(martig1['Age_in']))
xs = np.linspace(-0.3,1.2,100)
xsinterpolate = interp1d(xs,xs)
fys = fit[:,0]-xsinterpolate(fit[:,1])
interp = UnivariateSpline(fit[:,1], fys)
corr_age = np.log10(data['Age'])+(interp(np.log10(data['Age'])))
corr_age = 10**corr_age
data['Age'] = corr_age
return data
def avg_alphafe_dr12(data):
weight_o= np.ones(len(data))
weight_s= np.ones(len(data))
weight_si= np.ones(len(data))
weight_ca= np.ones(len(data))
weight_mg= np.ones(len(data))
weight_o[data['O_H'] == -9999.0]= 0.
weight_s[data['S_H'] == -9999.0]= 0.
weight_si[data['SI_H'] == -9999.0]= 0.
weight_ca[data['CA_H'] == -9999.0]= 0.
weight_mg[data['MG_H'] == -9999.0]= 0.
return (weight_o*data['O_H']+weight_s*data['S_H']
+weight_si*data['SI_H']+weight_ca*data['CA_H']
+weight_mg*data['MG_H'])/(weight_o+weight_s
+weight_si+weight_ca
+weight_mg)\
-data['FE_H']-0.05
# Define the low-alpha, low-iron sample
def _lowlow_lowfeh(afe):
# The low metallicity edge
return -0.6
def _lowlow_highfeh(afe):
# The high metallicity edge
return -0.25
def _lowlow_lowafe(feh):
# The low alpha edge (-0.15,-0.075) to (-0.5,0)
return (0--0.075)/(-0.5--0.15)*(feh+0.1--0.15)-0.075
def _lowlow_highafe(feh):
# The high alpha edge (-0.15,0.075) to (-0.5,0.15)
return (0.15-0.075)/(-0.5--0.15)*(feh+0.1--0.15)+0.075
def get_lowlowsample():
"""
NAME:
get_lowlowsample
    PURPOSE:
get the RGB sample at low alpha, low iron
INPUT:
None so far
OUTPUT:
sample
HISTORY:
2015-03-18 - Started - Bovy (IAS)
2016-07-02 - modification - Mackereth (LJMU)
"""
# Get the full sample first
data= get_rgbsample()
# Now cut it
lowfeh= _lowlow_lowfeh(0.)
highfeh= _lowlow_highfeh(0.)
indx= (data[_FEHTAG] > lowfeh)*(data[_FEHTAG] <= highfeh)\
*(data[_AFETAG] > _lowlow_lowafe(data[_FEHTAG]))\
*(data[_AFETAG] <= _lowlow_highafe(data[_FEHTAG]))
return data[indx]
# Define the high-alpha sample
def _highalpha_lowfeh(afe):
# The low metallicity edge
return -0.8
def _highalpha_highfeh(afe):
# The high metallicity edge
return -0.2
def _highalpha_lowafe(feh):
# The low alpha edge (-0.125,0.115) to (-0.6,0.215)
return (0.2-0.1)/(-0.6--0.125)*(feh+0.1--0.125)+0.115
def _highalpha_highafe(feh):
# The high alpha edge (-0.125,0.19) to (-0.6,0.29)
return (0.275-0.175)/(-0.6--0.125)*(feh+0.1--0.125)+0.19
def get_highalphasample():
"""
NAME:
get_highalphasample
PURPOSE:
get the RC sample at high alpha
INPUT:
None so far
OUTPUT:
sample
|
mtils/ems
|
ems/resource/dict_attribute_repository.py
|
Python
|
mit
| 1,547
| 0.001939
|
from ems.typehint import accepts
from ems.resource.repository import Repository
class DictAttributeRepository(Repository):
@accepts(Repository)
def __init__(self, sourceRepo, sourceAttribute='data'):
self._sourceRepo = sourceRepo
self.sourceAttribute = sourceAttribute
def get(self, id_):
"""
Return an object by its id
:returns: dict
"""
model = self._sourceRepo.get(id_)
data = getattr(model, self.sourceAttribute)
data['ID'] = id_
return data
def new(self, attributes=None):
"""
Instantiate an object
:returns: object
"""
model = self._sourceRepo.new()
data = getattr(model, self.sourceAttribute)
for key in attributes:
data[key] = attributes[key]
return data
    def store(self, attributes, obj=None):
"""
Store a new object. Create on if non passed, if one passed store the
passed one
        :returns: object
"""
if obj:
raise TypeError("Obj has to be None")
sourceAttributes = {self.sourceAttribute:self.new(attributes)}
if 'ID' not in sourceAttributes:
raise KeyError("attributes have to contain ")
model = self._sourceRepo.store(sourceAttributes)
return getattr(model, self.sourceAttribute)
def update(self, model, changedAttributes):
"""
Update model by changedAttributes and save it
:returns: object
"""
pass
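# Illustrative usage sketch (not part of the original module): wrapping a
# hypothetical source repository whose models expose a ``data`` dict attribute.
#     repo = DictAttributeRepository(userModelRepo, sourceAttribute='data')
#     user = repo.get(42)   # -> {'ID': 42, ...}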
|
jansohn/pyload
|
module/plugins/accounts/ShareonlineBiz.py
|
Python
|
gpl-3.0
| 2,110
| 0.010427
|
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.Account import Account
from module.plugins.internal.Plugin import set_cookie
class ShareonlineBiz(Account):
__name__ = "ShareonlineBiz"
__type__ = "account"
__version__ = "0.41"
__status__ = "testing"
__description__ = """Share-online.biz account plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
def api_response(self, user, password):
res = self.load("https://api.share-online.biz/cgi-bin",
get={'q' : "userdetails",
'aux' : "traffic",
'username': user,
'password': password},
decode=False)
self.log_debug(res)
api = dict(line.split("=") for line in res.splitlines() if "=" in line)
if not 'a' in api:
self.fail_login(res.strip('*'))
if api['a'].lower() == "not_available":
self.fail_login(_("No info available"))
return api
def grab_info(self, user, password, data):
|
premium = False
        validuntil = None
trafficleft = -1
maxtraffic = 100 * 1024 * 1024 * 1024 #: 100 GB
api = self.api_response(user, password)
premium = api['group'] in ("PrePaid", "Premium", "Penalty-Premium")
validuntil = float(api['expire_date'])
traffic = float(api['traffic_1d'].split(";")[0])
if maxtraffic > traffic:
trafficleft = maxtraffic - traffic
else:
trafficleft = -1
maxtraffic /= 1024 #@TODO: Remove `/ 1024` in 0.4.10
trafficleft /= 1024 #@TODO: Remove `/ 1024` in 0.4.10
return {'premium' : premium,
'validuntil' : validuntil,
'trafficleft': trafficleft,
'maxtraffic' : maxtraffic}
def signin(self, user, password, data):
api = self.api_response(user, password)
set_cookie(self.req.cj, "share-online.biz", 'a', api['a'])
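# Illustrative sketch (not part of the original plugin): the "key=value per
# line" parsing idiom used in api_response, applied to a made-up response.
#     sample = "a=0123456789abcdef\ngroup=Premium\nexpire_date=1700000000"
#     api = dict(line.split("=") for line in sample.splitlines() if "=" in line)
#     # api['group'] == 'Premium'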
|
gautampanday/nereid-webshop
|
tests/test_css.py
|
Python
|
bsd-3-clause
| 1,062
| 0.000942
|
# -*- coding: utf-8 -*-
"""
CSS Testing
:copyright: (C) 2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
from os.path import join
from cssutils import CSSParser
import unittest
import trytond.tests.test_tryton
dir = 'static/css/'
class CSSTest(unittest.TestCase):
"""
Test case for CSS.
|
"""
def validate(self, filename):
"""
Uses cssutils to validate a css file.
Prints output using a logger.
"""
CSSParser(raiseExceptions=True).parseFile(filename, validate=True)
def test_css(self):
"""
Test for CSS validation using W3C standards.
"""
cssfile = join(dir, 'style.css')
self.validate(cssfile)
def suite():
"""
Define suite
"""
test_suite = trytond.tests.test_tryton.suite()
test_suite.addTests(
unittest.TestLoader().loadTestsFromTestCase(CSSTest)
)
return test_suite
if __name__ == '__main__':
unittest.TextTestRunner(verbosity=2).run(suite())
|
CredoReference/edx-platform
|
common/djangoapps/third_party_auth/pipeline.py
|
Python
|
agpl-3.0
| 35,110
| 0.003019
|
"""Auth pipeline definitions.
Auth pipelines handle the process of authenticating a user. They involve a
consumer system and a provider service. The general pattern is:
1. The consumer system exposes a URL endpoint that starts the process.
2. When a user visits that URL, the client system redirects the user to a
page served by the provider. The user authenticates with the provider.
The provider handles authentication failure however it wants.
3. On success, the provider POSTs to a URL endpoint on the consumer to
invoke the pipeline. It sends back an arbitrary payload of data about
the user.
4. The pipeline begins, executing each function in its stack. The stack is
defined on django's settings object's SOCIAL_AUTH_PIPELINE. This is done
in settings._set_global_settings.
5. Each pipeline function is variadic. Most pipeline functions are part of
the pythons-social-auth library; our extensions are defined below. The
pipeline is the same no matter what provider is used.
6. Pipeline functions can return a dict to add arguments to the function
invoked next. They can return None if this is not necessary.
7. Pipeline functions may be decorated with @partial.partial. This pauses
the pipeline and serializes its state onto the request's session. When
this is done they may redirect to other edX handlers to execute edX
account registration/sign in code.
8. In that code, redirecting to get_complete_url() resumes the pipeline.
This happens by hitting a handler exposed by the consumer system.
9. In this way, execution moves between the provider, the pipeline, and
arbitrary consumer system code.
Gotcha alert!:
Bear in mind that when pausing and resuming a pipeline function decorated with
@partial.partial, execution resumes by re-invoking the decorated function
instead of invoking the next function in the pipeline stack. For example, if
you have a pipeline of
A
B
C
with an implementation of
@partial.partial
def B(*args, **kwargs):
[...]
B will be invoked twice: once when initially proceeding through the pipeline
before it is paused, and once when other code finishes and the pipeline
resumes. Consequently, many decorated functions will first invoke a predicate
to determine if they are in their first or second execution (usually by
checking side-effects from the first run).
This is surprising but important behavior, since it allows a single function in
the pipeline to consolidate all the operations needed to establish invariants
rather than spreading them across two functions in the pipeline.
See http://python-social-auth.readthedocs.io/en/latest/pipeline.html for more docs.
"""
import base64
import hashlib
import hmac
import json
import urllib
from collections import OrderedDict
from logging import getLogger
from smtplib import SMTPException
import analytics
from django.conf import settings
from django.contrib.auth.models import User
from django.core.mail.message import EmailMessage
from django.urls import reverse
from django.http import HttpResponseBadRequest
from django.shortcuts import redirect
import social_django
from social_core.exceptions import AuthException
from social_core.pipeline import partial
from social_core.pipeline.social_auth import associate_by_email
import student
from edxmako.shortcuts import render_to_string
from eventtracking import tracker
from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers
from third_party_auth.utils import user_exists
from lms.djangoapps.verify_student.models import SSOVerification
from lms.djangoapps.verify_student.utils import earliest_allowed_verification_date
from . import provider
# These are the query string params you can pass
# to the URL that starts the authentication process.
#
# `AUTH_ENTRY_KEY` is required and indicates how the user
# enters the authentication process.
#
# `AUTH_REDIRECT_KEY` provides an optional URL to redirect
# to upon successful authentication
# (if not provided, defaults to `_SOCIAL_AUTH_LOGIN_REDIRECT_URL`)
AUTH_ENTRY_KEY = 'auth_entry'
AUTH_REDIRECT_KEY = 'next'
# The following are various possible values for the AUTH_ENTRY_KEY.
AUTH_ENTRY_LOGIN = 'login'
AUTH_ENTRY_REGISTER = 'register'
AUTH_ENTRY_ACCOUNT_SETTINGS = 'account_settings'
# Entry modes into the authentication process by a remote API call (as opposed to a browser session).
AUTH_ENTRY_LOGIN_API = 'login_api'
AUTH_ENTRY_REGISTER_API = 'register_api'
# AUTH_ENTRY_CUSTOM: Custom auth entry point for post-auth integrations.
# This should be a dict where the key is a word passed via ?auth_entry=, and the
# value is a dict with an arbitrary 'secret_key' and a 'url'.
# This can be used as an extension point to inject custom behavior into the auth
# process, replacing the registration/login form that would normally be seen
# immediately after the user has authenticated with the third party provider.
# If a custom 'auth_entry' query parameter is used, then once the user has
# authenticated with a specific backend/provider, they will be redirected to the
# URL specified with this setting, rather than to the built-in
# registration/login form/logic.
AUTH_ENTRY_CUSTOM = getattr(settings, 'THIRD_PARTY_AUTH_CUSTOM_AUTH_FORMS', {})
def is_api(auth_entry):
"""Returns whether the auth entry point is via an API call."""
return (auth_entry == AUTH_ENTRY_LOGIN_API) or (auth_entry == AUTH_ENTRY_REGISTER_API)
# URLs associated with auth entry points
# These are used to request additional user information
# (for example, account credentials when logging in),
# and when the user cancels the auth process
# (e.g., refusing to grant permission on the provider's login page).
# We don't use "reverse" here because doing so may cause modules
# to load that depend on this module.
AUTH_DISPATCH_URLS = {
AUTH_ENTRY_LOGIN: '/login',
AUTH_ENTRY_REGISTER: '/register',
AUTH_ENTRY_ACCOUNT_SETTINGS: '/account/settings',
}
_AUTH_ENTRY_CHOICES = frozenset([
AUTH_ENTRY_LOGIN,
AUTH_ENTRY_REGISTER,
AUTH_ENTRY_ACCOUNT_SETTINGS,
AUTH_ENTRY_LOGIN_API,
AUTH_ENTRY_REGISTER_API,
] + AUTH_ENTRY_CUSTOM.keys())
logger = getLogger(__name__)
class AuthEntryError(AuthException):
"""Raised when auth_entry is invalid on URLs.
auth_entry tells us whether the auth flow was initiated to register a new
user (in which case it has the value of AUTH_ENTRY_REGISTER) or log in an
existing user (in which case it has the value of AUTH_ENTRY_LOGIN).
This is necessary because the edX code we hook into the pipeline to
redirect to the existing auth flows needs to know what case we are in in
order to format its output correctly (for example, the register code is
invoked earlier than the login code, and it needs to know if the login flow
was requested to dispatch correctly).
"""
class ProviderUserState(object):
"""Object representing the provider state (attached or not) for a user.
This is intended only for use when rendering templates. See for example
lms/templates/dashboard.html.
"""
def __init__(self, enabled_provider, user, association):
# Boolean. Whether the user has an account associated with the provider
self.has_account = association is not None
if self.has_account:
# UserSocialAuth row ID
self.association_id = association.id
# Identifier of this user according to the remote provider:
self.remote_id = enabled_provider.get_remote_id_from_social_auth(association)
else:
self.association_id = None
self.remote_id = None
# provider.BaseProvider child. Callers must verify that the provider is
# enabled.
self.provider = enabled_provider
# django.contrib.auth.models.User.
self.user = user
def get_unlink_form_name(self):
"""Gets the name used in HTML forms that unlink a provider account."""
return self.provider.provider_id + '_unlink_form'
def get(request):
"""Gets the running pipeline's
|
aurora-pro/apex-sigma
|
sigma/plugins/fun/cyanideandhappiness.py
|
Python
|
gpl-3.0
| 720
| 0.001393
|
import discord
import random
import aiohttp
from lxml import html as l
async def cyanideandhappiness(cmd, message, args):
comic_number = random.randint(1, 4562)
comic_url = f'http://explosm.net/comics/{comic_number}/'
async with aiohttp.ClientSession() as session:
async with session.get(comic_url) as data:
page = await data.text()
root = l.fromstring(page)
comic_element = root.cssselect('#main-comic')
    comic_img_url = comic_element[0].attrib['src']
if comic_img_url.startswith('//'):
comic_img_url = 'https:' + comic_img_url
embed = discord.Embed(color=0x1ABC9C)
embed.set_image(url=comic_img_url)
await message.channel.send(None, embed=embed)
|
ftp21/BoilerPi
|
server.py
|
Python
|
gpl-3.0
| 3,260
| 0.010123
|
import multiprocessing
import socket
import re
import time
def handle(connection, address):
import logging
logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("process-%r" % (address,))
try:
logger.debug("Connected %r at %r", connection, address)
while True:
data = connection.recv(1024)
if data == "":
logger.debug("Socket closed remotely")
connection.shutdown(1)
break
logger.debug("Received data %r", data)
if re.search("^GETTEMP",data):
logger.debug("Send temperatura")
connection.sendall(str(18)+'\n\r')
if re.search("^ACCENDI",data):
logger.debug("Accendo termosifoni")
connection.sendall('ACCESO\n\r')
if re.search("^SPEGNI",data):
logger.debug("Spegno termosifoni")
connection.sendall('SPEGNI\n\r')
except:
logger.exception("Problem handling request")
finally:
logger.debug("Closing socket")
connection.close()
class Server(object):
def __init__(self, hostname, port):
import logging
self.logger = logging.getLogger("server")
self.hostname = hostname
self.port = port
def start(self):
self.logger.debug("listening")
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.bind((self.hostname, self.port))
|
self.socket.listen(1)
while True:
conn, address = self.socket.accept()
self.logger.debug("Got co
|
nnection")
process = multiprocessing.Process(target=handle, args=(conn, address))
process.daemon = True
process.start()
self.logger.debug("Started process %r", process)
def getTemp():
return 18
def checkTemp():
logging.info("Start checktemp")
stato=2
while True:
f= open("config.ini","r")
(mintemp,maxtemp)=f.readline().split("|")
mintemp=mintemp.rstrip()
maxtemp=maxtemp.rstrip()
logging.debug("Min: %s Max: %s" % (mintemp,maxtemp))
f.close()
if getTemp()<int(mintemp) and stato != 0:
logging.debug("Temperatura bassa accendo i termosifoni")
stato=0
time.sleep(3)
if getTemp()>=int(maxtemp) and stato != 1:
logging.debug("Temperatura alta spegno i termosifoni")
stato=1
time.sleep(7)
if __name__ == "__main__":
import logging
logging.basicConfig(level=logging.DEBUG,format="%(asctime)s - %(levelname)s - %(message)s")
process = multiprocessing.Process(target=checkTemp)
process.daemon = True
process.start()
server = Server("0.0.0.0", 9000)
server.allow_reuse_address=True
try:
logging.info("Listening")
server.start()
except:
logging.exception("Unexpected exception")
finally:
logging.info("Shutting down")
for process in multiprocessing.active_children():
logging.info("Shutting down process %r", process)
process.terminate()
process.join()
logging.info("All done")
|
OCA/stock-logistics-warehouse
|
stock_measuring_device_zippcube/models/__init__.py
|
Python
|
agpl-3.0
| 131
| 0
|
# Copyright 2021 Camptocamp SA
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl.html)
from . import measuring_device
|
Indmind/Jomblo-Story
|
module/loading.py
|
Python
|
mit
| 442
| 0.00905
|
import time
import sys
def createDots(length, delay):
for i in range(length):
print('.', end='')
sys.stdout.flush()
time.sleep(delay)
def createHash(length, delay):
for i in range(length):
print('#', end='')
sys.stdout.flush()
time.sleep(delay)
def createVrDots(length, delay):
for i in range(length):
print('.')
        time.sleep(delay)
def deGa():
time.sleep(.3)
|
willu47/SALib
|
tests/test_ff.py
|
Python
|
mit
| 6,285
| 0.004455
|
'''
Created on 30 Jun 2015
@author: @willu47
'''
import numpy as np
from numpy.testing import assert_equal, assert_allclose
from SALib.sample.ff import sample, find_smallest, extend_bounds
from SALib.analyze.ff import analyze, interactions
def test_find_smallest():
'''
'''
num_vars = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 15, 16, 17, 31, 32, 33]
expected = [0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 6]
for x, y in zip(num_vars, expected):
actual = find_smallest(x)
assert_equal(actual, y)
def test_extend_bounds():
problem = {'bounds': np.repeat([-1, 1], 12).reshape(2, 12).T,
'num_vars': 12,
'names': ["x" + str(x + 1) for x in range(12)]
}
actual = extend_bounds(problem)
expected = {'names': ['x1', 'x2', 'x3', 'x4',
'x5', 'x6', 'x7', 'x8',
'x9', 'x10', 'x11', 'x12',
'dummy_0', 'dummy_1', 'dummy_2', 'dummy_3'],
'bounds': [np.array([-1, 1]), np.array([-1, 1]),
np.array([-1, 1]), np.array([-1, 1]),
np.array([-1, 1]), np.array([-1, 1]),
np.array([-1, 1]), np.array([-1, 1]),
np.array([-1, 1]), np.array([-1, 1]),
np.array([-1, 1]), np.array([-1, 1]),
np.array([0, 1]), np.array([0, 1]),
np.array([0, 1]), np.array([0, 1])],
'num_vars': 16}
assert_equal(actual, expected)
def test_ff_sample():
problem = {'bounds': [[0., 1.], [0., 1.], [0., 1.], [0., 1.]],
'num_vars': 4,
'names': ['x1', 'x2', 'x3', 'x4']}
actual = sample(problem)
expected = np.array([[ 1, 1, 1, 1],
[ 1, 0, 1, 0],
[ 1, 1, 0, 0],
[ 1, 0, 0, 1],
[0, 0, 0, 0],
[0, 1, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 0]], dtype=np.float)
assert_equal(actual, expected)
def test_ff_sample_scaled():
'''
'''
problem = {'bounds': [[0., 2.5], [0., 1.], [0., 1.], [0., 1.]],
'num_vars': 4,
'names': ['x1', 'x2', 'x3', 'x4']}
actual = sample(problem)
expected = np.array([[ 2.5, 1, 1, 1],
[ 2.5, 0, 1, 0],
[ 2.5, 1, 0, 0],
[ 2.5, 0, 0, 1],
[0, 0, 0, 0],
[0, 1, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 0]], dtype=np.float)
assert_equal(actual, expected)
def test_ff_analyze():
'''
'''
problem = {'bounds': [[0., 2.5], [0., 1.], [0., 1.], [0., 1.]],
'num_vars': 4,
'names': ['x1', 'x2', 'x3', 'x4']}
X = np.array([[ 1, 1, 1, 1],
[ 1, 0, 1, 0],
[ 1, 1, 0, 0],
[ 1, 0, 0, 1],
[0, 0, 0, 0],
[0, 1, 0, 1],
[0, 0, 1, 1],
[0, 1, 1, 0]], dtype=np.float)
Y = np.array([1.5, 1, 1.5, 1, 2, 2.5, 2, 2.5], dtype=np.float)
actual = analyze(problem, X, Y)
expected = {'ME': np.array([ -0.5 , 0.25, 0. , 0. ]), 'names': ['x1', 'x2', 'x3', 'x4']}
assert_equal(actual, expected)
def test_ff_example():
'''
'''
problem = {'bounds': np.repeat([-1, 1], 12).reshape(2, 12).T,
'num_vars': 12,
'names': ["x" + str(x + 1) for x in range(12)]
}
X = sample(problem)
Y = X[:, 0] + 2 * X[:, 1] + 3 * X[:, 2] + 4 * X[:, 6] * X[:, 11]
expected = np.array([10, -2, 4, -8, 2, 6, -4,
0, 2, 6, -4, 0, 10, -2, 4, -8,
- 2, -6, 4, 0, -10, 2, -4, 8,
- 10, 2, -4, 8, -2, -6, 4, 0])
assert_equal(Y, expected)
Si = analyze(problem, X, Y)
expected = np.array([1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0], dtype=np.float)
assert_equal(expected, Si['ME'])
def test_interactions_from_saltelli():
'''
'''
problem = {'bounds': np.repeat([-1, 1], 12).reshape(2, 12).T,
'num_vars': 12,
'names': ["x" + str(x + 1) for x in range(12)]
}
X = sample(problem)
Y = np.array([10, -2, 4, -8, 2, 6, -4, 0,
2, 6, -4, 0, 10, -2, 4, -8,
- 2, -6, 4, 0, -10, 2, -4, 8,
- 10, 2, -4, 8, -2, -6, 4, 0])
Si = analyze(problem, X, Y, second_order=True)
actual = Si['IE']
expected = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0, 0.0, 0.0, 0.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0, 0.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 4.0,
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
assert_equal(actual, expected)
def test_interactions():
'''
'''
problem = {'bounds': [[0., 2.5], [0., 1.], [0., 1.], [0., 1.]],
'num_vars': 4,
'names': ['x1', 'x2', 'x3', 'x4']}
X = np.array([[ 2.5, 1.0, 1.0, 1.0],
[ 2.5, 0, 1.0, 0],
[ 2.5, 1.0, 0, 0],
[ 2.5, 0, 0, 1.0],
[0, 0, 0, 0],
[0, 1.0, 0, 1.0],
[0, 0, 1.0, 1.0],
[0, 1.0, 1.0, 0]], dtype=np.float)
Y = X[:, 0] + (0.1 * X[:, 1]) + ((1.2 * X[:, 2]) * (1.3 + X[:, 3]))
# Y = np.array([1.5, 1, 1.5, 1, 2, 2.5, 2, 2.5], dtype=np.float)
ie_names, ie = interactions(problem, Y, print_to_console=True)
actual = ie
assert_allclose(actual, [0.3, 0, 0, 0, 0, 0.3], rtol=1e-4, atol=1e-4)
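
# A condensed usage sketch of the API exercised by the tests above: define a
# problem, draw the fractional factorial design with sample(), evaluate a
# stand-in model, and recover main effects with analyze(). The model below is
# invented purely for illustration.
def example_workflow():
    problem = {'bounds': [[0., 1.], [0., 1.], [0., 1.], [0., 1.]],
               'num_vars': 4,
               'names': ['x1', 'x2', 'x3', 'x4']}
    X = sample(problem)                    # 8 runs x 4 factors, as in test_ff_sample
    Y = X[:, 0] + 2 * X[:, 1] * X[:, 2]    # stand-in model output
    Si = analyze(problem, X, Y)
    return Si['ME']                        # one main effect per factor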
|
stefanklug/mapnik
|
scons/scons-local-2.3.6/SCons/compat/__init__.py
|
Python
|
lgpl-2.1
| 8,150
| 0.001104
|
#
# Copyright (c) 2001 - 2015 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__doc__ = """
SCons compatibility package for old Python versions
This subpackage holds modules that provide backwards-compatible
implementations of various things that we'd like to use in SCons but which
only show up in later versions of Python than the early, old version(s)
we still support.
Other code will not generally reference things in this package through
the SCons.compat namespace. The modules included here add things to
the builtins namespace or the global module list so that the rest
of our code can use the objects and names imported here regardless of
Python version.
Simply enough, things that go in the builtins name space come from
our _scons_builtins module.
The rest of the things here will be in individual compatibility modules
that are either: 1) suitably modified copies of the future modules that
we want to use; or 2) backwards compatible re-implementations of the
specific portions of a future module's API that we want to use.
GENERAL WARNINGS: Implementations of functions in the SCons.compat
modules are *NOT* guaranteed to be fully compliant with these functions in
later versions of Python. We are only concerned with adding functionality
that we actually use in SCons, so be wary if you lift this code for
other uses. (That said, making these more nearly the same as later,
official versions is still a desirable goal, we just don't need to be
obsessive about it.)
We name the compatibility modules with an initial '_scons_' (for example,
_scons_subprocess.py is our compatibility module for subprocess) so
that we can still try to import the real module name and fall back to
our compatibility module if we get an ImportError. The import_as()
function defined below loads the module as the "real" name (without the
'_scons'), after which all of the "import {module}" statements in the
rest of our code will find our pre-loaded compatibility module.
"""
__revision__ = "src/engine/SCons/compat/__init__.py rel_2.3.5:3347:d31d5a4e74b6 2015/07/31 14:36:10 bdbaddog"
import os
import sys
import imp # Use the "imp" module to protect imports from fixers.
def import_as(module, name):
"""
Imports the specified module (from our local directory) as the
specified name, returning the loaded module object.
"""
dir = os.path.split(__file__)[0]
return imp.load_module(name, *imp.find_module(module, [dir]))
def rename_module(new, old):
"""
Attempts to import the old module and load it under the new name.
Used for purely cosmetic name changes in Python 3.x.
"""
try:
sys.modules[new] = imp.load_module(old, *imp.find_module(old))
return True
except ImportError:
return False
rename_module('builtins', '__builtin__')
import _scons_builtins
try:
import hashlib
except ImportError:
# Pre-2.5 Python has no hashlib module.
try:
import_as('_scons_hashlib', 'hashlib')
except ImportError:
# If we failed importing our compatibility module, it probably
# means this version of Python has no md5 module. Don't do
# anything and let the higher layer discover this fact, so it
# can fall back to using timestamp.
pass
try:
set
except NameError:
# Pre-2.4 Python has no native set type
import_as('_scons_sets', 'sets')
import builtins, sets
builtins.set = sets.Set
try:
import collections
except ImportError:
# Pre-2.4 Python has no collections module.
import_as('_scons_collections', 'collections')
else:
try:
collections.UserDict
except AttributeError:
exec('from UserDict import UserDict as _UserDict')
collections.UserDict = _UserDict
del _UserDict
try:
collections.UserList
except AttributeError:
exec('from UserList import UserList as _UserList')
collections.UserList = _UserList
del _UserList
try:
collections.UserString
except AttributeError:
exec('from UserString import UserString as _UserString')
collections.UserString = _UserString
del _UserString
try:
import io
except ImportError:
# Pre-2.6 Python has no io module.
import_as('_scons_io', 'io')
try:
os.devnull
except AttributeError:
# Pre-2.4 Python has no os.devnull attribute
_names = sys.builtin_module_names
if 'posix' in _names:
os.devnull = '/dev/null'
elif 'nt' in _names:
os.devnull = 'nul'
os.path.devnull = os.devnull
try:
os.path.lexists
except AttributeError:
# Pre-2.4 Python has no os.path.lexists function
def lexists(path):
return os.path.exists(path) or os.path.islink(path)
os.path.lexists = lexists
# When we're using the '-3' option during regression tests, importing
# cPickle gives a warning no matter how it's done, so always use the
# real profile module, whether it's fast or not.
if os.environ.get('SCONS_HORRIBLE_REGRESSION_TEST_HACK') is None:
# Not a regression test with '-3', so try to use faster version.
# In 3.x, 'pickle' automatically loads the fast version if available.
    rename_module('pickle', 'cPickle')
# In 3.x, 'profile' automatically loads the fast version if available.
rename_module('profile', 'cProfile')
# Before Python 3.0, the 'queue' module was named 'Queue'.
rename_module('queue', 'Queue')
# Before Python 3.0, the 'winreg' module was named '_winreg'
rename_module('winreg', '_winreg')
try:
import subprocess
except ImportError:
# Pre-2.4 Python has no subprocess module.
    import_as('_scons_subprocess', 'subprocess')
try:
sys.intern
except AttributeError:
# Pre-2.6 Python has no sys.intern() function.
import builtins
try:
sys.intern = builtins.intern
except AttributeError:
# Pre-2.x Python has no builtin intern() function.
def intern(x):
return x
sys.intern = intern
del intern
try:
sys.maxsize
except AttributeError:
# Pre-2.6 Python has no sys.maxsize attribute
# Wrapping sys in () is silly, but protects it from 2to3 renames fixer
sys.maxsize = (sys).maxint
if os.environ.get('SCONS_HORRIBLE_REGRESSION_TEST_HACK') is not None:
# We can't apply the 'callable' fixer until the floor is 2.6, but the
# '-3' option to Python 2.6 and 2.7 generates almost ten thousand
# warnings. This hack allows us to run regression tests with the '-3'
# option by replacing the callable() built-in function with a hack
# that performs the same function but doesn't generate the warning.
# Note that this hack is ONLY intended to be used for regression
# testing, and should NEVER be used for real runs.
from types import ClassType
def callable(obj):
if hasattr(obj, '__call__'): return True
if isinstance(obj, (ClassType, type)): return True
return False
import builtins
builtins.callable = callable
del callable
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
chrizel/onpsx
|
src/onpsx/gallery/urls.py
|
Python
|
gpl-3.0
| 160
| 0.00625
|
from django.conf.urls.defaults import *
urlpatterns = patterns('',
    (r'^(\d+)/$', 'onpsx.gallery.views.index'),
(r'^$', 'onpsx.gallery.views.index'),
)
|
eli261/jumpserver
|
apps/ops/celery/logger.py
|
Python
|
gpl-2.0
| 4,606
| 0.000434
|
from logging import StreamHandler
from django.conf import settings
from celery import current_task
from celery.signals import task_prerun, task_postrun
from kombu import Connection, Exchange, Queue, Producer
from kombu.mixins import ConsumerMixin
from .utils import get_celery_task_log_path
routing_key = 'celery_log'
celery_log_exchange = Exchange('celery_log_exchange', type='direct')
celery_log_queue = [Queue('celery_log', celery_log_exchange, routing_key=routing_key)]
class CeleryLoggerConsumer(ConsumerMixin):
def __init__(self):
self.connection = Connection(settings.CELERY_LOG_BROKER_URL)
def get_consumers(self, Consumer, channel):
return [Consumer(queues=celery_log_queue,
accept=['pickle', 'json'],
callbacks=[self.process_task])
]
def handle_task_start(self, task_id, message):
pass
def handle_task_end(self, task_id, message):
pass
def handle_task_log(self, task_id, msg, message):
pass
def process_task(self, body, message):
action = body.get('action')
task_id = body.get('task_id')
msg = body.get('msg')
if action == CeleryLoggerProducer.ACTION_TASK_LOG:
self.handle_task_log(task_id, msg, message)
elif action == CeleryLoggerProducer.ACTION_TASK_START:
self.handle_task_start(task_id, message)
elif action == CeleryLoggerProducer.ACTION_TASK_END:
self.handle_task_end(task_id, message)
class CeleryLoggerProducer:
ACTION_TASK_START, ACTION_TASK_LOG, ACTION_TASK_END = range(3)
def __init__(self):
self.connection = Connection(settings.CELERY_LOG_BROKER_URL)
@property
def producer(self):
return Producer(self.connection)
def publish(self, payload):
self.producer.publish(
payload, serializer='json', exchange=celery_log_exchange,
declare=[celery_log_exchange], routing_key=routing_key
)
def log(self, task_id, msg):
payload = {'task_id': task_id, 'msg': msg, 'action': self.ACTION_TASK_LOG}
return self.publish(payload)
def read(self):
pass
def flush(self):
pass
def task_end(self, task_id):
payload = {'task_id': task_id, 'action': self.ACTION_TASK_END}
return self.publish(payload)
def task_start(self, task_id):
payload = {'task_id': task_id, 'action': self.ACTION_TASK_START}
return self.publish(payload)
class CeleryTaskLoggerHandler(StreamHandler):
terminator = '\r\n'
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
        task_prerun.connect(self.on_task_start)
task_postrun.connect(self.on_start_end)
@staticmethod
def get_current_task_id():
if not current_task:
return
task_id = current_task.request.root_id
return task_id
def on_task_start(self, sender, task_id, **kwargs):
return self.handle_task_start(task_id)
def on_start_end(self, sender, task_id, **kwargs):
return self.handle_task_end(task_id)
def after_task_publish(self, sender, body, **kwargs):
pass
def emit(self, record):
task_id = self.get_current_task_id()
if not task_id:
return
try:
self.write_task_log(task_id, record)
self.flush()
except Exception:
self.handleError(record)
def write_task_log(self, task_id, msg):
pass
def handle_task_start(self, task_id):
pass
def handle_task_end(self, task_id):
pass
class CeleryTaskMQLoggerHandler(CeleryTaskLoggerHandler):
def __init__(self):
self.producer = CeleryLoggerProducer()
super().__init__(stream=None)
def write_task_log(self, task_id, record):
msg = self.format(record)
self.producer.log(task_id, msg)
def flush(self):
self.producer.flush()
class CeleryTaskFileHandler(CeleryTaskLoggerHandler):
def __init__(self):
self.f = None
super().__init__(stream=None)
def emit(self, record):
msg = self.format(record)
if not self.f or self.f.closed:
return
self.f.write(msg)
self.f.write(self.terminator)
self.flush()
def flush(self):
self.f and self.f.flush()
def handle_task_start(self, task_id):
log_path = get_celery_task_log_path(task_id)
self.f = open(log_path, 'a')
def handle_task_end(self, task_id):
self.f and self.f.close()
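
# A hedged wiring sketch showing how the file handler above could be attached
# to a logger used inside Celery tasks. The logger name and format string are
# examples only, not taken from the project's settings.
def example_setup_task_logging():
    import logging
    handler = CeleryTaskFileHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s [%(levelname)s] %(message)s'))
    logger = logging.getLogger('ops.celery.task')
    logger.addHandler(handler)
    return logger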
|
pinggit/plwe
|
bin/liaoxuefeng_scan.py
|
Python
|
mit
| 1,649
| 0.002939
|
#!/usr/bin/env python
# coding:utf-8
import urllib
domain = 'http://www.liaoxuefeng.com'  # Liao Xuefeng's domain
path = r'C:\Users\cyhhao2013\Desktop\temp\\'  # directory where the generated HTML files are saved
# an HTML header file
input = open(r'C:\Users\cyhhao2013\Desktop\0.html', 'r')
head = input.read()
# open the main page of the Python tutorial
f = urllib.urlopen("http://www.liaoxuefeng.com/wiki/001374738125095c955c1e6d8bb493182103fac9270762a000")
home = f.read()
f.close()
# strip all spaces and newlines (this makes the URLs easier to extract)
geturl = home.replace("\n", "")
geturl = geturl.replace(" ", "")
# get the substrings that contain the URLs
list = geturl.split(r'em;"><ahref="')[1:]
# for completeness, the first page has to be added as well
list.insert(0, '/wiki/001374738125095c955c1e6d8bb493182103fac9270762a000">')
# iterate over the URL list
for li in list:
url = li.split(r'">')[0]
    url = domain + url  # build the full URL
print url
    f = urllib.urlopen(url)
html = f.read()
    # get the title, to be used as the file name
title = html.split("<title>")[1]
title = title.split(" - 廖雪峰的官方网站</title>")[0]
    # decode the title, otherwise using it in the file path would break
title = title.decode('utf-8').replace("/", " ")
    # cut out the main body
html = html.split(r'<!-- block main -->')[1]
html = html.split(r'<h4>您的支持是作者写作最大的动力!</h4>')[0]
html = html.replace(r'src="', 'src="' + domain)
    # add the header and footer to form a complete HTML document
html = head + html+"</body></html>"
    # write the output file
output = open(path + "%d" % list.index(li) + title + '.html', 'w')
output.write(html)
output.close()
|
ApexCoin/bitnote-abe
|
Abe/DataStore.py
|
Python
|
agpl-3.0
| 122,795
| 0.001678
|
# Copyright(C) 2011,2012,2013,2014 by Abe developers.
# DataStore.py: back end database access for Abe.
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# <http://www.gnu.org/licenses/agpl.html>.
# This module combines three functions that might be better split up:
# 1. Abe's schema
# 2. Abstraction over the schema for importing blocks, etc.
# 3. Code to load data by scanning blockfiles or using JSON-RPC.
import os
import re
import errno
import logging
import SqlAbstraction
import Chain
# bitcointools -- modified deserialize.py to return raw transaction
import BCDataStream
import deserialize
import util
import base58
SCHEMA_TYPE = "Abe"
SCHEMA_VERSION = SCHEMA_TYPE + "39"
CONFIG_DEFAULTS = {
"dbtype": None,
"connect_args": None,
"binary_type": None,
"int_type": None,
"upgrade": None,
"rescan": None,
"commit_bytes": None,
"log_sql": None,
"log_rpc": None,
"datadir": None,
"ignore_bit8_chains": None,
"use_firstbits": False,
"keep_scriptsig": True,
"import_tx": [],
"default_loader": "default",
}
WORK_BITS = 304 # XXX more than necessary.
CHAIN_CONFIG = [
{"chain":"BitNote",
"code3":"BNT", "address_version":"\x1a", "magic":"\xd9\xf5\xc7\xba"},
]
NULL_PUBKEY_HASH = "\0" * Chain.PUBKEY_HASH_LENGTH
NULL_PUBKEY_ID = 0
PUBKEY_ID_NETWORK_FEE = NULL_PUBKEY_ID
# Size of the script and pubkey columns in bytes.
MAX_SCRIPT = 1000000
MAX_PUBKEY = 65
NO_CLOB = 'BUG_NO_CLOB'
# XXX This belongs in another module.
class InvalidBlock(Exception):
pass
class MerkleRootMismatch(InvalidBlock):
def __init__(ex, block_hash, tx_hashes):
ex.block_hash = block_hash
ex.tx_hashes = tx_hashes
def __str__(ex):
return 'Block header Merkle root does not match its transactions. ' \
'block hash=%s' % (ex.block_hash[::-1].encode('hex'),)
class MalformedHash(ValueError):
pass
class MalformedAddress(ValueError):
pass
class DataStore(object):
"""
Bitcoin data storage class based on DB-API 2 and standard SQL with
workarounds to support SQLite3, PostgreSQL/psycopg2, MySQL,
Oracle, ODBC, and IBM DB2.
"""
def __init__(store, args):
"""
Open and store a connection to the SQL database.
args.dbtype should name a DB-API 2 driver module, e.g.,
"sqlite3".
args.connect_args should be an argument to the module's
connect() method, or None for no argument, or a list of
arguments, or a dictionary of named arguments.
args.datadir names Bitcoin data directories containing
blk0001.dat to scan for new blocks.
"""
if args.datadir is None:
args.datadir = util.determine_db_dir()
if isinstance(args.datadir, str):
args.datadir = [args.datadir]
store.args = args
store.log = logging.getLogger(__name__)
store.rpclog = logging.getLogger(__name__ + ".rpc")
if not args.log_rpc:
store.rpclog.setLevel(logging.ERROR)
if args.dbtype is None:
store.log.warn("dbtype not configured, see abe.conf for examples");
store.dbmodule = None
store.config = CONFIG_DEFAULTS.copy()
store.datadirs = []
store.use_firstbits = CONFIG_DEFAULTS['use_firstbits']
store._sql = None
return
store.dbmodule = __import__(args.dbtype)
sql_args = lambda: 1
sql_args.module = store.dbmodule
sql_args.connect_args = args.connect_args
sql_args.binary_type = args.binary_type
sql_args.int_type = args.int_type
sql_args.log_sql = args.log_sql
sql_args.prefix = "abe_"
sql_args.config = {}
store.sql_args = sql_args
store.set_db(None)
store.init_sql()
store._blocks = {}
# Read the CONFIG and CONFIGVAR tables if present.
store.config = store._read_config()
if store.config is None:
store.keep_scriptsig = args.keep_scriptsig
elif 'keep_scriptsig' in store.config:
store.keep_scriptsig = store.config.get('keep_scriptsig') == "true"
else:
store.keep_scriptsig = CONFIG_DEFAULTS['keep_scriptsig']
store.refresh_ddl()
if store.config is None:
store.initialize()
else:
store.init_sql()
if store.config['schema_version'] == SCHEMA_VERSION:
pass
elif args.upgrade:
import upgrade
upgrade.upgrade_schema(store)
else:
raise Exception(
"Database schema version (%s) does not match software"
" (%s). Please run with --upgrade to convert database."
% (store.config['schema_version'], SCHEMA_VERSION))
store._sql.auto_reconnect = True
if args.rescan:
store.sql("UPDATE datadir SET blkfile_number=1, blkfile_offset=0")
store._init_datadirs()
store.init_chains()
store.commit_bytes = args.commit_bytes
if store.commit_bytes is None:
store.commit_bytes = 0 # Commit whenever possible.
else:
store.commit_bytes = int(store.commit_bytes)
store.bytes_since_commit = 0
store.use_firstbits = (store.config['use_firstbits'] == "true")
for hex_tx in args.import_tx:
chain_name = None
if isinstance(hex_tx, dict):
chain_name = hex_tx.get("chain")
hex_tx = hex_tx.get("tx")
store.maybe_import_binary_tx(chain_name, str(hex_tx).decode('hex'))
store.default_loader = args.default_loader
store.commit()
def set_db(store, db):
store._sql = db
def get_db(store):
return store._sql
def connect(store):
return store._sql.connect()
def reconnect(store):
return store._sql.reconnect()
def close(store):
store._sql.close()
def commit(store):
store._sql.commit()
def rollback(store):
if store._sql is not None:
store._sql.rollback()
def sql(store, stmt, params=()):
store._sql.sql(stmt, params)
def ddl(store, stmt):
store._sql.ddl(stmt)
def selectrow(store, stmt, params=()):
return store._sql.selectrow(stmt, params)
def selectall(store, stmt, params=()):
return store._sql.selectall(stmt, params)
def rowcount(store):
return store._sql.rowcount()
def create_sequence(store, key):
store._sql.create_sequence(key)
def drop_sequence(store, key):
store._sql.drop_sequence(key)
def new_id(store, key):
return store._sql.new_id(key)
def init_sql(store):
sql_args = store.sql_args
if hasattr(store, 'config'):
for name in store.config.keys():
if name.startswith('sql.'):
sql_args.config[name[len('sql.'):]] = store.config[name]
if store._sql:
            store._sql.close() # XXX Could just set_flavour.
store.set_db(SqlAbstraction.SqlAbstraction(sql_args))
store.init_binfuncs()
def init_binfuncs(store):
store.binin = store._sql.binin
store.binin_hex = store._sql.binin_hex
store.binin_int = store._sql.binin_int
store.binout = store._sql.binout
|
store.binout_hex = store._sql.
|
danwchan/trail_of_cthulhu
|
mythos_website_upgrade/characterbirther/forms.py
|
Python
|
gpl-3.0
| 2,580
| 0.018217
|
from django.forms import ModelForm, inlineformset_factory, HiddenInput, ModelChoiceField
from .models import BirthForm, SanityPillars, InnateAbility, StabilitySources
class CharBirthForm(ModelForm):
class Meta:
model = BirthForm
fields = ['name', 'pronoun', 'age', 'birthplace', 'drive', 'occupation']
'''
#(1) no from previous mistake where each thing was it' own form instead of a single super big form
class CharBirthForm(ModelForm):
class Meta:
model = BirthForm
fields = ['name', 'pronoun', 'age', 'birthplace']
# widgets = {'birthcode' : HiddenInput()}
# widgets = {'confirm_start' : HiddenInput()}
class DriveForm(ModelForm):
class Meta:
model = BirthForm
fields = ['drive']
# widgets = {'confirm_drive' : HiddenInput()}
class OccupationForm(ModelForm):
class Meta:
model = BirthForm
fields = ['occupation']
# widgets = {'confirm_occupation' : HiddenInput()}
#(2) no longer need to define the modelform explicitly, it's made from the inlineformset_factory
class SanityForm(ModelForm):
class Meta:
model = SanityPillars
fields = ['pillar', 'description']
#commenting out the confirm fields!
# def __init__(self, *args, **kwargs):
# super(DocumentForm, self).__init__(*args, **kwargs)
# self.fields['confirm_pillars'] = BooleanField(queryset=BirthForm.objects['confirm_pillars'])
# self.fields['FORCE_confirm_pillars'] = True
# widgets = {'confirm_pillars' : HiddenInput()}
class AbilitiesForm(ModelForm):
class Meta:
model = InnateAbility
fields = ['ability', 'value']
class SourceBirthForm(ModelForm):
class Meta:
model = StabilitySources
fields = ['name', 'relation', 'personality', 'residence']
'''
#formset for pillars of sanity
PillarsOfSanity = inlineformset_factory(
BirthForm,
SanityPillars,
fields = ['pillar', 'description'],
extra = 0,
min_num = 1,
max_num = 3,
can_delete = True,
validate_min = True,
validate_max = True,
)
#formset for abilities
Abilities = inlineformset_factory(
BirthForm,
InnateAbility,
fields = ['ability', 'value'],
can_delete=True,
)
#formset for sources of Stability
SourcesOfStability = inlineformset_factory(
BirthForm,
StabilitySources,
    fields = ['name', 'relation', 'personality', 'residence'],
extra = 0,
min_num = 1,
max_num = 4,
can_delete = True,
validate_min = True,
validate_max = True,
)
|
jdf76/plugin.video.youtube
|
resources/lib/youtube_plugin/refresh.py
|
Python
|
gpl-2.0
| 263
| 0
|
# -*- coding: utf-8 -*-
"""
Copyright (C) 2018-2018 plugin.video.youtube
SPDX-License-Identifier: GPL-2.0-only
See LICENSES/GPL-2.0-only for more information.
"""
import xbmc
if __name__ == '__main__':
    xbmc.executebuiltin("Container.Refresh")
|
cowboysmall/rosalind
|
src/stronghold/rosalind_suff.py
|
Python
|
mit
| 288
| 0.010417
|
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '../tools'))
import files
import tree
def main(argv):
    dna = files.read_line(argv[0])
st = tree.SuffixTree(dna)
print '\n'.join(st.traverse())
if __name__ == "__main__":
main(sys.argv[1:])
|
txomon/vdsm
|
tests/configNetworkTests.py
|
Python
|
gpl-2.0
| 7,351
| 0
|
#
# Copyright 2012 IBM, Inc.
# Copyright 2012-2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from vdsm import netinfo
from testlib import VdsmTestCase as TestCaseBase
from monkeypatch import MonkeyPatch
from network import api, configurators
from network import errors
from network.models import Bond, Bridge, Nic, Vlan
def _fakeNetworks():
return {'fakebridgenet': {'iface': 'fakebridge', 'bridged': True},
'fakenet': {'iface': 'fakeint', 'bridged': False}}
def _raiseInvalidOpException(*args, **kwargs):
return RuntimeError('Attempted to apply network configuration during unit '
'testing.')
class TestConfigNetwork(TestCaseBase):
def _addNetworkWithExc(self, netName, opts, errCode):
with self.assertRaises(errors.ConfigNetworkError) as cneContext:
api._addNetwork(netName, **opts)
self.assertEqual(cneContext.exception.errCode, errCode)
# Monkey patch the real network detection from the netinfo module.
@MonkeyPatch(netinfo, 'networks', _fakeNetworks)
@MonkeyPatch(netinfo, 'getMaxMtu', lambda *x: 1500)
@MonkeyPatch(netinfo, 'getMtu', lambda *x: 1500)
    @MonkeyPatch(configurators.ifcfg, 'ifdown', lambda *x:
_raiseInvalidOpException())
@MonkeyPatch(configurators.ifcfg, 'ifup',
lambda *x: _raiseInvalidOpException())
@MonkeyPatch(Bond, 'configure', lambda *x: _raiseInvalidOpException())
@MonkeyPatch(Bridge, 'configure', lambda *x: _raiseInvalidOpException())
@MonkeyPatch(Nic, 'configure', lambda *x: _raiseInvalidOpException())
@MonkeyPatch(Vlan, 'configure', lambda *x: _raiseInvalidOpException())
def testAddNetworkValidation(self):
_netinfo = {
'networks': {
'fakent': {'iface': 'fakeint', 'bridged': False},
'fakebrnet': {'iface': 'fakebr', 'bridged': True,
'ports': ['eth0', 'eth1']},
'fakebrnet1': {'iface': 'fakebr1', 'bridged': True,
'ports': ['bond00']},
'fakebrnet2': {'iface': 'fakebr2', 'bridged': True,
'ports': ['eth7.1']},
'fakebrnet3': {'iface': 'eth8', 'bridged': False}
},
'vlans': {
'eth3.2': {'iface': 'eth3',
'addr': '10.10.10.10',
'netmask': '255.255.0.0',
'mtu': 1500
},
'eth7.1': {'iface': 'eth7',
'addr': '192.168.100.1',
'netmask': '255.255.255.0',
'mtu': 1500
}
},
'nics': ['eth0', 'eth1', 'eth2', 'eth3', 'eth4', 'eth5', 'eth6',
'eth7', 'eth8', 'eth9', 'eth10'],
'bridges': {
'fakebr': {'ports': ['eth0', 'eth1']},
'fakebr1': {'ports': ['bond00']},
'fakebr2': {'ports': ['eth7.1']}
},
'bondings': {'bond00': {'slaves': ['eth5', 'eth6']}}
}
fakeInfo = netinfo.NetInfo(_netinfo)
nics = ['eth2']
# Test for already existing bridge.
self._addNetworkWithExc('fakebrnet', dict(nics=nics,
_netinfo=fakeInfo), errors.ERR_USED_BRIDGE)
# Test for already existing network.
self._addNetworkWithExc('fakent', dict(nics=nics, _netinfo=fakeInfo),
errors.ERR_USED_BRIDGE)
# Test for bonding opts passed without bonding specified.
self._addNetworkWithExc('test', dict(nics=nics,
bondingOptions='mode=802.3ad',
_netinfo=fakeInfo), errors.ERR_BAD_BONDING)
# Test IP without netmask.
self._addNetworkWithExc('test', dict(nics=nics, ipaddr='10.10.10.10',
_netinfo=fakeInfo), errors.ERR_BAD_ADDR)
# Test netmask without IP.
self._addNetworkWithExc('test', dict(nics=nics,
netmask='255.255.255.0', _netinfo=fakeInfo),
errors.ERR_BAD_ADDR)
# Test gateway without IP.
self._addNetworkWithExc('test', dict(nics=nics, gateway='10.10.0.1',
_netinfo=fakeInfo), errors.ERR_BAD_ADDR)
# Test for non existing nic.
self._addNetworkWithExc('test', dict(nics=['eth11'],
_netinfo=fakeInfo), errors.ERR_BAD_NIC)
# Test for nic already bound to a different network.
self._addNetworkWithExc('test', dict(bonding='bond0', nics=['eth0',
'eth1'], _netinfo=fakeInfo),
errors.ERR_USED_NIC)
# Test for bond already member of a network.
self._addNetworkWithExc('test', dict(bonding='bond00', nics=['eth5',
'eth6'], _netinfo=fakeInfo),
errors.ERR_BAD_PARAMS)
# Test for multiple nics without bonding device.
self._addNetworkWithExc('test', dict(nics=['eth3', 'eth4'],
_netinfo=fakeInfo), errors.ERR_BAD_BONDING)
# Test for nic already in a bond.
self._addNetworkWithExc('test', dict(nics=['eth6'], _netinfo=fakeInfo),
errors.ERR_USED_NIC)
# Test for adding a new non-VLANed bridgeless network when a non-VLANed
# bridgeless network exists
self._addNetworkWithExc('test', dict(nics=['eth8'], bridged=False,
_netinfo=fakeInfo), errors.ERR_BAD_PARAMS)
def testBuildBondOptionsBadParams(self):
class FakeNetInfo(object):
def __init__(self):
self.bondings = ['god', 'bless', 'potatoes']
with self.assertRaises(errors.ConfigNetworkError) as cne:
api._buildBondOptions('jamesbond', {}, _netinfo=FakeNetInfo())
self.assertEquals(cne.exception.errCode, errors.ERR_BAD_PARAMS)
@MonkeyPatch(netinfo, 'NetInfo', lambda: None)
def testValidateNetSetupRemoveParamValidation(self):
attrs = dict(nic='dummy', remove=True,
bridged=True)
networks = {'test-netowrk': attrs}
with self.assertRaises(errors.ConfigNetworkError) as cneContext:
api._validateNetworkSetup(networks, {})
self.assertEqual(cneContext.exception.errCode,
errors.ERR_BAD_PARAMS)
|
rwl/pylon
|
examples/pyreto/rlopf.py
|
Python
|
apache-2.0
| 5,046
| 0.005549
|
__author__ = 'Richard Lincoln, r.w.lincoln@gmail.com'
""" This example demonstrates how to optimise power flow with Pyreto. """
import sys
import logging
import numpy
import scipy.io
import pylab
import pylon
import pyreto
from pyreto.util import plotGenCost
from pybrain.rl.agents import LearningAgent
from pybrain.rl.learners import ENAC, Reinforce
from pybrain.rl.experiments import EpisodicExperiment
from pybrain.rl.agents import OptimizationAgent
from pybrain.optimization import HillClimber, CMAES, ExactNES, PGPE, FEM
from pybrain.tools.shortcuts import buildNetwork
from pybrain.tools.plotting import MultilinePlotter
logger = logging.getLogger()
for handler in logger.handlers: logger.removeHandler(handler) # rm pybrain
logger.addHandler(logging.StreamHandler(sys.stdout))
logger.setLevel(logging.DEBUG)
#logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
case = pylon.Case.load("../data/case6ww.pkl")
case.generators[0].p_cost = (0.0, 16.0, 200.0)
case.generators[1].p_cost = (0.0, 2.0, 200.0)
case.generators[2].p_cost = (0.0, 32.0, 200.0)
case.buses[3].p_demand = 120.0
case.buses[4].p_demand = 120.0
case.buses[5].p_demand = 120.0
#plotGenCost(case.generators)
# Assume initial demand is peak demand (for sensor limits) and save it.
Pd0 = [b.p_demand for b in case.buses if b.type == pylon.PQ]
# Define a 24-hour load profile with hourly values.
p1h = numpy.array([0.52, 0.54, 0.52, 0.50, 0.52, 0.57, 0.60, 0.71, 0.89, 0.85, 0.88,
0.94, 0.90, 0.88, 0.88, 0.82, 0.80, 0.78, 0.76, 0.68, 0.68, 0.68,
0.65, 0.58])
#p1h = p1h[6:-6]
p1h = p1h[:12]
nf = len(p1h)
# Create a case environment specifying the load profile.
env = pyreto.CaseEnvironment(case, p1h)
# Create an episodic cost minimisation task.
task = pyreto.MinimiseCostTask(env)
# Create a network for approximating the agent's policy function that maps
# system demand to generator set-points.
nb = len([bus for bus in case.buses if bus.type == pylon.PQ])
ng = len([g for g in case.online_generators if g.bus.type != pylon.REFERENCE])
net = buildNetwork(nb, ng, bias=False)
# Create an agent and select an episodic learner.
#learner = Reinforce()
learner = ENAC()
#learner.gd.rprop = True
## only relevant for RP
#learner.gd.deltamin = 0.0001
##agent.learner.gd.deltanull = 0.05
## only relevant for BP
#learner.gd.alpha = 0.01
#learner.gd.momentum = 0.9
agent = LearningAgent(net, learner)
# Adjust some parameters of the NormalExplorer.
sigma = [50.0] * ng
learner.explorer.sigma = sigma
#learner.explorer.epsilon = 0.01 # default: 0.3
#learner.learningRate = 0.01 # (0.1-0.001, down to 1e-7 for RNNs)
# Alternatively, use blackbox optimisation.
#learner = HillClimber(storeAllEvaluations=True)
##learner = CMAES(storeAllEvaluations=True)
##learner = FEM(storeAllEvaluations=True)
##learner = ExactNES(storeAllEvaluations=True)
##learner = PGPE(storeAllEvaluations=True)
#agent = OptimizationAgent(net, learner)
# Prepare for plotting.
pylab.figure()#figsize=(16,8))
pylab.ion()
plot = MultilinePlotter(autoscale=1.1, xlim=[0, nf], ylim=[0, 1])
# Read ideal system cost and set-point values determined using OPF.
f_dc = scipy.io.mmread("../data/fDC.mtx").flatten()
f_ac = scipy.io.mmread("../data/fAC.mtx").flatten()
Pg_dc = scipy.io.mmread("../data/PgDC.mtx")
Pg_ac = scipy.io.mmread("../data/PgAC.mtx")
Qg_ac = scipy.io.mmread("../data/QgAC.mtx")
rday = range(nf)
for i in range(len(case.online_generators)):
plot.setData(i, rday, numpy.zeros(nf))
plot.setData(3, rday, f_dc[:nf])
plot.setData(4, rday, f_ac[:nf])
plot.setData(5, rday, numpy.zeros(nf)) # reward
#plot.setData(6, rday, Pg_ac[:nf] * 10)
plot.setLineStyle(0, color="red")
plot.setLineStyle(1, color="green")
plot.setLineStyle(2, color="blue")
plot.setLineStyle(3, color="black")
plot.setLineStyle(4, color="gray")
plot.setLineStyle(5, color="orange")
#plot.setLineStyle(6, color="black")
plot.setLineStyle(linewidth=2)
plot.update()
# Give the agent its task in an experiment.
#experiment = EpisodicExperiment(task, agent)
experiment = pyreto.rlopf.OPFExperiment(task, agent)
weeks = 52 * 2
days = 5 # number of samples per gradient estimate
for week in range(weeks):
all_rewards = experiment.doEpisodes(number=days)
tot_reward = numpy.mean(agent.history.getSumOverSequences('reward'))
# print learner._allEvaluations#[-:-1]
# Plot the reward at each period averaged over the week.
r = -1.0 * numpy.array(all_rewards).reshape(days, nf)
avg_r = numpy.mean(r, 0)
plot.setData(5, rday, avg_r)
# Plot the set-point of each generator on the last day of the week.
# FIXME: Plot the set-points averaged over the week.
for i in range(len(case.online_generators)):
scale_factor = 10
# plot.setData(i, rday, env._Pg[i, :] * scale_factor)
plot.setData(i, rday, experiment.Pg[i, :] * scale_factor)
agent.learn()
agent.reset()
# Scale sigma manually.
sigma = [(sig * 0.95) - 0.05 for sig in sigma]
learner.explorer.sigma = sigma
plot.update()
pylab.savefig("/tmp/rlopf.png")
|
acsone/mozaik
|
mozaik_communication/mail_mail.py
|
Python
|
agpl-3.0
| 1,638
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of mozaik_communication, an Odoo module.
#
# Copyright (c) 2015 ACSONE SA/NV (<http://acsone.eu>)
#
# mozaik_communication is free software:
# you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of
# the License, or (at your option) any later version.
#
# mozaik_communication is distributed in the hope that it will
# be useful but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the
# GNU Affero General Public License
# along with mozaik_communication.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
class MailMail(orm.Model):
    _inherit = 'mail.mail'
def _get_unsubscribe_url(
self, cr, uid, mail, email_to, msg=None, context=None):
'''
        Override the native method to manage the unsubscribe URL for the
        distribution list case of a newsletter.
'''
mml = mail.mailing_id
if mml.distribution_list_id and mml.distribution_list_id.newsletter:
return super(MailMail, self)._get_unsubscribe_url(
cr, uid, mail, email_to, msg=msg, context=context)
else:
return ''
|
toladata/TolaActivity
|
formlibrary/tests/test_models.py
|
Python
|
apache-2.0
| 1,601
| 0
|
# -*- coding: utf-8 -*-
from django.core.exceptions import ValidationError
from django.test import TestCase, tag
import factories
from formlibrary.models import CustomForm
@tag('pkg')
class CustomFormTest(TestCase):
def setUp(self):
self.organization = factories.Organization()
self.user = factories.User()
def test_save_without_public_info(self):
custom_form = CustomForm(
organization=self.organization,
name="Humanitec's Survey",
fields="{}",
public={}
)
self.assertRaises(ValidationError, custom_form.save)
def test_save_without_public_org_info(self):
custom_form = CustomForm(
organization=self.organization,
name="Humanitec's Survey",
fields="{}",
            public={'url': True}
)
self.assertRaises(ValidationError, custom_form.save)
def test_save_without_public_url_info(self):
custom_form = CustomForm(
organization=self.organization,
name="Humanitec's Survey",
fields="{}",
public={'org': True}
)
self.assertRaises(ValidationError, custom_form.save)
def test_save_with_public_info(self):
        custom_form = CustomForm.objects.create(
organization=self.organization,
name="Humanitec's Survey",
fields="{}",
public={'org': True, 'url': True}
)
self.assertEqual(custom_form.name, "Humanitec's Survey")
self.assertEqual(custom_form.public, {'org': True, 'url': True})
|
openconnectome/open-connectome
|
scripts/ingest/flyem/flyem_anno_parallel.py
|
Python
|
apache-2.0
| 4,133
| 0.031696
|
import argparse
import numpy as np
from PIL import Image
import ocppaths
import ocpcarest
import zindex
import anydbm
import multiprocessing
import pdb
#
# ingest the PNG files into the database
#
"""This file is super-customized for Mitya's FlyEM data."""
# Stuff we make take from a config or the command line in the future
#ximagesz = 12000
#yimagesz = 12000
parser = argparse.ArgumentParser(description='Ingest the FlyEM image data.')
parser.add_argument('baseurl', action="store", help='Base URL to of ocp service no http://, e. g. neurodata.io')
parser.add_argument('token', action="store", help='Token for the annotation project.')
parser.add_argument('path', action="store", help='Directory with annotation PNG files.')
parser.add_argument('process', action="store", help='Number of processes.')
result = parser.parse_args()
# convert to an argument
resolution = 0
# load a database
[ db, proj, projdb ] = ocpcarest.loadDBProj ( result.token )
# get the dataset configuration
(xcubedim,ycubedim,zcubedim)=proj.datasetcfg.cubedim[resolution]
(startslice,endslice)=proj.datasetcfg.slicerange
batchsz=zcubedim
# This doesn't work because the image size does not match exactly the cube size
#(ximagesz,yimagesz)=proj.datasetcfg.imagesz[resolution]
ximagesz = 12000
yimagesz = 12000
batchsz=16
totalslices = range(startslice,endslice,16)
totalprocs = int(result.process)
#global anydb
#pdb.set_trace()
#anydb = anydbm.open('bodydict','r')
#anydb = dict(anydb)
def parallelwrite(slicenumber):
# Accessing the dict in dbm
#anydb = anydbm.open('bodydict','r')
[ db, proj, projdb ] = ocpcarest.loadDBProj ( result.token )
#print slicenumber
startslice = slicenumber
endslice = startslice+16
# Get a list of the files in the directories
for sl in range (startslice, endslice+1, batchsz):
slab = np.zeros ( [ batchsz, yimagesz, ximagesz ], dtype=np.uint32 )
for b in range ( batchsz ):
if ( sl + b <= endslice and sl + b<=1460 ):
# raw data
filenm = result.path + '/superpixel.' + '{:0>5}'.format(sl+b) + '.png'
#print "Opening filenm " + filenm
img = Image.open ( filenm, 'r' )
imgdata = np.asarray ( img )
#Adding new lines
anydb = anydbm.open('bodydict2','r')
superpixelarray = imgdata[:,:,0] + (np.uint32(imgdata[:,:,1])<<8)
newdata = np.zeros([superpixelarray.shape[0],superpixelarray.shape[1]], dtype=np.uint32)
#print "slice",sl+b,"batch",sl
print sl+b,multiprocessing.current_process()
for i in range(superpixelarray.shape[0]):
for j in range(superpixelarray.shape[1]):
key = str(sl)+','+str(superpixelarray[i,j])
if( key not in anydb):
f = open('missing_keys', 'a')
f.write(key+'\n')
f.close()
print "Error Detected Writing to File"
dictvalue = '0'
else:
dictvalue = anydb.get( key )
newdata[i,j] = int(dictvalue)
slab[b,:,:] = newdata
print "end of slice:",sl+b
anydb.close()
print "Entering commit phase"
# Now we have a 1024x1024x16 z-aligned cube.
# Send it to the database.
for y in range ( 0, yimagesz, ycubedim ):
for x in range ( 0, ximagesz, xcubedim ):
mortonidx = zindex.XYZMorton ( [ x/xcubedim, y/ycubedim, (sl-startslice)/zcubedim] )
cubedata = np.zeros ( [zcubedim, ycubedim, xcubedim], dtype=np.uint32 )
xmin = x
ymin = y
xmax = min ( ximagesz, x+xcubedim )
ymax = min ( yimagesz, y+ycubedim )
zmin = 0
zmax = min(sl+zcubedim,endslice+1)
        cubedata[0:zmax-zmin,0:ymax-ymin,0:xmax-xmin] = slab[zmin:zmax,ymin:ymax,xmin:xmax]
# insert the blob into the database
db.annotateDense ((x,y,sl-startslice), resolution, cubedata, 'O')
print "Commiting at x=%s, y=%s, z=%s" % (x,y,sl)
        db.conn.commit()
return None
def run():
flypool = multiprocessing.Pool(totalprocs)
flypool.map(parallelwrite, totalslices, 16)
if __name__ == "__main__":
run()
|
DanielTakeshi/rl_algorithms
|
es/plot.py
|
Python
|
mit
| 4,221
| 0.007344
|
"""
To plot this, you need to provide the experiment directory plus an output stem.
I use this for InvertedPendulum:
python plot.py outputs/InvertedPendulum-v1 --envname InvertedPendulum-v1 \
--out figures/InvertedPendulum-v1
(c) May 2017 by Daniel Seita
"""
import argparse
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import os
import pickle
import seaborn as sns
import sys
from os.path import join
from pylab import subplots
plt.style.use('seaborn-darkgrid')
sns.set_context(rc={'lines.markeredgewidth': 1.0})
np.set_printoptions(edgeitems=100,linewidth=100,suppress=True)
# Some matplotlib settings.
LOGDIR = 'outputs/'
FIGDIR = 'figures/'
title_size = 22
tick_size = 18
legend_size = 17
ysize = 18
xsize = 18
lw = 1
ms = 8
error_region_alpha = 0.3
# Attributes to include in a plot.
ATTRIBUTES = ["FinalAvgReturns",
"FinalStdReturns",
"FinalMaxReturns",
"FinalMinReturns",
"ScoresAvg",
"ScoresStd",
"ScoresMax",
"ScoresMin"]
# Axes labels for environments.
ENV_TO_YLABELS = {"HalfCheetah-v1": [-800,1000],
"InvertedPendulum-v1": [0,1000]}
# Colors. In general we won't use all of these.
COLORS = ['blue', 'red', 'gold', 'black']
def plot_one_dir(args, directory):
""" The actual plotting code.
Assumes that we'll be plotting from one directory, which usually means
    considering one random seed only, however it's better to have multiple
random seeds so this code generalizes. For ES, we should store the output at
    *every* timestep, so A['TotalIterations'] should be like np.arange(...), but
this generalizes in case Ray can help me run for many more iterations.
"""
print("Now plotting based on directory {} ...".format(directory))
### Figure 1: The log.txt file.
num = len(ATTRIBUTES)
fig, axes = subplots(num, figsize=(12,3*num))
for (dd, cc) in zip(directory, COLORS):
A = np.genfromtxt(join(args.expdir, dd, 'log.txt'),
delimiter='\t', dtype=None, names=True)
x = A['TotalIterations']
for (i,attr) in enumerate(ATTRIBUTES):
axes[i].plot(x, A[attr], '-', lw=lw, color=cc, label=dd)
axes[i].set_ylabel(attr, fontsize=ysize)
axes[i].tick_params(axis='x', labelsize=tick_size)
axes[i].tick_params(axis='y', labelsize=tick_size)
axes[i].legend(loc='best', ncol=1, prop={'size':legend_size})
plt.tight_layout()
plt.savefig(args.out+'_log.png')
### Figure 2: Error regions.
num = len(directory)
if num == 1:
num+= 1
fig, axes = subplots(1,num, figsize=(12*num,10))
for (i, (dd, cc)) in enumerate(zip(directory, COLORS)):
A = np.genfromtxt(join(args.expdir, dd, 'log.txt'),
delimiter='\t', dtype=None, names=True)
axes[i].plot(A['TotalIterations'], A["FinalAvgReturns"],
color=cc, marker='x', ms=ms, lw=lw)
axes[i].fill_between(A['TotalIterations'],
A["FinalAvgReturns"] - A["FinalStdReturns"],
A["FinalAvgReturns"] + A["FinalStdReturns"],
alpha = error_region_alpha,
facecolor='y')
axes[i].set_ylim(ENV_TO_YLABELS[args.envname])
axes[i].tick_params(axis='x', labelsize=tick_size)
axes[i].tick_params(axis='y', labelsize=tick_size)
axes[i].set_title("Mean Episode Rewards ({})".format(dd), fontsize=title_size)
axes[i].set_xlabel("ES Iterations", fontsize=xsize)
axes[i].set_ylabel("Rewards", fontsize=ysize)
plt.tight_layout()
plt.savefig(args.out+'_rewards_std.png')
if __name__ == "__main__":
"""
Handle logic with argument parsing.
"""
parser = argparse.ArgumentParser()
parser.add_argument("expdir", help="experiment dir, e.g., /tmp/experiments")
parser.add_argument("--out", type=str, help="full directory where to save")
parser.add_argument("--envname", type=str)
args = parser.parse_args()
plot_one_dir(args, directory=os.listdir(args.expdir))
|
agartland/utils
|
corncob_r.py
|
Python
|
mit
| 3,388
| 0.007701
|
import pandas as pd
import numpy as np
from os.path import join as opj
import sys
from fg_shared import *
sys.path.append(opj(_git, 'utils'))
import quickr
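
# A hedged sketch of the per-feature count table that the R snippet below
# expects as example_df. The column names come from the comments in the R code
# (W, M, AGE, SEX, DAYS, HLA, plus a feature label); the values here are
# invented for illustration only.
def make_example_df():
    return pd.DataFrame({
        'feature': ['mc1', 'mc1', 'mc2', 'mc2'],
        'W':       [5, 0, 12, 3],               # meta-clonotype counts (radius)
        'M':       [1000, 800, 1000, 800],      # total counts per sample
        'AGE':     [34, 61, 34, 61],
        'SEX':     ['Male', 'Female', 'Male', 'Female'],
        'DAYS':    [1, 0, 1, 0],
        'HLA':     ['MATCH', 'NON-MATCH', 'MATCH', 'NON-MATCH'],
    })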
corncob = """
require(dplyr)
require(tidyr)
require(purrr)
require(corncob)
###########################################################################################
# APPLIED TO MANY FEATURES:
###########################################################################################
# Function to Fit A Beta-Binomial Model to A Single Feature
# Note: YOU HAVE A CHOICE OF W,WR,or W0 which repressent different counts
# W are counts of Meta-Clonotype (RADIUS ONLY)
# WR are counts of Meta-Clonotype (RADIUS + REGEX ONLY)
# W0 are counts of Clonotype (TCRDIST0 basically EXACT CLONOTYPE)
# M total counts
# AGE age in years
# SEX "Male" or "Female"
# DAYS 1 if > 2 days post diagnosis, 0 otherwise
# HLA "MATCH" or "NON-MATCH" (in this case A*01)
###########################################################################################
#' do_corncob
#'
#' Define the beta-binomial we are attempting to fit
#'
#' @param mydata data.frame
do_corncob <- function(mydata, frm = as.formula('cbind(W, M - W) ~ AGE+SEX+DAYS+HLA')){
cb1 = bbdml(formula = frm,
phi.formula = ~ 1,
data = mydata)
return(cb1)
}
# This wrapper is useful for avoiding crashes do to errors:
possibly_do_corncob = purrr::possibly(do_corncob, otherwise = NA)
###########################################################################################
# Split Data by Feature
###########################################################################################
list_of_df_by_feature = example_df %>% split(f = example_df$feature)
###########################################################################################
# Fit Models
###########################################################################################
list_of_fit_models = purrr::map(list_of_df_by_feature, ~possibly_do_corncob(mydata = .x, frm = as.formula('cbind(W, M - W) ~ AGE+SEX+DAYS+HLA')))
list_of_fit_models = list_of_fit_models[!is.na(list_of_fit_models)]
###########################################################################################
# Parse Models
###########################################################################################
#' get bbdml coefficients into a table
#'
#'
#' @param cb is object result of corncob::bbdml
#' @param i is a label for the feature name
#'
#' @example
#' purrr::map2(list_of_fit_models, names(list_of_fit_models), ~parse_corncob(cb = .x, i = .y))
parse_corncob <- function(cb,i =1){
y = summary(cb)$coefficients
rdf = as.data.frame(y)
rdf$param = rownames(rdf)
rdf = rdf %>% mutate(estimate = Estimate, se = `Std. Error`, tvalue = `t value`, pvalue = `Pr(>|t|)`, param) %>%
mutate(type = ifelse(grepl(param, pattern = "phi"), "phi", "mu")) %>%
mutate(type2 = ifelse(grepl(param, pattern = "Intercept"), "intercept", "covariate"))
rdf$feature = i
return(rdf)
}
tabular_results = purrr::map2(list_of_fit_models, names(list_of_fit_models), ~parse_corncob(cb = .x, i = .y))
tabular_results = do.call(rbind, tabular_results) %>% tibble::remove_rownames()
clean_tabular_results = tabular_results %>% select(feature, Estimate, pvalue, param, type, type2) %>%
arrange(type2, type, pvalue)
"""
|
zymsys/sms-tools
|
software/models/spsModel.py
|
Python
|
agpl-3.0
| 7,289
| 0.024283
|
# functions that implement analysis and synthesis of sounds using the Sinusoidal plus Stochastic Model
# (for example usage check the models_interface directory)
import numpy as np
from scipy.signal import resample, blackmanharris, triang, hanning
from scipy.fftpack import fft, ifft, fftshift
import math
import utilFunctions as UF
import dftModel as DFT
import sineModel as SM
import stochasticModel as STM
def spsModelAnal(x, fs, w, N, H, t, minSineDur, maxnSines, freqDevOffset, freqDevSlope, stocf):
"""
Analysis of a sound using the sinusoidal plus stochastic model
x: input sound, fs: sampling rate, w: analysis window; N: FFT size, t: threshold in negative dB,
minSineDur: minimum duration of sinusoidal tracks
maxnSines: maximum number of parallel sinusoids
freqDevOffset: frequency deviation allowed in the sinusoids from frame to frame at frequency 0
freqDevSlope: slope of the frequency deviation, higher frequencies have bigger deviation
stocf: decimation factor used for the stochastic approximation
returns hfreq, hmag, hphase: harmonic frequencies, magnitude and phases; stocEnv: stochastic residual
"""
# perform sinusoidal analysis
tfreq, tmag, tphase = SM.sineModelAnal(x, fs, w, N, H, t, maxnSines, minSineDur, freqDevOffset, freqDevSlope)
Ns = 512
xr = UF.sineSubtraction(x, Ns, H, tfreq, tmag, tphase, fs) # subtract sinusoids from original sound
stocEnv = STM.stochasticModelAnal(xr, H, H*2, stocf) # compute stochastic model of residual
return tfreq, tmag, tphase, stocEnv
def spsModelSynth(tfreq, tmag, tphase, stocEnv, N, H, fs):
"""
Synthesis of a sound using the sinusoidal plus stochastic model
tfreq, tmag, tphase: sinusoidal frequencies, amplitudes and phases; stocEnv: stochastic envelope
N: synthesis FFT size; H: hop size, fs: sampling rate
returns y: output sound, ys: sinusoidal component, yst: stochastic component
"""
ys = SM.sineModelSynth(tfreq, tmag, tphase, N, H, fs) # synthesize sinusoids
yst = STM.stochasticModelSynth(stocEnv, H, H*2) # synthesize stochastic residual
y = ys[:min(ys.size, yst.size)]+yst[:min(ys.size, yst.size)] # sum sinusoids and stochastic components
return y, ys, yst
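
# A hedged usage sketch: analyse a mono sound with spsModelAnal and resynthesise
# it with spsModelSynth. The input file name and every parameter value below are
# example choices, not values prescribed by this module; wavread is assumed to
# be provided by utilFunctions.
def example_sps(inputFile='input.wav'):
    from scipy.signal import get_window
    fs, x = UF.wavread(inputFile)
    w = get_window('hamming', 2001)
    tfreq, tmag, tphase, stocEnv = spsModelAnal(x, fs, w, N=2048, H=128, t=-80,
                                                minSineDur=0.02, maxnSines=150,
                                                freqDevOffset=10, freqDevSlope=0.001,
                                                stocf=0.2)
    y, ys, yst = spsModelSynth(tfreq, tmag, tphase, stocEnv, N=512, H=128, fs=fs)
    return y, ys, yst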
def spsModel(x, fs, w, N, t, stocf):
"""
Analysis/synthesis of a sound using the sinusoidal plus stochastic model
x: input sound, fs: sampling rate, w: analysis window,
N: FFT size (minimum 512), t: threshold in negative dB,
stocf: decimation factor of mag spectrum for stochastic analysis
returns y: output sound, ys: sinusoidal component, yst: stochastic component
"""
hN = N/2 # size of positive spectrum
hM1 = int(math.floor((w.size+1)/2)) # half analysis window size by rounding
hM2 = int(math.floor(w.size/2)) # half analysis window size by floor
Ns = 512 # FFT size for synthesis (even)
H = Ns/4 # Hop size used for analysis and synthesis
hNs = Ns/2
pin = max(hNs, hM1) # initialize sound pointer in middle of analysis window
pend = x.size - max(hNs, hM1) # last sample to start a frame
fftbuffer = np.zeros(N) # initialize buffer for FFT
ysw = np.zeros(Ns) # initialize output sound frame
ystw = np.zeros(Ns) # initialize output sound frame
ys = np.zeros(x.size) # initialize output array
yst = np.zeros(x.size) # initialize output array
w = w / sum(w) # normalize analysis window
sw = np.zeros(Ns)
ow = triang(2*H) # overlapping window
sw[hNs-H:hNs+H] = ow
bh = blackmanharris(Ns) # synthesis window
bh = bh / sum(bh) # normalize synthesis window
wr = bh # window for residual
sw[hNs-H:hNs+H] = sw[hNs-H:hNs+H] / bh[hNs-H:hNs+H]
sws = H*hanning(Ns)/2 # synthesis window for stochastic
while pin<pend:
#-----analysis-----
x1 = x[pin-hM1:pin+hM2] # select frame
mX, pX = DFT.dftAnal(x1, w, N) # compute dft
ploc = UF.peakDetection(mX, t) # find peaks
        iploc, ipmag, ipphase = UF.peakInterp(mX, pX, ploc)   # refine peak values
ipfreq = fs*iploc/float(N) # convert peak locations to Hertz
ri = pin-hNs-1 # input sound pointer for residual analysis
xw2 = x[ri:ri+Ns]*wr # window the input sound
fftbuffer = np.zeros(Ns) # reset buffer
fftbuffer[:hNs] = xw2[hNs:] # zero-phase window in fftbuffer
fftbuffer[hNs:] = xw2[:hNs]
X2 = fft(fftbuffer) # compute FFT for residual analysis
#-----synthesis-----
Ys = UF.genSpecSines(ipfreq, ipmag, ipphase, Ns, fs) # generate spec of sinusoidal component
Xr = X2-Ys; # get the residual complex spectrum
mXr = 20 * np.log10(abs(Xr[:hNs])) # magnitude spectrum of residual
mXrenv = resample(np.maximum(-200, mXr), mXr.size*stocf) # decimate the magnitude spectrum and avoid -Inf
stocEnv = resample(mXrenv, hNs) # interpolate to original size
pYst = 2*np.pi*np.random.rand(hNs) # generate phase random values
Yst = np.zeros(Ns, dtype = complex)
Yst[:hNs] = 10**(stocEnv/20) * np.exp(1j*pYst) # generate positive freq.
Yst[hNs+1:] = 10**(stocEnv[:0:-1]/20) * np.exp(-1j*pYst[:0:-1]) # generate negative freq.
fftbuffer = np.zeros(Ns)
        fftbuffer = np.real(ifft(Ys))                          # inverse FFT of harmonic spectrum
        ysw[:hNs-1] = fftbuffer[hNs+1:]                        # undo zero-phase window
        ysw[hNs-1:] = fftbuffer[:hNs+1]
fftbuffer = np.zeros(Ns)
fftbuffer = np.real(ifft(Yst)) # inverse FFT of stochastic spectrum
ystw[:hNs-1] = fftbuffer[hNs+1:] # undo zero-phase window
ystw[hNs-1:] = fftbuffer[:hNs+1]
ys[ri:ri+Ns] += sw*ysw # overlap-add for sines
yst[ri:ri+Ns] += sws*ystw # overlap-add for stochastic
pin += H # advance sound pointer
y = ys+yst # sum of sinusoidal and residual components
return y, ys, yst
|
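The three SPS functions above are normally driven by a short top-level script: analyse a mono sound into sinusoidal tracks plus a stochastic residual envelope, then resynthesise. The sketch below is a hedged usage example and not part of the original file; the utilFunctions wavread/wavwrite helpers, the window choice and every parameter value are assumptions in the spirit of the sms-tools examples.

from scipy.signal import get_window
import utilFunctions as UF                      # assumed sms-tools helper module

fs, x = UF.wavread('input.wav')                 # hypothetical mono input file
w = get_window('hamming', 2001)                 # odd-length analysis window
tfreq, tmag, tphase, stocEnv = spsModelAnal(x, fs, w, N=2048, H=128, t=-80,
                                            minSineDur=0.02, maxnSines=150,
                                            freqDevOffset=10, freqDevSlope=0.001,
                                            stocf=0.2)
y, ys, yst = spsModelSynth(tfreq, tmag, tphase, stocEnv, N=512, H=128, fs=fs)
UF.wavwrite(y, fs, 'output_sps.wav')            # resynthesised sound (sines + stochastic)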
noahgoldman/torwiz
|
torrents/torrents/database.py
|
Python
|
mit
| 232
| 0.012931
|
def get_all(tordb):
return tordb.find()
def delete(tordb, obj_id):
    tordb.remove([obj_id])
def insert(tordb, obj):
    return tordb.insert(obj)
def update_full(tordb, id, obj):
tordb.update({'_id': id}, {'$set': obj})
|
aeivazi/classroom-tracking
|
src/face_clipper.py
|
Python
|
mit
| 257
| 0.007782
|
def clip_matrix(image_as_matrix, width, height, top, left, expand_by=0):
x1 = left
x2 = left + width
y1 = top
y2 = top + height
crop_img = image_as_matrix[y1-expand_by:y2+expand_by, x1-expand_by:x2+expand_by]
return crop_img
|
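clip_matrix is a plain NumPy slice around a detected face box, with expand_by padding the crop on every side. A minimal hedged example follows; the image path and box coordinates are made up, and OpenCV is used only for loading and saving.

import cv2

img = cv2.imread('frame.jpg')                               # hypothetical input frame
# crop a 100x120 box whose top-left corner is at (left=40, top=60),
# padded by 10 pixels on each side (assumes the padded box stays inside the image)
face = clip_matrix(img, width=100, height=120, top=60, left=40, expand_by=10)
cv2.imwrite('face.png', face)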
segfault87/Konstruktor
|
scripts/generate_color_table.py
|
Python
|
gpl-3.0
| 1,594
| 0.003137
|
#!/usr/bin/python
import sys
if (len(sys.argv) < 2):
fn = '/usr/share/ldraw/LDConfig.ldr'
else:
fn = sys.argv[1]
f = open(fn)
for line in f:
if '!COLOUR' in line:
line = line.strip()
ns = line.split()
category = ''
if 'RUBBER' in line:
category = 'material_rubber'
elif 'METAL' in line:
category = 'material_metallic'
elif 'SPECKLE' in line:
category = 'material_speckle'
elif 'GLITTER' in line:
category = 'material_glitter'
elif 'LUMINANCE' in line:
category = 'material_luminant'
elif 'PEARLESCENT' in line:
category = 'material_pearlescent'
elif 'CHROME' in line:
category = 'material_chrome'
elif 'ALPHA' in line:
category = 'material_transparent'
else:
category = 'material_normal'
name = '"' + ns[2].replace('_', ' ')
idx = int(ns[4])
color = ns[6][1:]
edge = ns[8][1:]
cr = int(color[0:2], 16)
cg = int(color[2:4], 16)
cb = int(color[4:6], 16)
ca = 255
lumi = 0
for i in range(len(ns)):
if ns[i] == 'ALPHA':
ca = int(ns[i+1])
elif ns[i] == 'LUMINANCE':
lumi = int(ns[i+1])
er = int(edge[0:2], 16)
eg = int(edge[2:4], 16)
eb = int(edge[4:6], 16)
print "{ %20s, {%3d, %3d, %3d, %3d}, {%3d, %3d, %3d, 255}, %2d, %3d, %32s\", 0L }," % (category, cr, cg, cb, ca, er, eg, eb, lumi, idx, name)
|
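For reference, a typical LDConfig !COLOUR line looks like 0 !COLOUR Black CODE 0 VALUE #05131D EDGE #595959; with the indexing above (ns[2] name, ns[4] code, ns[6] value, ns[8] edge) the script prints roughly { material_normal, {  5,  19,  29, 255}, { 89,  89,  89, 255},  0,   0, "Black", 0L }, with the column widths approximated here. Lines that also carry ALPHA or LUMINANCE tokens override ca and lumi through the trailing for-loop before printing.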
elterminad0r/cipher_tools
|
src/scripts/freq_analysis.py
|
Python
|
gpl-3.0
| 1,559
| 0.009622
|
#!/usr/bin/env python3
"""
Perform frequency analysis on text. This is already provided by !f; this script
exists for other reasons.
"""
import sys
import argparse
import re
from collections import Counter
def parse_args():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input", type=argparse.FileType("r"), help="input file")
parser.add_argument("-l", "--length", type=int, default=1,
help="Vigenere-style key length")
return parser.parse_args()
def IOC(cnt):
total = sum(cnt.values())
if total:
        return (sum(freq ** 2 - freq for freq in cnt.values())
                / (total ** 2 - total))
else:
return -1
def printchart(hist, start, interval, width=80):
(_, highest), = hist.most_common(1)
highw = len(str(highest))
return ("IOC {:.4f}\nInterval [{}::{}]\n{}"
.format(IOC(hist), start, interval,
("\n".join("{!r} ({:{highw}}) {}"
.format(letter, frequency,
"-" * int(width * frequency / highest),
highw=highw)
for letter, frequency in hist.most_common()))))
def histogram(text, start, interval):
return Counter(re.findall("[a-zA-Z]", text)[start::interval])
if __name__ == "__main__":
args = parse_args()
plain = args.input.read()
for i in range(args.length):
print(printchart(histogram(plain, i, args.length), i, args.length))
|
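IOC computes the standard index of coincidence, the sum of f*(f-1) over N*(N-1) across the letter counts, so monoalphabetic English text tends to sit near 0.066 while uniformly random letters sit near 0.038. A tiny hedged sketch of calling the helpers directly (the sample string is made up and assumes the functions above are importable):

sample = "DEFEND THE EAST WALL OF THE CASTLE"
counts = histogram(sample, 0, 1)      # Counter over every letter (interval 1)
print(IOC(counts))                    # single-alphabet index of coincidence
print(printchart(counts, 0, 1))       # the same bar chart printed per key position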
kaplun/inspire-next
|
inspirehep/modules/refextract/__init__.py
|
Python
|
gpl-3.0
| 1,003
| 0
|
# -*- coding: utf-8 -*-
#
# This file is part of INSPIRE.
# Copyright (C) 2014-2017 CERN.
#
# INSPIRE is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# INSPIRE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with INSPIRE. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this license, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""RefExtract integration."""
from __future__ import absolute_import, division, print_function
|
jsocko515/python-lust
|
lust/config.py
|
Python
|
bsd-3-clause
| 359
| 0.002786
|
from ConfigParser import SafeConfigParser
def load_ini_file(file_name, defaults={}):
    config = SafeConfigParser()
config.readfp(open(file_name))
results = {}
for section in config.sections():
for key, value in config.items(section):
results[section + '.' + key] = value
results.update(defaults)
return results
|
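load_ini_file flattens every [section]/key pair into one dictionary keyed as 'section.key'; note that results.update(defaults) runs last, so anything passed in defaults overrides what the file contains. A hedged sketch with a made-up lust.ini:

# lust.ini (hypothetical contents):
#   [daemon]
#   pid_file = /var/run/lust.pid
#   uid = root
settings = load_ini_file('lust.ini', defaults={'daemon.uid': 'nobody'})
print(settings['daemon.pid_file'])    # '/var/run/lust.pid', read from the file
print(settings['daemon.uid'])         # 'nobody' -- the default wins over 'root'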
greeneyesproject/testbed-demo
|
gui/pklot/designer.py
|
Python
|
mit
| 8,338
| 0.020029
|
'''
Created on 06/apr/2015
@author: luca
'''
import cv2
import numpy as np
import xml.etree.ElementTree as ET
import xml.dom.minidom
STATE_NONE = 0
STATE_DRAW = 1
STATE_DELETE = 2
POINT_RADIUS = 3
POINT_COLOR_CANDIDATE = (0,127,255)
LINE_COLOR_CANDIDATE = (0,127,255)
LINE_THICK_CANDIDATE = 2
LINE_COLOR = (0,192,0)
LINE_THICK = 2
windowName = "parking"
parkingName = "deib"
def point_inside_polygon(point,poly):
x,y=point
n = len(poly)
inside =False
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
if x <= max(p1x,p2x):
if p1y != p2y:
xinters = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xinters:
inside = not inside
p1x,p1y = p2x,p2y
return inside
def intersect(m1,a1,m2,a2):
'''
a1, a2 in radians
'''
A = np.matrix([[1, -np.tan(a1)],[1,-np.tan(a2)]])
b = np.array([m1[1]-m1[0]*np.tan(a1),m2[1]-m2[0]*np.tan(a2)])
x = np.linalg.solve(A,b)
x = np.round(x,0).astype(np.int)
return (x[1],x[0])
def meanAngle(point1,point2):
'''
Angles in [-90;90)
'''
mid = (np.array(point1,np.float) + np.array(point2,np.float))/2
if (point1[1]-point2[1]==0):
angle = 0
elif (point1[0]-point2[0]==0):
angle = -90
else:
angle = np.degrees(np.arctan(1.*(point1[1]-point2[1])/(point1[0]-point2[0])))
return mid, angle
def rectify(quadr):
# Determine midpoints and angular coefficients of the segments ---
m1,a1 = meanAngle(quadr[0],quadr[1])
m2,a2 = meanAngle(quadr[1],quadr[2])
m3,a3 = meanAngle(quadr[2],quadr[3])
m4,a4 = meanAngle(quadr[3],quadr[0])
# Average the angles ---
if (np.sign(a1) == np.sign(a3)):
avgAngle1_3 = np.mean((a1,a3))
else:
if np.abs(a1) > 45:
avgAngle1_3 = np.mean(np.abs((a1,a3)))
else:
avgAngle1_3 = np.mean((a1,a3))
if (np.sign(a2) == np.sign(a4)):
avgAngle2_4 = np.mean((a2,a4))
else:
if np.abs(a2) > 45:
avgAngle2_4 = np.mean(np.abs((a2,a4)))
else:
avgAngle2_4 = np.mean((a2,a4))
if (avgAngle2_4 >= 0):
avgAngle2_4 -= 90
else:
avgAngle2_4 += 90
if (np.sign(avgAngle1_3) == np.sign(avgAngle2_4)):
avgAngle = np.mean((avgAngle1_3,avgAngle2_4))
else:
if np.abs(avgAngle1_3) > 45:
avgAngle = np.mean(np.abs((avgAngle1_3,avgAngle2_4)))
else:
avgAngle = np.mean((avgAngle1_3,avgAngle2_4))
a1 = np.radians(avgAngle)
a3 = a1
a2 = avgAngle + 90
if (a2 >= 90):
a2 -= 180
a2 = np.radians(a2)
a4 = a2
# Determine the intersection points between the 4 new lines ---
p1 = intersect(m1, a1, m2, a2)
p2 = intersect(m2, a2, m3, a3)
p3 = intersect(m3, a3, m4, a4)
p4 = intersect(m4, a4, m1, a1)
rect = [p1,p2,p3,p4]
center = np.mean(rect,axis=0).astype(np.int)
angle = np.floor(avgAngle-90).astype(np.int)
w = np.linalg.norm(np.array(p1)-np.array(p2)).astype(np.int)
h = np.linalg.norm(np.array(p2)-np.array(p3)).astype(np.int)
if (w>h):
angle +=90
(w,h)=(h,w)
rotatedRect = (center,angle,w,h)
return rect,rotatedRect
def redrawImg(data):
img = data['originalImg'].copy()
for rect,rot_rect in zip(data['rectangles'],data['rotatedRectangles']):
cv2.line(img,rect[0],rect[1],LINE_COLOR,LINE_THICK)
cv2.line(img,rect[1],rect[2],LINE_COLOR,LINE_THICK)
cv2.line(img,rect[2],rect[3],LINE_COLOR,LINE_THICK)
cv2.line(img,rect[3],rect[0],LINE_COLOR,LINE_THICK)
cv2.circle(img,tuple(rot_rect[0]),np.floor(0.5*min(rot_rect[2:4])).astype(np.int),LINE_COLOR,LINE_THICK);
cv2.imshow(windowName,img)
data['currentImg'] = img
def onMouse(event,x,y,flags,data):
if (event == cv2.EVENT_LBUTTONUP):
point = (x,y)
if (data['status'] == STATE_DRAW):
# Draw the point ---
img = data['currentImg']
cv2.circle(img,point,POINT_RADIUS,POINT_COLOR_CANDIDATE,-1)
cv2.imshow(windowName,img)
data['currentImg'] = img
# Draw the line from the previous point, if any ---
numPreviousPoints = len(data['candRect'])
if numPreviousPoints > 0 and numPreviousPoints < 3:
cv2.line(img,data['candRect'][numPreviousPoints-1],point,LINE_COLOR_CANDIDATE,LINE_THICK_CANDIDATE)
cv2.imshow(windowName,img)
data['currentImg'] = img
# Add the point to the candidate rectangle ---
data['candRect'] += [point]
elif numPreviousPoints == 3:
# Close the rectangle if this is the fourth point ---
newRect = data['candRect'] + [point]
_,newRotatedRect = rectify(newRect)
data['rectangles'] += [newRect]
data['rotatedRectangles'] += [newRotatedRect]
redrawImg(data);
data['candRect'] = []
data['status'] = STATE_NONE
else:
# Add the point to the candidate rectangle ---
data['candRect'] += [point]
elif (data['status'] == STATE_DELETE):
found = False;
for idx,rect in enumerate(data['rectangles']):
if (point_inside_polygon(point,rect)):
found = True
break
if (found):
del data['rectangles'][idx]
del data['rotatedRectangles'][idx]
redrawImg(data);
data['status'] = STATE_NONE
def main():
print('+'+'-'*10+' Parking Lot Designer v1 '+'-'*10+'+')
print('| Press "n" to define a new parking lot'+' '*7+'|')
print('| Press "d" to delete an existing parking lot'+' '*1+'|')
print('| Press "w" to save the actual configuration'+' '*2+'|')
print('| Press "q" to quit'+' '*27+'|')
print('+'+'-'*45+'+')
imgPath = "camera11.jpg"
xmlPath = "camera11.xml"
img = cv2.imread(imgPath)
cv2.namedWindow(windowName)
cv2.imshow(windowName,img)
drawingStatus = {
"status":STATE_NONE,
"ca
|
ndRect":[],
"originalImg":img,
"currentImg":img.copy(),
|
"rectangles":[],
"rotatedRectangles":[],
}
cv2.setMouseCallback(windowName,onMouse,drawingStatus)
pressedKey = -1
while (pressedKey != ord('q')):
pressedKey = cv2.waitKey(0)
if (pressedKey==ord('n')):
drawingStatus['status'] = STATE_DRAW
drawingStatus['candRect'] = []
redrawImg(drawingStatus);
elif(pressedKey==ord('d')):
drawingStatus['status'] = STATE_DELETE
drawingStatus['candRect'] = []
redrawImg(drawingStatus);
elif(pressedKey==ord('w')):
print('Preparing XML')
xmlParking = ET.Element("parking",id=parkingName)
for idx,(rect,rotRect) in enumerate(zip(drawingStatus['rectangles'],drawingStatus['rotatedRectangles'])):
xmlSpace = ET.SubElement(xmlParking, "space", id=str(idx+1))
xmlRotRect = ET.SubElement(xmlSpace, "rotatedRect")
ET.SubElement(xmlRotRect, "center", x=str(rotRect[0][0]),y=str(rotRect[0][1]))
ET.SubElement(xmlRotRect, "size", w=str(rotRect[2]),h=str(rotRect[3]))
ET.SubElement(xmlRotRect, "angle", d=str(rotRect[1]))
xmlContour = ET.SubElement(xmlSpace, "contour")
for point in rect:
ET.SubElement(xmlContour, "point", x=str(point[0]),y=str(point[1]))
print('Saving to ' + xmlPath)
xmlString = ET.tostring(xmlParking)
xmlDom = xml.dom.minidom.parseString(xmlString)
prettyXmlString = xmlDom.toprettyxml
|
shuoli84/gevent_socketio2
|
socketio/binary.py
|
Python
|
mit
| 2,907
| 0
|
# coding=utf-8
"""
Binary class deconstruct, reconstruct packet
"""
import copy
class Binary(object):
@staticmethod
def deconstruct_packet(packet):
"""
Replaces every bytearray in packet with a numbered placeholder.
:param packet:
:return: dict with packet and list of buffers
"""
buffers = []
packet_data = packet.get('data', None)
def _deconstruct_packet(data):
            if type(data) is bytearray:
                place_holder = {
                    '_placeholder': True,
                    'num': len(buffers)
}
buffers.append(data)
return place_holder
if type(data) is list:
new_data = []
for d in data:
new_data.append(_deconstruct_packet(d))
return new_data
if type(data) is dict:
new_data = {}
for k, v in data.items():
new_data[k] = _deconstruct_packet(v)
return new_data
return data
pack = copy.copy(packet)
pack['data'] = _deconstruct_packet(packet_data)
pack['attachments'] = len(buffers)
return {
'packet': pack,
'buffers': buffers
}
@staticmethod
def reconstruct_packet(packet, buffers):
def _reconstruct_packet(data):
if type(data) is dict:
if '_placeholder' in data:
buf = buffers[data['num']]
return buf
else:
for k, v in data.items():
data[k] = _reconstruct_packet(v)
return data
if type(data) is list:
for i in xrange(len(data)):
data[i] = _reconstruct_packet(data[i])
return data
return data
packet['data'] = _reconstruct_packet(packet['data'])
del packet['attachments']
return packet
@staticmethod
def remove_blobs(data):
def _remove_blobs(obj, cur_key=None, containing_obj=None):
if not obj:
return obj
try:
# Try to read it as a file
buf = bytearray(obj.read())
if containing_obj is not None and cur_key is not None:
containing_obj[cur_key] = buf
else:
return buf
except AttributeError:
pass
if type(obj) is list:
for index, item in enumerate(obj):
_remove_blobs(item, index, obj)
if type(obj) is dict:
for k, v in obj.items():
_remove_blobs(v, k, obj)
return obj
blobless_data = _remove_blobs(data)
return blobless_data
|
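deconstruct_packet and reconstruct_packet are inverses: the first swaps every bytearray for a {'_placeholder': True, 'num': i} marker and collects the buffers, the second splices the buffers back and drops the attachments count. A hedged round-trip sketch for this Python 2 module (the packet layout is a made-up socket.io-style event):

packet = {'type': 5, 'data': ['file', {'name': 'a.bin', 'body': bytearray(b'\x00\x01')}]}
parts = Binary.deconstruct_packet(packet)
# parts['packet']['data'][1]['body'] is now {'_placeholder': True, 'num': 0}
# parts['packet']['attachments'] == 1 and parts['buffers'] == [bytearray(b'\x00\x01')]
restored = Binary.reconstruct_packet(parts['packet'], parts['buffers'])
# restored['data'][1]['body'] is the original bytearray again; 'attachments' is gone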
godiard/memorize-activity
|
game.py
|
Python
|
gpl-2.0
| 14,959
| 0.000067
|
# Copyright (C) 2006, 2007, 2008 One Laptop per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import logging
from gi.repository import GLib
from gi.repository import GObject
from os.path import join
from gettext import gettext as _
from model import Model
from audio import Audio
FLOP_BACK_TIMEOUT = 2000
class MemorizeGame(GObject.GObject):
__gsignals__ = {
'reset_scoreboard': (GObject.SignalFlags.RUN_FIRST, None, []),
'reset_table': (GObject.SignalFlags.RUN_FIRST, None, []),
        'load_mode': (GObject.SignalFlags.RUN_FIRST, None,
                      [GObject.TYPE_PYOBJECT]),
        'load_game': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'change_game': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'change_game_signal': (GObject.SignalFlags.RUN_FIRST, None,
5 * [GObject.TYPE_PYOBJECT]),
'set-border': (GObject.SignalFlags.RUN_FIRST, None,
3 * [GObject.TYPE_PYOBJECT]),
'flip-card': (GObject.SignalFlags.RUN_FIRST, None, [int, bool]),
'flip-card-signal': (GObject.SignalFlags.RUN_FIRST, None, [int]),
'cement-card': (GObject.SignalFlags.RUN_FIRST, None, [int]),
'flop-card': (GObject.SignalFlags.RUN_FIRST, None, [int]),
'highlight-card': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'add_buddy': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'rem_buddy': (GObject.SignalFlags.RUN_FIRST, None,
[GObject.TYPE_PYOBJECT]),
'increase-score': (GObject.SignalFlags.RUN_FIRST, None,
[GObject.TYPE_PYOBJECT]),
'wait_mode_buddy': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'msg_buddy': (GObject.SignalFlags.RUN_FIRST, None,
2 * [GObject.TYPE_PYOBJECT]),
'change-turn': (GObject.SignalFlags.RUN_FIRST, None,
[GObject.TYPE_PYOBJECT]), }
def __init__(self):
GObject.GObject.__init__(self)
self.myself = None
self.players_score = {}
self.players = []
self.waiting_players = []
self.current_player = None
self.last_flipped = -1
self.last_highlight = 1
self._flop_card_timeout = -1
self.sentitive = True
self.model = Model()
self.flip_block = False
self._flop_cards = None
self.audio = Audio()
self._audio_play_finished_id = 0
def load_game(self, game_name, size, mode):
self.set_load_mode('Loading game')
if self.model.read(game_name) == 0:
logging.debug('load_game set is_demo mode %s', mode)
self.model.is_demo = (mode == 'demo')
self.model.def_grid(size)
self.model.data['running'] = 'False'
self.model.data['mode'] = mode
logging.debug(' Read setup file %r: %r ',
game_name, self.model.grid)
self.emit('load_game', self.model.data, self.model.grid)
else:
logging.error(' Reading setup file %s', game_name)
def load_remote(self, grid, data, mode, signal=False):
self.set_load_mode(_('Loading game...'))
self.model.grid = grid
self.model.data = data
self.model.data['mode'] = mode
self.emit('reset_scoreboard')
if not signal:
self.emit('change_game_signal', mode, self.get_grid(),
self.model.data, self.waiting_players,
self.model.data['game_file'])
self.emit('change_game', self.model.data, self.get_grid())
for buddy in self.players:
self.players_score[buddy] = 0
self.current_player = None
self.last_flipped = -1
self.last_highlight = 1
self.change_turn()
self.model.data['running'] = 'False'
for card in self.model.grid:
if len(card) > 0:
if card['state'] == '1':
self.emit('flip-card', self.model.grid.index(card), False)
self.last_flipped = self.model.grid.index(card)
elif card['state'] != '0':
stroke_color, fill_color = card['state'].split(',')
self.emit('flip-card', self.model.grid.index(card), False)
self.emit('set-border', self.model.grid.index(card),
stroke_color, fill_color)
else:
continue
logging.debug('load_remote set is_demo mode %s', mode)
if mode != 'reset':
self.model.is_demo = (mode == 'demo')
def add_buddy(self, buddy, score=0):
logging.debug('Buddy %r was added to game', buddy.props.nick)
self.players.append(buddy)
def key(a):
return a.props.nick
self.players = sorted(self.players, key=key)
self.players_score[buddy] = score
self.emit('add_buddy', buddy, score)
logging.debug(str(buddy))
if self.current_player is None:
self.current_player = buddy
self.change_turn()
def rem_buddy(self, buddy):
logging.debug('Buddy %r was removed from game', buddy.props.nick)
if self.current_player == buddy and len(self.players) >= 2:
if self.last_flipped != -1:
self.emit('flop-card', self.last_flipped)
self.model.grid[self.last_flipped]['state'] = '0'
self.last_flipped = -1
self.change_turn()
index = self.players.index(buddy)
del self.players[index]
del (self.players_score[buddy])
self.emit('rem_buddy', buddy)
def buddy_message(self, buddy, text):
self.emit('msg_buddy', buddy, text)
def update_turn(self):
self.set_sensitive(self.current_player == self.myself)
self.emit('change-turn', self.current_player)
def change_turn(self):
if len(self.players) <= 1:
self.current_player = self.players[0]
if self.current_player is None:
self.current_player = self.players[0]
elif self.current_player == self.players[-1]:
self.current_player = self.players[0]
else:
next_player = self.players.index(self.current_player) + 1
self.current_player = self.players[next_player]
self.update_turn()
def card_flipped(self, widget, identifier, signal=False):
self.model.count = self.model.count + 1
# Check if is my turn
if (not self.sentitive and not signal) or \
self.last_flipped == identifier:
return
# Handle groups if needed
if self.model.data.get('divided') == '1':
if self.last_flipped == -1 and identifier \
>= (len(self.model.grid) // 2):
return
if self.last_flipped != -1 and identifier \
< (len(self.model.grid) // 2):
return
# do not process flips when flipping back
if self.flip_block:
return
else:
self.flip_block = True
self.model.data['running'] = 'True'
def flip_card(full_animation):
self.emit('flip-card', identifier, full_animation)
|
hermco/jenkins_cli_tool
|
setup.py
|
Python
|
mit
| 521
| 0
|
from distutils.core import setup
setup(
name='jenkins_cli_tool',
version='3.1',
packages=['cli', 'cli.startjob', 'cli.startAndMonitor', 'tests'],
url='https://github.com/hermco/jenkins_cli_tool',
license='MIT',
author='chermet',
author_email='chermet@axway.com',
description='CLI tool for Jenkins',
install_requires=[
'click',
'python-jenkins'
],
entry_points={
        'console_scripts': [
'jenkins-cli-tool = cli.cli:entry_point'
]
}
)
|
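Note that entry_points and install_requires are setuptools features, so in practice the import would need to come from setuptools rather than distutils for the console script to be generated. When it is, installing the package (for example with pip install .) puts a jenkins-cli-tool executable on the PATH whose behaviour is roughly the hedged stub below:

# approximately what the generated 'jenkins-cli-tool' wrapper does
from cli.cli import entry_point

if __name__ == '__main__':
    entry_point()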
BenLand100/chatlogs
|
wordprofile.py
|
Python
|
gpl-3.0
| 2,410
| 0.009129
|
#!/usr/bin/env python3
'''
* Copyright 2015 by Benjamin J. Land (a.k.a. BenLand100)
*
* This file is part of chatlogs.
*
* chatlogs is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* chatlogs is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with chatlogs. If not, see <http://www.gnu.org/licenses/>.
'''
import collections
import nltk
import string
import tools
import json
import sys
import re
import multiprocessing
import enchant
if len(sys.argv) < 5:
print('./wordprofile.py database num splittype maxlen src+')
print('\tsplittype can be one of: nltk, regex, respell')
sys.exit(1)
db = tools.database(sys.argv[1])
maxlen = int(sys.argv[4])
query = ' OR '.join(['src LIKE ?' for i in range(len(sys.argv)-5)])
args = tuple(sys.argv[5:])
words = collections.Counter()
if sys.argv[3] == 'nltk':
for i in db.get_iter(query,args):
thin = ' '.join([x.lower() for x in i.msg.split(' ') if len(x) <= maxlen])
words.update(nltk.word_tokenize(thin))
elif sys.argv[3] == 'regex':
    wordtokregex = re.compile('([\w\']+|[\:\=][^ ])')
for i in db.get_iter(query,args):
thin = ' '.join([x.lower() for x in i.msg.split(' ') if len(x) <= maxlen])
words.update([word for word in wordtokregex.findall(thin)])
elif sys.argv[3][0:7] == 'respell':
try:
maxdist = int(sys.argv[3].split(':')[1])
except:
maxdist = 0
wordtokregex = re.compile('([\w\']+|[\:\=][^ ])')
sgst = tools.suggester(maxdist)
for i in db.get_iter(query,args):
parts = ' '.join([x.upper() for x in i.msg.split(' ') if len(x) <= maxlen])
parts = [word for word in wordtokregex.findall(parts)]
parts = [sgst.suggest(word) for word in parts]
words.update([word for word in parts if word])
print('"---total---"',',',sum(words.values()))
print('"---unique---"',',',len(set(words)))
[print('"'+word[0]+'",',word[1]) for word in words.most_common(int(sys.argv[2]))]
|
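Each trailing src argument becomes a 'src LIKE ?' clause, so SQL wildcards ('%') choose which sources to profile, and the respell splitter accepts an optional edit-distance suffix parsed out of splittype. A hedged invocation, with the database name, pattern and limits made up:

./wordprofile.py chatlogs.db 100 respell:2 20 '%#linux%'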
6WIND/scapy
|
scapy/compat.py
|
Python
|
gpl-2.0
| 4,048
| 0.000247
|
# This file is part of Scapy
# See http://www.secdev.org/projects/scapy for more information
# Copyright (C) Philippe Biondi <phil@secdev.org>
# Copyright (C) Gabriel Potter <gabriel@potter.fr>
# This program is published under a GPLv2 license
"""
Python 2 and 3 link classes.
"""
from __future__ import absolute_import
import base64
import binascii
import gzip
import struct
import sys
import scapy.modules.six as six
###########
# Python3 #
###########
def lambda_tuple_converter(func):
"""
Converts a Python 2 function as
lambda (x,y): x + y
In the Python 3 format:
lambda x,y : x + y
"""
    if func is not None and func.__code__.co_argcount == 1:
return lambda *args: func(args[0] if len(args) == 1 else args)
else:
return func
if six.PY2:
bytes_encode = plain_str = str
chb = lambda x: x if isinstance(x, str) else chr(x)
orb = ord
def raw(x):
"""Builds a packet and returns its bytes representation.
        This function is and will always be cross-version compatible"""
if hasattr(x, "__bytes__"):
return x.__bytes__()
return bytes(x)
else:
def raw(x):
"""Builds a packet and returns its bytes representation.
        This function is and will always be cross-version compatible"""
return bytes(x)
def bytes_encode(x):
"""Ensure that the given object is bytes.
If the parameter is a packet, raw() should be preferred.
"""
if isinstance(x, str):
return x.encode()
return bytes(x)
if sys.version_info[0:2] <= (3, 4):
def plain_str(x):
"""Convert basic byte objects to str"""
if isinstance(x, bytes):
return x.decode(errors="ignore")
return str(x)
else:
# Python 3.5+
def plain_str(x):
"""Convert basic byte objects to str"""
if isinstance(x, bytes):
return x.decode(errors="backslashreplace")
return str(x)
def chb(x):
"""Same than chr() but encode as bytes."""
return struct.pack("!B", x)
def orb(x):
"""Return ord(x) when not already an int."""
if isinstance(x, int):
return x
return ord(x)
def bytes_hex(x):
"""Hexify a str or a bytes object"""
return binascii.b2a_hex(bytes_encode(x))
def hex_bytes(x):
"""De-hexify a str or a byte object"""
return binascii.a2b_hex(bytes_encode(x))
def base64_bytes(x):
"""Turn base64 into bytes"""
if six.PY2:
return base64.decodestring(x)
return base64.decodebytes(bytes_encode(x))
def bytes_base64(x):
"""Turn bytes into base64"""
if six.PY2:
return base64.encodestring(x).replace('\n', '')
return base64.encodebytes(bytes_encode(x)).replace(b'\n', b'')
if six.PY2:
from StringIO import StringIO
def gzip_decompress(x):
"""Decompress using gzip"""
with gzip.GzipFile(fileobj=StringIO(x), mode='rb') as fdesc:
return fdesc.read()
def gzip_compress(x):
"""Compress using gzip"""
buf = StringIO()
with gzip.GzipFile(fileobj=buf, mode='wb') as fdesc:
fdesc.write(x)
return buf.getvalue()
else:
gzip_decompress = gzip.decompress
gzip_compress = gzip.compress
# Typing compatibility
try:
# Only required if using mypy-lang for static typing
from typing import Optional, List, Union, Callable, Any, AnyStr, Tuple, \
Sized, Dict, Pattern, cast
except ImportError:
# Let's make some fake ones.
def cast(_type, obj):
return obj
class _FakeType(object):
        # make the objects subscriptable indefinitely
def __getitem__(self, item):
return _FakeType()
Optional = _FakeType()
Union = _FakeType()
Callable = _FakeType()
List = _FakeType()
Dict = _FakeType()
Any = _FakeType()
AnyStr = _FakeType()
Tuple = _FakeType()
Pattern = _FakeType()
class Sized(object):
pass
|
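These shims let the rest of Scapy treat bytes uniformly across Python 2 and 3. A short sketch of the most commonly used helpers, with the Python 3 results in the comments:

print(bytes_encode('abc'))       # b'abc' (str is encoded to bytes)
print(plain_str(b'abc'))         # 'abc' (bytes decoded back to str)
print(chb(0x41))                 # b'A', a single byte regardless of version
print(orb(b'A'[0]))              # 65, whether indexing yields an int or a str
print(bytes_hex(b'\x01\xff'))    # b'01ff'
print(hex_bytes('01ff'))         # b'\x01\xff'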