| repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (1 class: Python) | license (15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
woutdenolf/spectrocrunch
|
scraps/ffnoisesimul.py
|
Python
|
mit
| 9,053
| 0.00232
|
# -*- coding: utf-8 -*-
import os, sys
sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from spectrocrunch.materials.compoundfromformula import compoundfromformula
from spectrocrunch.materials.compoundfromname import compoundfromname
from spectrocrunch.materials.mixture import mixture
from spectrocrunch.materials.types import fraction
from spectrocrunch.simulation import calcnoise
from spectrocrunch.simulation import materials
from spectrocrunch.math import noisepropagation
import numpy as np
import scipy.optimize
import matplotlib.pyplot as plt
class sample(object):
@staticmethod
def getnframes(totaltime, frametime, fracflat):
n = int(round(totaltime / frametime))
nflat = max(int(round(fracflat * n / 2.0)), 1)
nflat *= 2 # before and after
ndata = max(n - nflat, 1)
return ndata, nflat
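# Worked example (using the hg115_ff parameters below, an assumed input):
# totaltime=70, frametime=0.07, fracflat=1/3 gives n = round(70/0.07) = 1000
# frames, nflat = max(round(1000/6), 1)*2 = 334 flats, ndata = 666 data frames.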
@staticmethod
def getrealtime(totaltime, frametime, fracflat):
ndata, nflat = sample.getnframes(totaltime, frametime, fracflat)
n = ndata + nflat
overhead = 6.50305 + 0.0131498 * n
return frametime * n + overhead
def xanes(
self, I0, energy, totaltime=None, frametime=None, fracflat=None, ndark=None
):
ndata, nflat = self.getnframes(totaltime, frametime, fracflat)
energy = np.asarray(energy)
N, N0, D, D0 = calcnoise.id21_ffnoise(
I0,
energy,
self.composition,
tframe_data=frametime,
nframe_data=ndata,
tframe_flat=frametime,
nframe_flat=nflat,
nframe_dark=ndark,
)
T = calcnoise.transmission(
N,
N0,
D=D,
D0=D0,
tframe_data=frametime,
nframe_data=ndata,
tframe_flat=frametime,
nframe_flat=nflat,
nframe_dark=ndark,
)
XAS = calcnoise.absorbance(T)
signal = noisepropagation.E(XAS)
noise = noisepropagation.S(XAS)
return signal, noise
def costfunc(self, I0, energy, **kwargs):
signal, noise = self.xanes(I0, energy, **kwargs)
# return np.max(noise/signal*100)
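# noise-to-jump criterion: mean noise divided by the absorbance edge jump
# (signal[-1] - signal[0]); its reciprocal is the "jump-to-noise" plotted below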
return np.mean(noise) / (signal[-1] - signal[0])
def __str__(self):
return str(self.composition)
def plotxanesnoise(self, I0, energy, **kwargs):
signal, noise = self.xanes(I0, energy, **kwargs)
plt.plot(energy, noise / signal * 100)
plt.xlabel("Energy (keV)")
plt.ylabel("N/S (%)")
def plotxanes(self, I0, energy, **kwargs):
signal, _ = self.xanes(I0, energy, **kwargs)
plt.plot(energy, signal)
plt.xlabel("Energy (keV)")
plt.ylabel("Absorbance")
class sample_hg115(sample):
def __init__(self, wpigment=10, paintthickness=10):
binder = compoundfromname("linseed oil")
pigment = compoundfromname("verdigris")
paint = mixture(
[binder, pigment], [1 - wpigment / 100.0, wpigment / 100.0], fraction.mass
)
ultralene = compoundfromname("ultralene")
sfreetape = compoundfromname("sulfur-free tape")
# ultralene = compoundfromname("vacuum")
# sfreetape = compoundfromname("vacuum")
m = [ultralene, paint, sfreetape]
thickness = [4, paintthickness, 10]
# m = [compoundfromname("vacuum"),compoundfromname("vacuum"),compoundfromname("vacuum")]
self.composition = materials.factory(
"Multilayer",
material=m,
thickness=thickness,
anglein=0,
angleout=0,
azimuth=0,
)
self.paintindex = 1
def set_wpigment(self, wpigment):
w = self.composition.material[self.paintindex].massfractions()
w["verdigris"] = wpigment / 100.0
w["linseed oil"] = 1 - wpigment / 100.0
self.composition.material[self.paintindex].change_fractions(w, fraction.mass)
def get_wpigment(self):
return (
self.composition.material[self.paintindex].massfractions()["verdigris"]
* 100
)
def set_paintthickness(self, paintthickness):
self.composition.thickness[self.paintindex] = paintthickness
def get_paintthickness(self):
return self.composition.thickness[self.paintindex]
def optimize_thickness(self, I0, energy, **kwargs):
def costfunc(paintthickness):
self.set_paintthickness(paintthickness[0])
c = self.costfunc(I0, energy, **kwargs)
return c
guess = self.get_paintthickness()
result = scipy.optimize.least_squares(costfunc, guess, gtol=1e-015, ftol=1e-015)
print result.message
return result.x[0], result.success
def optimize_wpigment(self, I0, energy, **kwargs):
def costfunc(wpigment):
self.set_wpigment(wpigment[0])
c = self.costfunc(I0, energy, **kwargs)
return c
guess = self.get_wpigment()
result = scipy.optimize.least_squares(
costfunc, guess, bounds=([0, 100]), gtol=1e-015, ftol=1e-015
)
print result.message
return result.x[0], result.success
def optimize_thickness_plot(self, I0, energy, **kwargs):
thickness = self.get_paintthickness()
t = np.linspace(max(thickness - 100, 0), thickness + 100, 50)
r = np.zeros(len(t))
for i, paintthickness in enumerate(t):
self.set_paintthickness(paintthickness)
r[i] = self.costfunc(I0, energy, **kwargs)
self.set_paintthickness(thickness)
plt.plot(t, 1 / r, "-o", label="{} %".format(self.get_wpigment()))
plt.xlabel("thickness ($\mu$m)")
plt.ylabel("Jump-to-noise")
def optimize_wpigment_plot(self, I0, energy, **kwargs):
w = self.get_wpigment()
t = np.linspace(0, 20, 50)
r = np.zeros(len(t))
for i, wpigment in enumerate(t):
self.set_wpigment(wpigment)
r[i] = self.costfunc(I0, energy, **kwargs)
self.set_wpigment(w)
plt.plot(t, 1 / r, "-o", label="{} $\mu$m".format(self.get_paintthickness()))
plt.xlabel("Verdigris (%)")
plt.ylabel("Jump-to-noise")
def optimize(self, I0, energy, **kwargs):
def costfunc(p):
self.set_wpigment(p[0])
self.set_paintthickness(p[1])
return self.costfunc(I0, energy, **kwargs)
guess = (self.get_wpigment(), self.get_paintthickness())
result = scipy.optimize.least_squares(
costfunc, guess, bounds=([0, 0], [100, 1e6]), gtol=1e-015
)
print result.message
return result.x, result.success
def hg115_ff():
sample = sample_hg115()
I0 = 1e6
energy = np.linspace(8.9, 9.3, 100)
totaltime = 70
frametime = 0.07
fracflat = 1 / 3.0
ndark = 30
kwargs = {
"totaltime": totaltime,
"frametime": frametime,
"fracflat": fracflat,
"ndark": ndark,
}
opt = 1
energyopt = [8.97, 9]
if opt == 0:
sample.set_wpigment(10)
t, s = sample.optimize_thickness(I0, energyopt, **kwargs)
sample.set_paintthickness(t)
elif opt == 1:
sample.set_paintthickness(20)
w, s = sample.optimize_wpigment(I0, energyopt, **kwargs)
sample.set_wpigment(w)
else:
wt, s = sample.optimize(I0, energy, **kwargs)
sample.set_wpigment(wt[0])
sample.set_paintthickness(wt[1])
print "Thickness = {} μm".format(sample.get_paintthickness())
print "Verdigris = {} wt%".format(sample.get_wpigment())
print "Jump to noise = {}".format(1 / sample.costfunc(I0, energyopt, **kwargs))
print ""
plt.figure()
for thickness in [10, 15, 20]:
sample.set_paintthickness(thickness)
sample.optimize_wpigment_plot(I0, energy, **kwargs)
plt.legend(loc="best")
plt.show()
exit()
sample.optimize_thickness_plot(I0, energy, **kwargs)
sample.optimize_wpigment_plot(I0, energy, **kwargs)
plt.figure()
sample.plotxanes(I0, energy, **kwargs)
plt.figure()
sample.p
|
islamgulov/libcloud.rest
|
libcloud_rest/api/urls.py
|
Python
|
apache-2.0
| 704
| 0
|
# -*- coding:utf-8 -*-
from werkzeug.routing import Map, Submount
import libcloud
from libcloud_rest.api.handlers import app_handler
from libcloud_rest.api.handlers.compute import compute_handler
from libcloud_rest.api.handlers.dns import dns_handler
from libcloud_rest.api.handlers.loadbalancer import lb_handler
from libcloud_rest.api.handlers.storage import storage_handler
from libcloud_rest.api.versions import versions
api_version = '/%s' % (versions[libcloud.__version__])
urls = Map([
app_handler.get_rules(),
Submount(api_version, [
compute_handler.get_rules(),
dns_handler.get_rules(),
lb_handler.get_rules(),
storage_handler.get_rules(),
])
])
|
uniite/pyirc
|
test_msg.py
|
Python
|
mit
| 63
| 0
|
from models.pusher import push
print repr(push("Bazinga..."))
|
Lyleo/OmniMarkupPreviewer
|
OmniMarkupLib/Renderers/libs/python3/docutils/transforms/universal.py
|
Python
|
mit
| 10,307
| 0.001067
|
# $Id$
# -*- coding: utf8 -*-
# Authors: David Goodger <goodger@python.org>; Ueli Schlaepfer; Günter Milde
# Maintainer: docutils-develop@lists.sourceforge.net
# Copyright: This module has been placed in the public domain.
"""
Transforms needed by most or all documents:
- `Decorations`: Generate a document's header & footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
from docutils.utils import smartquotes
class Decorations(Transform):
"""
Populate a document's decoration element (header, footer).
"""
default_priority = 820
def apply(self):
header_nodes = self.generate_header()
if header_nodes:
decoration = self.document.get_decoration()
header = decoration.get_header()
header.extend(header_nodes)
footer_nodes = self.generate_footer()
if footer_nodes:
decoration = self.document.get_decoration()
footer = decoration.get_footer()
footer.extend(footer_nodes)
def generate_header(self):
return None
def generate_footer(self):
# @@@ Text is hard-coded for now.
# Should be made dynamic (language-dependent).
settings = self.document.settings
if settings.generator or settings.datestamp or settings.source_link \
or settings.source_url:
text = []
if settings.source_link and settings._source \
or settings.source_url:
if settings.source_url:
source = settings.source_url
else:
source = utils.relative_path(settings._destination,
settings._source)
text.extend([
nodes.reference('', 'View document source',
refuri=source),
nodes.Text('.\n')])
if settings.datestamp:
datestamp = time.strftime(settings.datestamp, time.gmtime())
text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
if settings.generator:
text.extend([
nodes.Text('Generated by '),
nodes.reference('', 'Docutils', refuri=
'http://docutils.sourceforge.net/'),
nodes.Text(' from '),
nodes.reference('', 'reStructuredText', refuri='http://'
'docutils.sourceforge.net/rst.html'),
nodes.Text(' source.\n')])
return [nodes.paragraph('', '', *text)]
else:
return None
class ExposeInternals(Transform):
"""
Expose internal attributes if ``expose_internals`` setting is set.
"""
default_priority = 840
def not_Text(self, node):
return not isinstance(node, nodes.Text)
def apply(self):
if self.document.settings.expose_internals:
for node in self.document.traverse(self.not_Text):
for att in self.document.settings.expose_internals:
value = getattr(node, att, None)
if value is not None:
node['internal:' + att] = value
class Messages(Transform):
"""
Place any system messages generated after parsing into a dedicated section
of the document.
"""
default_priority = 860
def apply(self):
unfiltered = self.document.transform_messages
threshold = self.document.reporter.report_level
messages = []
for msg in unfiltered:
if msg['level'] >= threshold and not msg.parent:
messages.append(msg)
if messages:
section = nodes.section(classes=['system-messages'])
# @@@ get this from the language module?
section += nodes.title('', 'Docutils System Messages')
section += messages
self.document.transform_messages[:] = []
self.document += section
class FilterMessages(Transform):
"""
Remove system messages below verbosity threshold.
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
if node['level'] < self.document.reporter.report_level:
node.parent.remove(node)
class TestMessages(Transform):
"""
Append all post-parse system messages to the end of the document.
Used for testing purposes.
"""
default_priority = 880
def apply(self):
for msg in self.document.transform_messages:
if not msg.parent:
self.document += msg
class StripComments(Transform):
"""
Remove comment elements from the document tree (only if the
``strip_comments`` setting is enabled).
"""
default_priority = 740
def apply(self):
if self.document.settings.strip_comments:
for node in self.document.traverse(nodes.comment):
node.parent.remove(node)
class StripClassesAndElements(Transform):
"""
Remove from the document tree all elements with classes in
`self.document.settings.strip_elements_with_classes` and all "classes"
attribute values in `self.document.settings.strip_classes`.
"""
default_priority = 420
def apply(self):
if not (self.document.settings.strip_elements_with_classes
or self.document.settings.strip_classes):
return
# prepare dicts for lookup (not sets, for Python 2.2 compatibility):
self.strip_elements = dict(
[(key, None)
for key in (self.document.settings.strip_elements_with_classes
or [])])
self.strip_classes = dict(
[(key, None) for key in (self.document.settings.strip_classes
or [])])
for node in self.document.traverse(self.check_classes):
node.parent.remove(node)
def check_classes(self, node):
if isinstance(node, nodes.Element):
for class_value in node['classes'][:]:
if class_value in self.strip_classes:
node['classes'].remove(class_value)
if class_value in self.strip_elements:
return 1
class SmartQuotes(Transform):
"""
Replace ASCII quotation marks with typographic form.
Also replace multiple dashes with em-dash/en-dash characters.
"""
default_priority = 850
def __init__(self, document, startnode):
Transform.__init__(self, document, startnode=startnode)
self.unsupported_languages = set()
def get_tokens(self, txtnodes):
# A generator that yields ``(texttype, nodetext)`` tuples for a list
# of "Text" nodes (interface to ``smartquotes.educate_tokens()``).
texttype = {True: 'literal', # "literal" text is not changed:
False: 'plain'}
for txtnode in txtnodes:
nodetype = texttype[isinstance(txtnode.parent,
(nodes.literal,
nodes.math,
nodes.image,
nodes.raw,
nodes.problematic))]
yield (nodetype, txtnode.astext())
def apply(self):
smart_quotes = self.document.settings.smart_quotes
if not smart_quotes:
return
try:
alternative = smart_quotes.startswith('alt')
except AttributeError:
alternative = False
# print repr(alternative)
document_language = self.document.settings.language_code
# "Educate" quotes in normal text. Handle each block of text
#
|
tbabej/freeipa
|
ipaserver/install/otpdinstance.py
|
Python
|
gpl-3.0
| 948
| 0
|
# Authors: Tomas Babej <tbabej@redhat.com>
#
# Copyright (C) 2013 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from ipaserver.install import service
class OtpdInstance(service.SimpleServiceInstance):
def __init__(self):
service.SimpleServiceInstance.__init__(self, "ipa-otpd")
|
StarbuckBG/BTCGPU
|
test/functional/rest.py
|
Python
|
mit
| 15,085
| 0.00822
|
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the REST API."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from struct import *
from io import BytesIO
from codecs import encode
import http.client
import urllib.parse
def deser_uint256(f):
r = 0
for i in range(8):
t = unpack(b"<I", f.read(4))[0]
r += t << (i * 32)
return r
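# Round-trip sketch (assumed usage, mirroring the read loop above): a 256-bit
# integer serialized as eight little-endian 32-bit words is recovered exactly:
#   data = b''.join(pack(b"<I", (0x1234 >> (i * 32)) & 0xFFFFFFFF) for i in range(8))
#   assert deser_uint256(BytesIO(data)) == 0x1234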
#allows simple http get calls
def http_get_call(host, port, path, response_object = 0):
conn = http.client.HTTPConnection(host, port)
conn.request('GET', path)
if response_object:
return conn.getresponse()
return conn.getresponse().read().decode('utf-8')
#allows simple http post calls with a request body
def http_post_call(host, port, path, requestdata = '', response_object = 0):
conn = http.client.HTTPConnection(host, port)
conn.request('POST', path, requestdata)
if response_object:
return conn.getresponse()
return conn.getresponse().read()
class RESTTest (BitcoinTestFramework):
FORMAT_SEPARATOR = "."
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self, split=False):
super().setup_network()
connect_nodes_bi(self.nodes, 0, 2)
def run_test(self):
url = urllib.parse.urlparse(self.nodes[0].url)
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[2].generate(100)
self.sync_all()
assert_equal(self.nodes[0].getbalance(), 50)
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
self.sync_all()
self.nodes[2].generate(1)
self.sync_all()
bb_hash = self.nodes[0].getbestblockhash()
assert_equal(self.nodes[1].getbalance(), Decimal("0.1")) #balance now should be 0.1 on node 1
# load the latest 0.1 tx over the REST API
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
#######################################
# GETUTXOS: query an unspent outpoint #
#######################################
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is one utxo
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['utxos'][0]['value'], 0.1)
#################################################
# GETUTXOS: now query an already spent outpoint #
#################################################
json_request = '/checkmempool/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
#check chainTip response
assert_equal(json_obj['chaintipHash'], bb_hash)
#make sure there is no utxo in the response because this outpoint has been spent
assert_equal(len(json_obj['utxos']), 0)
#check bitmap
assert_equal(json_obj['bitmap'], "0")
##################################################
# GETUTXOS: now check both with the same request #
##################################################
json_request = '/checkmempool/'+txid+'-'+str(n)+'/'+vintx+'-0'
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1)
assert_equal(json_obj['bitmap'], "10")
#test binary response
bb_hash = self.nodes[0].getbestblockhash()
binaryRequest = b'\x01\x02'
binaryRequest += hex_str_to_bytes(txid)
binaryRequest += pack("i", n)
binaryRequest += hex_str_to_bytes(vintx)
binaryRequest += pack("i", 0)
bin_response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', binaryRequest)
output = BytesIO()
output.write(bin_response)
output.seek(0)
chainHeight = unpack("i", output.read(4))[0]
hashFromBinResponse = hex(deser_uint256(output))[2:].zfill(64)
assert_equal(bb_hash, hashFromBinResponse) #check if getutxo's chaintip during calculation was fine
assert_equal(chainHeight, 102) #chain height must be 102
############################
# GETUTXOS: mempool checks #
############################
# do a tx and don't sync
txid = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.1)
json_string = http_get_call(url.hostname, url.port, '/rest/tx/'+txid+self.FORMAT_SEPARATOR+"json")
json_obj = json.loads(json_string)
vintx = json_obj['vin'][0]['txid'] # get the vin to later check for utxo (should be spent by then)
# get n of 0.1 outpoint
n = 0
for vout in json_obj['vout']:
if vout['value'] == 0.1:
n = vout['n']
json_request = '/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 0) #there should be no outpoint because the tx is only in the mempool and checkmempool was not used
json_request = '/checkmempool/'+txid+'-'+str(n)
json_string = http_get_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json')
json_obj = json.loads(json_string)
assert_equal(len(json_obj['utxos']), 1) #there should be an outpoint because it has just been added to the mempool
#do some invalid requests
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'json', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid json request
json_request = '{"checkmempool'
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+self.FORMAT_SEPARATOR+'bin', json_request, True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
response = http_post_call(url.hostname, url.port, '/rest/getutxos/checkmempool'+self.FORMAT_SEPARATOR+'bin', '', True)
assert_equal(response.status, 400) #must be a 400 because we send an invalid bin request
#test limits
json_request = '/checkmempool/'
for x in range(0, 20):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 400) #must be a 400 because we are exceeding the limits
json_request = '/checkmempool/'
for x in range(0, 15):
json_request += txid+'-'+str(n)+'/'
json_request = json_request.rstrip("/")
response = http_post_call(url.hostname, url.port, '/rest/getutxos'+json_request+self.FORMAT_SEPARATOR+'json', '', True)
assert_equal(response.status, 200) #must be a 200 because we are within the limits
self.nodes[0].generate(1) #generate block to not affect upcoming tests
self.sync_all()
################
# /rest/block/ #
|
jerryzhenleicai/lattice
|
lattice/topo_sort.py
|
Python
|
apache-2.0
| 3,768
| 0.00345
|
# Original topological sort code written by Ofer Faigon (www.bitformation.com) and used with permission
# Permission is hereby granted to copy, modify and use this source code for any purpose as long as the above comment line is included with it.
"""
Loop detection and depth calculation added by Zhenlei Cai (c) 2010. All the modifications made to Ofer Faigon's original code are hereby donated to the public domain. They may be copied, modified and used for any purpose.
"""
def topological_sort(items, partial_order):
"""Perform topological sort.
items is a list of items to be sorted.
partial_order is a list of pairs. If pair (a,b) is in it, it means
that item a should appear before item b.
Returns a list of the items in one of the possible orders, or
a tuple (True, [loop]) where loop is a list of items found to
form a loop.
"""
def add_node(graph, node):
"""Add a node to the graph if not already exists."""
if not graph.has_key(node):
graph[node] = [0] # 0 = number of arcs coming into this node.
def add_arc(graph, fromnode, tonode):
"""Add an arc to a graph. Can create multiple arcs.
The end nodes must already exist."""
graph[fromnode].append(tonode)
# Update the count of incoming arcs in tonode.
graph[tonode][0] = graph[tonode][0] + 1
# step 1 - create a directed graph with an arc a->b for each input
# pair (a,b).
# The graph is represented by a dictionary. The dictionary contains
# a pair item:list for each node in the graph. /item/ is the value
# of the node. /list/'s 1st item is the count of incoming arcs, and
# the rest are the destinations of the outgoing arcs. For example:
# {'a':[0,'b','c'], 'b':[1], 'c':[1]}
# represents the graph: c <-- a --> b
# The graph may contain loops and multiple arcs.
# Note that our representation does not contain reference loops to
# cause GC problems even when the represented graph contains loops,
# because we keep the node names rather than references to the nodes.
graph = {}
for v in items:
add_node(graph, v)
for a,b in partial_order:
add_arc(graph, a, b)
# Step 2 - find all roots (nodes with zero incoming arcs).
roots = [node for (node,nodeinfo) in graph.items() if nodeinfo[0] == 0]
# step 3 - repeatedly emit a root and remove it from the graph. Removing
# a node may convert some of the node's direct children into roots.
# Whenever that happens, we append the new roots to the list of
# current roots.
sorted = []
while len(roots) != 0:
# If len(roots) is always 1 when we get here, it means that
# the input describes a complete ordering and there is only
# one possible output.
# When len(roots) > 1, we can choose any root to send to the
# output; this freedom represents the multiple complete orderings
# that satisfy the input restrictions. We arbitrarily take one of
# the roots using pop(). Note that for the algorithm to be efficient,
# this operation must be done in O(1) time.
root = roots.pop()
sorted.append(root)
for child in graph[root][1:]:
graph[child][0] = graph[child][0] - 1
if graph[child][0] == 0:
roots.append(child)
del graph[root]
if len(graph.items()) != 0:
# There is a loop in the input.
start = graph.popitem()
loop = [start[0]]
next = start[1][1]
while next != loop[0]:
loop.append(next)
next = graph[next][1]
loop.append(loop[0])
return (True, loop)
return sorted
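# Usage sketch (hypothetical data): a valid order for a three-item chain.
#   topological_sort(['a', 'b', 'c'], [('a', 'b'), ('b', 'c')])  # -> ['a', 'b', 'c']
# A cyclic input such as [('x', 'y'), ('y', 'x')] instead returns (True, loop),
# with loop e.g. ['x', 'y', 'x'] (the detected cycle, start item repeated).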
|
tox-dev/tox
|
src/tox/util/lock.py
|
Python
|
mit
| 1,295
| 0.002317
|
"""holds locking
|
functionality that works across processes"""
from __future__ import absolute_import, unicode_literals
from contextlib import contextmanager
import py
from filelock import FileLock, Timeout
from tox.reporter import verbosity1
@contextmanager
def hold_lock(lock_file, reporter=verbosity1):
py.path.local(lock_file.dirname).ensure(dir=1)
lock = FileLock(str(lock_file))
try:
try:
lock.acquire(0.0001)
except Timeout:
reporter("lock file {} present, will block until released".format(lock_file))
lock.acquire()
yield
finally:
lock.release(force=True)
def get_unique_file(path, prefix, suffix):
"""get a unique file in a folder having a given prefix and suffix,
with unique number in between"""
lock_file = path.join(".lock")
prefix = "{}-".format(prefix)
with hold_lock(lock_file):
max_value = -1
for candidate in path.listdir("{}*{}".format(prefix, suffix)):
try:
max_value = max(max_value, int(candidate.basename[len(prefix) : -len(suffix)]))
except ValueError:
continue
winner = path.join("{}{}{}".format(prefix, max_value + 1, suffix))
winner.ensure(dir=0)
return winner
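# Usage sketch (assumed paths): consecutive calls hand out numbered files,
# serialized by the .lock file created above.
#   log_dir = py.path.local("/tmp/tox-logs").ensure(dir=1)
#   get_unique_file(log_dir, prefix="py27", suffix=".log")  # -> .../py27-0.log
#   get_unique_file(log_dir, prefix="py27", suffix=".log")  # -> .../py27-1.log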
|
Sriee/epi
|
data_structures/lib/__init__.py
|
Python
|
gpl-3.0
| 21
| 0
|
__all__ = ['stack']
|
egabancho/invenio
|
invenio/modules/sequencegenerator/backend.py
|
Python
|
gpl-2.0
| 2,606
| 0.00614
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, IntegrityError
# Number of retries to insert a value in the DB storage
MAX_DB_RETRY = 10
class SequenceGenerator(object):
seq_name = None
def __init__(self):
assert self.seq_name
def _value_exists(self, value):
"""
Checks if the value exists in the storage
@param value: value to be checked in storage
@type value: string
@return: result of select SQL query
@rtype: tuple
"""
return run_sql("""SELECT seq_value FROM seqSTORE
WHERE seq_value=%s AND seq_name=%s""",
(value, self.seq_name))
def _insert_value(self, value):
"""
Inserts value into storage
@param value: value to be stored
@type value: string
@return: result of insert SQL query
@rtype: tuple
"""
run_sql("""INSERT INTO seqSTORE (seq_name, seq_value)
VALUES (%s, %s)""",
(self.seq_name, value))
def _next_value(self, *args, **kwargs):
"""
Internal implementation to calculate next value in sequence
"""
raise NotImplementedError
def next_value(self, *args, **kwargs):
"""
Get the next value in the sequence
@return: next value in sequence
@rtype: string
"""
db_retries = 0
value = None
while MAX_DB_RETRY > db_retries:
value = self._next_value(*args, **kwargs)
try:
self._insert_value(value)
break
except IntegrityError:
# The value is already in the storage, get next one
db_retries += 1
return value
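# Minimal concrete subclass sketch (hypothetical, not part of Invenio): only
# seq_name and _next_value are required; the IntegrityError retry loop above
# then enforces uniqueness because seqSTORE rejects duplicate values.
#   class CnumSequence(SequenceGenerator):
#       seq_name = 'cnum'
#       _counter = 0
#       def _next_value(self, *args, **kwargs):
#           CnumSequence._counter += 1
#           return str(CnumSequence._counter)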
|
aelk/loga
|
math/exgcd.py
|
Python
|
mit
| 429
| 0
|
def exgcd(a, b):
"""Uses the extended Euclidean algorithm to return
the gcd as well as the solutions to Bézout's identity:
coefficients x and y such that ax + by = gcd(a, b)."""
x, y = 0, 1
u, v = 1, 0
while a != 0:
quo = b // a
rem = b % a
m = x - (quo * u)
n = y - (quo * v)
b, a = a, rem
x, y = u, v
u, v = m, n
gcd = b
return gcd, x, y
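# Worked example: exgcd(4, 6) returns (2, -1, 1), since gcd(4, 6) = 2 and
# Bézout's identity holds: 4*(-1) + 6*1 = 2.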
|
janeen666/mi-instrument
|
mi/dataset/driver/wc_wm/cspp/wc_wm_cspp_telemetered_driver.py
|
Python
|
bsd-2-clause
| 1,972
| 0.002535
|
#!/usr/bin/env python
"""
@package mi.dataset.driver.wc_wm.cspp
@file mi/dataset/driver/wc_wm/cspp/wc_wm_cspp_telemetered_driver.py
@author Jeff Roy
@brief Driver for the wc_wm_cspp instrument
Release notes:
Initial Release
"""
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.dataset_driver import SimpleDatasetDriver
from mi.dataset.parser.wc_wm_cspp import \
WcWmCsppParser, \
WcWmEngTelemeteredDataParticle, \
WcWmMetadataTelemeteredDataParticle
from mi.dataset.parser.cspp_base import \
METADATA_PARTICLE_CLASS_KEY, \
DATA_PARTICLE_CLASS_KEY
from mi.core.versioning import version
@version("15.6.2")
def parse(unused, source_file_path, particle_data_handler):
"""
This is the method called by Uframe
:param unused
:param source_file_path This is the full path and filename of the file to be parsed
:param particle_data_handler Java Object to consume the output of the parser
:return particle_data_handler
"""
with open(source_file_path, 'rU') as stream_handle:
# create an instance of the concrete driver class defined below
driver = WcWmCsppRecoveredDriver(unused, stream_handle, particle_data_handler)
driver.processFileStream()
return particle_data_handler
class WcWmCsppRecoveredDriver(SimpleDatasetDriver):
"""
Derived wc_wm_cspp driver class
All this needs to do is create a concrete _build_parser method
"""
def _build_parser(self, stream_handle):
parser_config = {
DataSetDriverConfigKeys.PARTICLE_CLASS: None,
DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: {
METADATA_PARTICLE_CLASS_KEY: WcWmMetadataTelemeteredDataParticle,
DATA_PARTICLE_CLASS_KEY: WcWmEngTelemeteredDataParticle
}
}
parser = WcWmCsppParser(parser_config, stream_handle,
self._exception_callback)
return parser
|
ascension/angular-flask-mongo
|
angular_flask/core.py
|
Python
|
mit
| 298
| 0.013423
|
import os
from angular_flask import app
from flask.ext.restless import APIManager
from flask.ext.mongoengine import MongoEngine
app.config["MONGODB_S
|
ETTINGS"] = {'DB':os.environ.get('MONGODB_DB'),"host":os.environ.get('MONGODB_URI')}
mongo_db = MongoEngine(app)
api_manager = APIManager(app)
|
parpg/parpg
|
tools/map_editor/scripts/gui/input.py
|
Python
|
gpl-3.0
| 1,824
| 0.016447
|
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2009 by the FIFE team
# http://www.fifengine.de
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from fife.extensions import pychan
import fife.extensions.pychan.widgets as widgets
class InputDialog(object):
"""
Input supplies a text box for entering data. The result is passed to onEntry.
onEntry - the function to call when an input is complete. Accepts one argument: a string of text.
"""
def __init__(self, prompt, onEntry, onCancel):
self._callback = onEntry
self._cancelCallback = onCancel
self._widget = pychan.loadXML('gui/input.xml')
self._widget.mapEvents({
'okButton' : self._complete,
'cancelButton' : self._cancel
})
self._widget.distributeInitialData({
'prompt' : prompt
})
self._widget.show()
def _complete(self):
self._callback(self._widget.collectData('inputBox'))
self._widget.hide()
def _cancel(self):
self._cancelCallback()
self._widget.hide()
|
xuru/pyvisdk
|
pyvisdk/do/action.py
|
Python
|
mit
| 1,144
| 0.009615
|
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def Action(vim, *args, **kwargs):
'''This data object type defines the action initiated by a scheduled task or
alarm. This is an abstract type. A client creates a scheduled task or an alarm,
each of which triggers an action, defined by a subclass of this type.'''
obj = vim.client.factory.create('ns0:Action')
# do some validation checking...
if (len(args) + len(kwargs)) < 0:
raise IndexError('Expected at least 1 arguments got: %d' % len(args))
required = [ ]
optional = [ 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
|
openstack/compute-hyperv
|
compute_hyperv/nova/volumeops.py
|
Python
|
apache-2.0
| 33,067
| 0
|
# Copyright 2012 Pedro Navarro Perez
# Copyright 2013 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Management class for Storage-related functions (attach, detach, etc).
"""
import inspect
import os
import time
from nova.compute import task_states
from nova import exception
from nova import objects
from nova import utils
from nova.virt import block_device as driver_block_device
from nova.virt import driver
from nova.volume import cinder
from os_brick.initiator import connector
from os_win import constants as os_win_const
from os_win import utilsfactory
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils
from oslo_utils import strutils
from compute_hyperv.i18n import _
import compute_hyperv.nova.conf
from compute_hyperv.nova import constants
from compute_hyperv.nova import pathutils
LOG = logging.getLogger(__name__)
CONF = compute_hyperv.nova.conf.CONF
def volume_snapshot_lock(f):
"""Synchronizes volume snapshot related operations.
The locks will be applied on a per-instance basis. The decorated method
must accept an instance object.
"""
def inner(*args, **kwargs):
all_args = inspect.getcallargs(f, *args, **kwargs)
instance = all_args['instance']
lock_name = "volume-snapshot-%s" % instance.name
@utils.synchronized(lock_name)
def synchronized():
return f(*args, **kwargs)
return synchronized()
return inner
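# Usage sketch (hypothetical method name): any decorated method just needs an
# `instance` argument so the per-instance lock name can be derived.
#   @volume_snapshot_lock
#   def volume_snapshot_delete(self, context, instance, volume_id, snapshot_id):
#       ...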
class VolumeOps(object):
"""Management class for Volume-related tasks
"""
def __init__(self):
self._volume_api = cinder.API()
self._vmops_prop = None
self._block_dev_man_prop = None
self._vmutils = utilsfactory.get_vmutils()
self._default_root_device = 'vda'
self._load_volume_drivers()
def _load_volume_drivers(self):
self.volume_drivers = {
constants.STORAGE_PROTOCOL_SMBFS: SMBFSVolumeDriver(),
constants.STORAGE_PROTOCOL_ISCSI: ISCSIVolumeDriver(),
constants.STORAGE_PROTOCOL_FC: FCVolumeDriver(),
constants.STORAGE_PROTOCOL_RBD: RBDVolumeDriver()}
@property
def _vmops(self):
# We have to avoid a circular dependency.
if not self._vmops_prop:
self._vmops_prop = importutils.import_class(
'compute_hyperv.nova.vmops.VMOps')()
return self._vmops_prop
@property
def _block_dev_man(self):
if not self._block_dev_man_prop:
self._block_dev_man_prop = importutils.import_class(
'compute_hyperv.nova.block_device_manager.'
'BlockDeviceInfoManager')()
return self._block_dev_man_prop
def _get_volume_driver(self, connection_info):
driver_type = connection_info.get('driver_volume_type')
if driver_type not in self.volume_drivers:
raise exception.VolumeDriverNotFound(driver_type=driver_type)
return self.volume_drivers[driver_type]
def validate_host_configuration(self):
for protocol, volume_driver in self.volume_drivers.items():
try:
volume_driver.validate_host_configuration()
except exception.ValidationError as ex:
LOG.warning(
"Volume driver %(protocol)s reported a validation "
"error. Attaching such volumes will probably fail. "
"Error message: %(err_msg)s.",
dict(protocol=protocol, err_msg=ex.message))
def attach_volumes(self, context, volumes, instance):
for vol in volumes:
self.attach_volume(context, vol['connection_info'], instance)
def disconnect_volumes(self, block_device_info):
mapping = driver.block_device_info_get_mapping(block_device_info)
for vol in mapping:
self.disconnect_volume(vol['connection_info'])
def attach_volume(self, context, connection_info, instance,
disk_bus=constants.CTRL_TYPE_SCSI,
update_device_metadata=False):
tries_left = CONF.hyperv.volume_attach_retry_count + 1
while tries_left:
try:
self._attach_volume(context,
connection_info,
instance,
disk_bus,
update_device_metadata)
break
except Exception as ex:
tries_left -= 1
if not tries_left:
LOG.exception(
"Failed to attach volume %(connection_info)s "
"to instance %(instance_name)s. ",
{'connection_info': strutils.mask_dict_password(
connection_info),
'instance_name': instance.name})
# We're requesting a detach as the disk may have
# been attached to the instance but one of the
# post-attach operations failed.
self.detach_volume(context,
connection_info,
instance,
update_device_metadata)
raise exception.VolumeAttachFailed(
volume_id=connection_info['serial'],
reason=ex)
else:
LOG.warning(
"Failed to attach volume %(connection_info)s "
"to instance %(instance_name)s. "
"Tries left: %(tries_left)s.",
{'connection_info': strutils.mask_dict_password(
connection_info),
'instance_name': instance.name,
'tries_left': tries_left})
time.sleep(CONF.hyperv.volume_attach_retry_interval)
def _attach_volume(self, context, connection_info, instance,
disk_bus=constants.CTRL_TYPE_SCSI,
update_device_metadata=False):
LOG.debug(
"Attaching volume: %(connection_info)s to %(instance_name)s",
{'connection_info': strutils.mask_dict_password(connection_info),
'instance_name': instance.name})
volume_driver = self._get_volume_driver(connection_info)
volume_driver.attach_volume(connection_info,
instance.name,
disk_bus)
if update_device_metadata:
# When attaching volumes to already existing instances,
# the connection info passed to the driver is not saved
# yet within the BDM table.
self._block_dev_man.set_volume_bdm_connection_info(
context, instance, connection_info)
self._vmops.update_device_metadata(
context, instance)
qos_specs = connection_info['data'].get('qos_specs') or {}
if qos_specs:
volume_driver.set_disk_qos_specs(connection_info,
qos_specs)
def disconnect_volume(self, connection_info):
volume_driver = self._get_volume_driver(connection_info)
volume_driver.disconnect_volume(connection_info)
def detach_volume(self, context, connection_info, instance,
update_device_metadata=False):
LOG.debug("Detaching volume: %(connection_info)s "
"
|
nfredrik/pyModelStuff
|
samples/abp/test_graphics.py
|
Python
|
bsd-3-clause
| 613
| 0.004894
|
"""
ABP analyzer and graphics tests
"
|
""
cases = [
('Run PyModel Graphics to generate dot file from FSM model, no need to use pma',
'pmg.py ABP'),
('Generate SVG file from dot',
'dotsvg ABP'),
# Now display ABP.svg in browser
('Run PyModel Analyzer to generate FSM from original FSM, should be the same',
'pma.py ABP'),
('Run PyModel Graphics to generate a file of graphics commands from new FSM',
'pmg.py ABPFSM'),
('Generate an svg file from the graphics commands',
'dotsvg ABPFSM'),
# Now display ABPFSM.svg in browser, should look the same as ABP.svg
]
|
ksigorodetskaya/Python_Ifmo
|
Part1/url-shorter/url_shorter/converter.py
|
Python
|
apache-2.0
| 675
| 0.017483
|
from string import digits, ascii_letters
valid_values = list(digits + ascii_letters)  # convert the string to a list
radix = len(valid_values)  # base (radix) of the numeral system
def convert(number):
result = []  # collect the remainders of the division here
while number:
result.insert(0, valid_values[number % radix])
number //= radix
return ''.join(result)
def inverse(number):
result = 0
for p, i in enumerate(reversed(number)):
n = valid_values.index(i)  # get the index of the needed list element
result += n * radix ** p
return result
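# Worked round trip (radix = 62 from digits + ascii_letters):
#   convert(12345)  # -> '3d7', since 12345 = 3*62**2 + 13*62 + 7
#   inverse('3d7')  # -> 12345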
|
cynapse/cynin
|
src/ubify.viewlets/ubify/viewlets/browser/sitetitle.py
|
Python
|
gpl-3.0
| 2,433
| 0.011919
|
###############################################################################
#cyn.in is an open source Collaborative Knowledge Management Appliance that
#enables teams to seamlessly work together on files, documents and content in
#a secure central environment.
#
#cyn.in v2 an open source appliance is distributed under the GPL v3 license
#along with commercial support options.
#
#cyn.in is a Cynapse Invention.
#
#Copyright (C) 2008 Cynapse India Pvt. Ltd.
#
#This program is free software: you can redistribute it and/or modify it under
#the terms of the GNU General Public License as published by the Free Software
#Foundation, either version 3 of the License, or any later version and observe
#the Additional Terms applicable to this program and must display appropriate
#legal notices. In accordance with Section 7(b) of the GNU General Public
#License version 3, these Appropriate Legal Notices must retain the display of
#the "Powered by cyn.in" AND "A Cynapse Invention" logos. You should have
#received a copy of the detailed Additional Terms License with this program.
#
#This program is distributed in the hope that it will be useful,
#but WITHOUT ANY WARRANTY; without even the implied warranty of
#MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
#Public License for more details.
#
#You should have received a copy of the GNU General Public License along with
#this program. If not, see <http://www.gnu.org/licenses/>.
#
#You can contact Cynapse at support@cynapse.com with any problems with cyn.in.
#For any queries regarding the licensing, please send your mails to
# legal@cynapse.com
#
#You can also contact Cynapse at:
#802, Building No. 1,
#Dheeraj Sagar, Malad(W)
#Mumbai-400064, India
###############################################################################
from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile
from plone.app.layout.viewlets.common import ViewletBase
from zope.component import getMultiAdapter
class SiteTitleViewlet(ViewletBase):
render = ViewPageTemplateFile('sitetitle.pt')
def update(self):
portal_state = getMultiAdapter((self.context, self.request),
name=u'plone_portal_state')
self.site_title = portal_state.portal_title()
self.has_site_title = True
self.site_url = portal_state.portal_url()
|
niavlys/kivy
|
kivy/core/image/img_pil.py
|
Python
|
mit
| 3,319
| 0.000904
|
'''
PIL: PIL image loader
'''
__all__ = ('ImageLoaderPIL', )
try:
from PIL import Image as PILImage
except:
import Image as PILImage
from kivy.logger import Logger
from kivy.core.image import ImageLoaderBase, ImageData, ImageLoader
class ImageLoaderPIL(ImageLoaderBase):
'''Image loader based on the PIL library.
.. versionadded:: 1.0.8
Support for GIF animation added.
GIF animation has a lot of issues (transparency/color depths, etc.).
In order to keep it simple, what is implemented here is what is
natively supported by the PIL library.
As a general rule, try to use GIFs that have no transparency.
GIFs with transparency will work but be prepared for some
artifacts until transparency support is improved.
'''
@staticmethod
def can_save():
return True
@staticmethod
def extensions():
'''Return accepted extensions for this loader'''
# See http://www.pythonware.com/library/pil/handbook/index.htm
return ('bmp', 'bufr', 'cur', 'dcx', 'fits', 'fl', 'fpx', 'gbr',
'gd', 'gif', 'grib', 'hdf5', 'ico', 'im', 'imt', 'iptc',
'jpeg', 'jpg', 'jpe', 'mcidas', 'mic', 'mpeg', 'msp',
'pcd', 'pcx', 'pixar', 'png', 'ppm', 'psd', 'sgi',
'spider', 'tga', 'tiff', 'wal', 'wmf', 'xbm', 'xpm',
'xv')
def _img_correct(self, _img_tmp):
'''Convert image to the correct format and orientation.
'''
# image loader work only with rgb/rgba image
if _img_tmp.mode.lower() not in ('rgb', 'rgba'):
try:
imc = _img_tmp.convert('RGBA')
except:
Logger.warning(
'Image: Unable to convert image to rgba (was %s)' %
(_img_tmp.mode.lower()))
raise
_img_tmp = imc
return _img_tmp
def _img_read(self, im):
'''Read images from an animated file.
'''
im.seek(0)
# Read all images inside
try:
img_ol = None
while True:
img_tmp = im
img_tmp = self._img_correct(img_tmp)
if img_ol:
# paste new frame over old so as to handle
# transparency properly
img_ol.paste(img_tmp, (0, 0), img_tmp)
img_tmp = img_ol
img_ol = img_tmp
yield ImageData(img_tmp.size[0], img_tmp.size[1],
img_tmp.mode.lower(), img_tmp.tostring())
im.seek(im.tell() + 1)
except EOFError:
pass
def load(self, filename):
try:
im = PILImage.open(filename)
except:
Logger.warning('Image: Unable to load image <%s>' % filename)
raise
# update internals
self.filename = filename
# returns an array of type ImageData len 1 if not a sequence image
return list(self._img_read(im))
@staticmethod
def save(filename, width, height, fmt, pixels):
image = PILImage.fromstring(fmt.upper(), (width, height), pixels)
image.save(filename)
return True
# register
ImageLoader.register(ImageLoaderPIL)
|
aaxelb/osf.io
|
api/preprint_providers/serializers.py
|
Python
|
apache-2.0
| 5,102
| 0.001176
|
from guardian.shortcuts import get_perms
from rest_framework import serializers as ser
from rest_framework.exceptions import ValidationError
from reviews.workflow import Workflows
from api.actions.serializers import ReviewableCountsRelationshipField
from api.base.utils import absolute_reverse, get_user_auth
from api.base.serializers import JSONAPISerializer, LinksField, RelationshipField, ShowIfVersion
class PreprintProviderSerializer(JSONAPISerializer):
filterable_fields = frozenset([
'allow_submissions',
'description',
'domain',
'domain_redirect_enabled',
'id',
'name',
'share_publish_type',
'reviews_workflow',
'permissions',
])
name = ser.CharField(read_only=True)
description = ser.CharField(read_only=True)
id = ser.CharField(read_only=True, max_length=200, source='_id')
advisory_board = ser.CharField(read_only=True)
example = ser.CharField(read_only=True, allow_null=True)
domain = ser.CharField(read_only=True, allow_null=False)
domain_redirect_enabled = ser.BooleanField(read_only=True)
footer_links = ser.CharField(read_only=True)
share_source = ser.CharField(read_only=True)
share_publish_type = ser.CharField(read_only=True)
email_support = ser.CharField(read_only=True, allow_null=True)
preprint_word = ser.CharField(read_only=True, allow_null=True)
allow_submissions = ser.BooleanField(read_only=True)
additional_providers = ser.ListField(read_only=True, child=ser.CharField())
# Reviews settings are the only writable fields
reviews_workflow = ser.ChoiceField(choices=Workflows.choices())
reviews_comments_private = ser.BooleanField()
reviews_comments_anonymous = ser.BooleanField()
permissions = ser.SerializerMethodField()
preprints = ReviewableCountsRelationshipField(
related_view='preprint_providers:preprints-list',
related_view_kwargs={'provider_id': '<_id>'}
)
taxonomies = RelationshipField(
related_view='preprint_providers:taxonomy-list',
related_view_kwargs={'provider_id': '<_id>'}
)
highlighted_taxonomies = RelationshipField(
related_view='preprint_providers:highlighted-taxonomy-list',
related_view_kwargs={'provider_id': '<_id>'}
)
licenses_acceptable = RelationshipField(
related_view='preprint_providers:license-list',
related_view_kwargs={'provider_id': '<_id>'}
)
links = LinksField({
'self': 'get_absolute_url',
'preprints': 'get_preprints_url',
'external_url': 'get_external_url'
})
# Deprecated fields
header_text = ShowIfVersion(
ser.CharField(read_only=True, default=''),
min_version='2.0', max_version='2.3'
)
banner_path = ShowIfVersion(
ser.CharField(read_only=True, default=''),
min_version='2.0', max_version='2.3'
)
logo_path = ShowIfVersion(
ser.CharField(read_only=True, default=''),
min_version='2.0', max_version='2.3'
)
email_contact = ShowIfVersion(
ser.CharField(read_only=True, allow_null=True),
min_version='2.0', max_version='2.3'
)
social_twitter = ShowIfVersion(
ser.CharField(read_only=True, allow_null=True),
min_version='2.0', max_version='2.3'
)
social_facebook = ShowIfVersion(
ser.CharField(read_only=True, allow_null=True),
min_version='2.0', max_version='2.3'
)
social_instagram = ShowIfVersion(
ser.CharField(read_only=True, allow_null=True),
min_version='2.0', max_version='2.3'
)
subjects_acceptable = ShowIfVersion(
ser.ListField(read_only=True, default=[]),
min_version='2.0', max_version='2.4'
)
class Meta:
type_ = 'preprint_providers'
def get_absolute_url(self, obj):
return obj.absolute_api_v2_url
def get_preprints_url(self, obj):
return absolute_reverse('preprint_providers:preprints-list', kwargs={
'provider_id': obj._id,
'version': self.context['request'].parser_context['kwargs']['version']
})
def get_external_url(self, obj):
return obj.external_url
def get_permissions(self, obj):
auth = get_user_auth(self.context['request'])
if not auth.user:
return []
return get_perms(auth.user, obj)
def validate(self, data):
required_fields = ('reviews_workflow', 'reviews_comments_private', 'reviews_comments_anonymous')
for field in required_fields:
if data.get(field) is None:
raise ValidationError('All reviews fields must be set at once: `{}`'.format('`, `'.join(required_fields)))
return data
def update(self, instance, validated_data):
instance.reviews_workflow = validated_data['reviews_workflow']
instance.reviews_comments_private = validated_data['reviews_comments_private']
instance.reviews_comments_anonymous = validated_data['reviews_comments_anonymous']
instance.save()
return instance
|
hackcyprus/junior
|
game/migrations/0001_initial.py
|
Python
|
mit
| 4,851
| 0.007627
|
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Problem'
db.create_table(u'game_problem', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=100)),
('order', self.gf('django.db.models.fields.IntegerField')()),
('html_template', self.gf('django.db.models.fields.CharField')(max_length=100)),
('multiplier', self.gf('django.db.models.fields.FloatField')(default=1.0)),
('base_points', self.gf('django.db.models.fields.IntegerField')(default=300)),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'game', ['Problem'])
# Adding model 'Stage'
db.create_table(u'game_stage', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('unlocked_on', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
('points_earned', self.gf('django.db.models.fields.FloatField')()),
('problem', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['game.Problem'])),
('team', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['teams.Team'])),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'game', ['Stage'])
# Adding model 'Attempt'
db.create_table(u'game_attempt', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('correct', self.gf('django.db.models.fields.BooleanField')(default=False)),
('stage', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['game.Stage'])),
('created', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
))
db.send_create_signal(u'game', ['Attempt'])
def backwards(self, orm):
# Deleting model 'Problem'
db.delete_table(u'game_problem')
# Deleting model 'Stage'
db.delete_table(u'game_stage')
# Deleting model 'Attempt'
db.delete_table(u'game_attempt')
models = {
u'game.attempt': {
'Meta': {'object_name': 'Attempt'},
'correct': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'stage': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['game.Stage']"})
},
u'game.problem': {
'Meta': {'object_name': 'Problem'},
'base_points': ('django.db.models.fields.IntegerField', [], {'default': '300'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'html_template': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiplier': ('django.db.models.fields.FloatField', [], {'default': '1.0'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'order': ('django.db.models.fields.IntegerField', [], {})
},
u'game.stage': {
'Meta': {'object_name': 'Stage'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'points_earned': ('django.db.models.fields.FloatField', [], {}),
'problem': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['game.Problem']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['teams.Team']"}),
'unlocked_on': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
u'teams.team': {
'Meta': {'object_name': 'Team'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'token': ('django.db.models.fields.CharField', [], {'default': "'6e61d7a5cfc2462d8f1637f9464dd1b5'", 'max_length': '32'})
}
}
complete_apps = ['game']
|
fatho/kos-c
|
docs/source/conf.py
|
Python
|
bsd-3-clause
| 4,828
| 0.000621
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# kOS-C documentation build configuration file, created by
# sphinx-quickstart on Tue Apr 4 18:06:04 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
import sphinx_rtd_theme
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = ['sphinx.ext.todo',
'sphinx.ext.mathjax',
'sphinx.ext.githubpages']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'kOS-C'
copyright = '2017, Fabian Thorand'
author = 'Fabian Thorand'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.0.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'kOS-Cdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
'papersize': 'a4paper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'kOS-C.tex', 'kOS-C Documentation',
'Fabian Thorand', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'kos-c', 'kOS-C Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'kOS-C', 'kOS-C Documentation',
author, 'kOS-C', 'One line description of project.',
'Miscellaneous'),
]
|
roadmapper/ansible
|
test/units/modules/network/cloudengine/test_ce_is_is_instance.py
|
Python
|
gpl-3.0
| 2,883
| 0.001041
|
# (c) 2019 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.cloudengine import ce_is_is_instance
from units.modules.network.cloudengine.ce_module import TestCloudEngineModule, load_fixture
from units.modules.utils import set_module_args
class TestCloudEngineLacpModule(TestCloudEngineModule):
module = ce_is_is_instance
def setUp(self):
super(TestCloudEngineLacpModule, self).setUp()
        self.mock_get_config = patch('ansible.modules.network.cloudengine.ce_is_is_instance.get_nc_config')
self.get_nc_config = self.mock_get_config.start()
self.mock_set_config = patch('ansible.modules.network.cloudengine.ce_is_is_instance.set_nc_config')
self.set_nc_config = self.mock_set_config.start()
self.set_nc_config.return_value = None
def tearDown(self):
super(TestCloudEngineLacpModule, self).tearDown()
self.mock_set_config.stop()
self.mock_get_config.stop()
def test_isis_instance_present(self):
xml_existing = load_fixture('ce_is_is_instance', 'before.txt')
xml_end_state = load_fixture('ce_is_is_instance', 'after.txt')
update = ['isis 100', 'vpn-instance __public__']
self.get_nc_config.side_effect = (xml_existing, xml_end_state)
config = dict(
instance_id=100,
vpn_name='__public__',
state='present')
set_module_args(config)
result = self.execute_module(changed=True)
self.assertEquals(sorted(result['updates']), sorted(update))
    def test_isis_instance_absent(self):
xml_existing = load_fixture('ce_is_is_instance', 'after.txt')
xml_end_state = load_fixture('ce_is_is_instance', 'before.txt')
update = ['undo isis 100']
self.get_nc_config.side_effect = (xml_existing, xml_end_state)
config = dict(
instance_id=100,
vpn_name='__public__',
state='absent')
set_module_args(config)
result = self.execute_module(changed=True)
self.assertEquals(sorted(result['updates']), sorted(update))
|
BamX/dota2-matches-statistic
|
run.py
|
Python
|
mit
| 191
| 0.031414
|
#!env/bin/python
from app import app
import sys
port = 5000
debug = True
if len(sys.argv) == 3:
debug = sys.argv[1] == 'debug'
port = int(sys.argv[2])
app.run(debug = debug, port = port)
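# usage sketch (added for illustration): `./run.py debug 8080` enables debug
# mode on port 8080; with any other argument count, the defaults above
# (debug=True, port=5000) are kept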
|
ccubed/Dyslexml
|
dyslexml/__init__.py
|
Python
|
mit
| 157
| 0
|
from .todict import *
from .toxml import *
from . import todict, toxml  # the star imports above do not bind the module names used below
class Dyslexml:
def __init__(self):
        self.toDict = todict.parse
self.toXml = toxml.translate
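# hypothetical usage sketch (added); assumes todict.parse accepts XML text and
# toxml.translate accepts a dict, as the attribute names suggest:
#
#     d = Dyslexml()
#     data = d.toDict('<root><a>1</a></root>')
#     xml = d.toXml(data)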
|
dims/neutron
|
neutron/tests/unit/api/rpc/agentnotifiers/test_bgp_dr_rpc_agent_api.py
|
Python
|
apache-2.0
| 3,575
| 0
|
# Copyright 2016 Huawei Technologies India Pvt. Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from neutron.api.rpc.agentnotifiers import bgp_dr_rpc_agent_api
from neutron import context
from neutron.tests import base
class TestBgpDrAgentNotifyApi(base.BaseTestCase):
def setUp(self):
super(TestBgpDrAgentNotifyApi, self).setUp()
self.notifier = (
bgp_dr_rpc_agent_api.BgpDrAgentNotifyApi())
mock_cast_p = mock.patch.object(self.notifier,
'_notification_host_cast')
self.mock_cast = mock_cast_p.start()
mock_call_p = mock.patch.object(self.notifier,
'_notification_host_call')
self.mock_call = mock_call_p.start()
self.context = context.get_admin_context()
self.host = 'host-1'
def test_notify_dragent_bgp_routes_advertisement(self):
bgp_speaker_id = 'bgp-speaker-1'
routes = [{'destination': '1.1.1.1', 'next_hop': '2.2.2.2'}]
self.notifier.bgp_routes_advertisement(self.context, bgp_speaker_id,
routes, self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
def test_notify_dragent_bgp_routes_withdrawal(self):
bgp_speaker_id = 'bgp-speaker-1'
routes = [{'destination': '1.1.1.1'}]
self.notifier.bgp_routes_withdrawal(self.context, bgp_speaker_id,
routes, self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
def test_notify_bgp_peer_disassociated(self):
bgp_speaker_id = 'bgp-speaker-1'
bgp_peer_ip = '1.1.1.1'
self.notifier.bgp_peer_disassociated(self.context, bgp_speaker_id,
bgp_peer_ip, self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
def test_notify_bgp_peer_associated(self):
bgp_speaker_id = 'bgp-speaker-1'
bgp_peer_id = 'bgp-peer-1'
self.notifier.bgp_peer_associated(self.context, bgp_speaker_id,
bgp_peer_id, self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
def test_notify_bgp_speaker_created(self):
bgp_speaker_id = 'bgp-speaker-1'
self.notifier.bgp_speaker_created(self.context, bgp_speaker_id,
self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
def test_notify_bgp_speaker_removed(self):
bgp_speaker_id = 'bgp-speaker-1'
self.notifier.bgp_speaker_removed(self.context, bgp_speaker_id,
self.host)
self.assertEqual(1, self.mock_cast.call_count)
self.assertEqual(0, self.mock_call.call_count)
|
mugurrus/superdesk-core
|
superdesk/text_utils.py
|
Python
|
agpl-3.0
| 5,251
| 0.001905
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2017 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
import re
from lxml import etree # noqa
from superdesk import etree as sd_etree
from lxml import html as lxml_html
from lxml.html import clean
from flask import current_app as app
import chardet
# This pattern matches http(s) links, numbers (1.000.000 or 1,000,000 or 1 000 000), regular words,
# compound words (e.g. "two-done") or abbreviations (e.g. D.C.)
# If you modify please keep in sync with superdesk-client/core/scripts/apps/authoring/authoring/directives/WordCount.js
WORD_PATTERN = re.compile(r'https?:[^ ]*|([0-9]+[,. ]?)+|([\w]\.)+|[\w][\w-]*')
def get_text_word_count(text):
"""Get word count for given plain text.
:param str text: text string
:return int: word count
"""
return sum(1 for word in WORD_PATTERN.finditer(text))
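# --- illustrative sketch (added for clarity): WORD_PATTERN counts a URL, a
# formatted number, a compound word and an abbreviation as one word each ---
#
#     get_text_word_count("Visit https://example.com: 1,000,000 well-known D.C. readers")
#     # -> 6  (Visit / the URL / the number / well-known / D.C. / readers)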
def get_text(markup, content='xml', lf_on_block=False, space_on_elements=False):
"""Get plain text version of (X)HTML or other XML element
if the markup can't be parsed, it will be returned unchanged
:param str markup: string to convert to plain text
:param str content: 'xml' or 'html', as in parse_html
:param bool lf_on_block: if True, add a line feed on block elements' tail
:param bool space_on_elements: if True, add a space on each element's tail
mainly used to count words with non HTML markup
:return str: plain text version of markup
"""
try:
root = sd_etree.parse_html(
markup,
content=content,
lf_on_block=lf_on_block,
space_on_elements=space_on_elements)
text = etree.tostring(root, encoding='unicode', method='text')
return text
except etree.ParseError:
return markup
def get_word_count(markup, no_html=False):
"""Get word count for given html.
:param str markup: xhtml (or other xml) markup
:param bool no_html: set to True if xml param is not (X)HTML
if True, a space will be added after each element to separate words.
        This avoids constructs like <hl2>word</hl2><p>another</p> (as in NITF)
being counted as one word.
:return int: count of words inside the text
"""
if no_html:
return get_text_word_count(get_text(markup, content='xml', space_on_elements=True))
    else:
return get_text_word_count(get_text(markup, content='html', lf_on_block=True))
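# --- illustrative sketch (added): the no_html flag keeps adjacent elements
# from being glued into a single word, cf. the NITF example in the docstring ---
#
#     get_word_count('<hl2>word</hl2><p>another</p>', no_html=True)   # -> 2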
def update_word_count(update, original=None):
"""Update word count if there was change in content.
:param update: created/updated document
:param original: original document if updated
"""
if update.get('body_html'):
update.setdefault('word_count', get_word_count(update.get('body_html')))
else:
# If the body is removed then set the count to zero
if original and 'word_count' in original and 'body_html' in update:
update['word_count'] = 0
def get_char_count(html):
"""Get character count for given html.
:param html: html string to count
:return int: count of chars inside the text
"""
return len(get_text(html))
def get_reading_time(html, word_count=None, language=None):
"""Get estimanted number of minutes to read a text
Check https://dev.sourcefabric.org/browse/SDFID-118 for details
:param str html: html content
:param int word_count: number of words in the text
:param str language: language of the text
    :return int: estimated number of minutes to read the text
"""
if language and language.startswith('ja'):
return round(len(re.sub(r'[\s]', '', get_text(html))) / app.config['JAPANESE_CHARACTERS_PER_MINUTE'])
if not word_count:
word_count = get_word_count(html)
reading_time_float = word_count / 250
reading_time_minutes = int(reading_time_float)
reading_time_rem_sec = int((reading_time_float - reading_time_minutes) * 60)
if reading_time_rem_sec >= 30:
reading_time_minutes += 1
return reading_time_minutes
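# worked example (added): the estimate assumes 250 words per minute and rounds
# up once the leftover fraction reaches 30 seconds:
#     700 words -> 2.8 min -> 2 min + 48 s -> reported as 3 minutes
#     560 words -> 2.24 min -> 2 min + 14 s -> reported as 2 minutes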
def sanitize_html(html):
"""Sanitize HTML
:param str html: unsafe HTML markup
:return str: sanitized HTML
"""
if not html:
return ""
blacklist = ["script", "style", "head"]
root_elem = lxml_html.fromstring(html)
cleaner = clean.Cleaner(
add_nofollow=False,
kill_tags=blacklist
)
cleaned_xhtml = cleaner.clean_html(root_elem)
safe_html = etree.tostring(cleaned_xhtml, encoding="unicode")
# the following code is legacy (pre-lxml)
if safe_html == ", -":
return ""
return safe_html
def decode(bytes_str):
"""Decode bytes value
try to decode using UTF-8, or to detect encoding. Will ignore bad chars as a last resort
@return (str): decoded string
"""
try:
return bytes_str.decode('utf-8')
except UnicodeDecodeError:
try:
return bytes_str.decode(chardet.detect(bytes_str)['encoding'])
except Exception:
return bytes_str.decode('utf-8', 'ignore')
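# --- illustrative sketch (added): the three-step fallback in decode() ---
#
#     decode(b'caf\xc3\xa9')   # valid UTF-8 -> u'café'
#     decode(b'caf\xe9')       # not UTF-8: chardet guesses the charset, and if
#                              # that fails too, the offending bytes are dropped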
|
systemcrash/service.subtitles.subs_com_ru
|
resources/lib/scrutest.py
|
Python
|
gpl-2.0
| 2,013
| 0.000994
|
# -*- coding: utf-8 -*-
from shutil import rmtree
from tempfile import mkdtemp
from omdbapi import OMDbAPI
from scrusubtitles import ScruSubtitles
from scrusubtitles import ScruSubtitlesListener
from scrusubtitles import ScruSubtitlesLogger
class TestService(ScruSubtitlesListener, ScruSubtitlesLogger):
def __init__(self):
super(TestService, self).__init__()
self._omdbapi = OMDbAPI()
self._omdbapi.logger = self
self._scrusubtitles = ScruSubtitles()
self._scrusubtitles.listener = self
self._scrusubtitles.logger = self
self._scrusubtitles.workdir = mkdtemp()
self._num_subtitles_downloaded = 0
self._num_subtitles_found = 0
def cleanup(self):
        rmtree(self._scrusubtitles.workdir)
def lookup(self, title, year):
return self._omdbapi.search(title, year)
def download(self, url, filename):
self._num_subtitles_downloaded = 0
self._scrusubtitles.download(url, filename)
        self.info(u'{0} subtitles downloaded'.format(self._num_subtitles_downloaded))
def search(self, imdb_id, languages):
self._num_subtitles_found = 0
self._scrusubtitles.search(imdb_id, languages)
self.info(u'{0} subtitles found'.format(self._num_subtitles_found))
def on_subtitle_found(self, subtitle):
self._num_subtitles_found += 1
self.info(u'Found {0} subtitle {1}'.format(subtitle['language'], subtitle['filename']))
for key in subtitle:
self.debug(u' {0}: {1}'.format(key, subtitle[key]))
def on_subtitle_downloaded(self, path):
self._num_subtitles_downloaded += 1
self.info(u'Subtitle {0} downloaded'.format(path))
def debug(self, message):
print u'DEBUG: {0}'.format(message)
def info(self, message):
print u'INFO: {0}'.format(message)
def warn(self, message):
print u'WARN: {0}'.format(message)
def error(self, message):
print u'ERROR: {0}'.format(message)
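# hypothetical driver sketch (added); the title, year and language list are
# made up, and lookup() is assumed to return an IMDb id:
#
#     service = TestService()
#     imdb_id = service.lookup('Some Film', 1999)
#     service.search(imdb_id, ['en'])
#     service.cleanup()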
|
openhatch/new-mini-tasks
|
vendor/packages/Django/tests/regressiontests/middleware/models.py
|
Python
|
apache-2.0
| 287
| 0
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Band(models.Model):
name = models.CharField(max_length=100)
class Meta:
ordering = ('name',)
def __str__(self):
        return self.name
|
JonathanPierce/Algae
|
preprocessors/tokenizer/main.py
|
Python
|
mit
| 1,473
| 0.007468
|
# -*- coding: utf-8 -*-
import sys
import tokenizer
import os
import re
import helpers.io as io
# just tokenizes the file
def simple(filepath):
tok = tokenizer.Tokenizer(filepath)
results = tok.full_tokenize()
return tokenizer.compress_tokens(results)
# smartly tokenizes a function for modified token edit distance (MTED)
# tokenPath is the entry point (potentially a main file, if C++ templates are used)
# sources is an array of all of the paths we are actually interested in.
# If compress is true, each token will be reduced to a single character. Good for edit distance!
def mted(tokenPath, sources, compress):
tok = tokenizer.Tokenizer(tokenPath)
functions = tok.split_functions(False)
# sort them appropriately
def comp(a,b):
lena = len(a[1])
lenb = len(b[1])
if lena == lenb:
# if lengths are tied, sort alphabetically based on function name
if a[0] < b[0]:
return -1
else:
return 1
else:
return lena - lenb
functions.sort(comp)
# compress and output
results = ""
for funct in functions:
if funct[2] in sources:
if compress:
results += tokenizer.compress_tokens(funct[1])
else:
results += " ".join(funct[1])
if compress == False:
results += " "
# return results
return results.strip()
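# --- illustrative note (added): the comparator in mted() orders functions by
# token count, breaking ties alphabetically by function name ---
#
#     [("b", ["t"], f), ("a", ["t"], f), ("c", ["t", "t"], f)]
#     sorts to a, b, c: the equal-length "a"/"b" pair falls back to name
#     order, and the longer "c" comes last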
|
lightopa/Aiopa-Battles
|
updater.py
|
Python
|
mit
| 9,215
| 0.007271
|
import urllib.request
import pickle
import sys
import ast
try:
import variables as v
except:
class var():
def __init__(self):
self.screen = None
v = var()
import pygame as py
class textLabel(py.sprite.Sprite):
def __init__(self, text, pos, colour, font, size, variable = False, centred = False):
super().__init__()
self.text = text
self.pos = pos
self.colour = colour
self.font = font
self.size = size
self.variable = variable
self.centred = centred
def update(self):
pos = self.pos
font = py.font.Font(self.font, self.size)
if not self.variable:
label = font.render(self.text, 1, self.colour)
if self.variable:
label = font.render(str(getattr(v, self.text)), 1, self.colour)
if self.centred:
pos = list(self.pos)
pos[0] -= font.size(self.text)[0] / 2
pos[1] -= font.size(self.text)[1] / 2
pos = tuple(pos)
v.screen.blit(label, pos)
class Button(py.sprite.Sprite):
def __init__(self, text, pos, size, hovercolour, normalcolour, font, ID, centred = False, bsize=(0,0)):
super().__init__()
self.ID = ID
self.hovered = False
self.text = text
self.pos = pos
self.hcolour = hovercolour
self.ncolour = normalcolour
self.font = font
self.font = py.font.Font(font, int(size))
self.centred = centred
self.size = bsize
self.set_rect()
def update(self):
self.set_rend()
py.draw.rect(v.screen, self.get_color(), self.rect)
v.screen.blit(self.rend, self.rect)
if self.rect.collidepoint(py.mouse.get_pos()):
self.hovered = True
else:
self.hovered = False
def set_rend(self):
self.rend = self.font.render(self.text, True, (0,0,0))
def get_color(self):
if self.hovered:
return self.hcolour
else:
return self.ncolour
def set_rect(self):
self.set_rend()
self.rect = self.rend.get_rect()
        if not self.centred:
            self.rect.topleft = self.pos
if self.centred:
self.rect.center = self.pos
if not self.size[0] == 0:
self.rect.width = self.size[0]
if not self.size[1] == 0:
self.rect.height = self.size[1]
def pressed(self):
mouse = py.mouse.get_pos()
if mouse[0] > self.rect.topleft[0]:
if mouse[1] > self.rect.topleft[1]:
if mouse[0] < self.rect.bottomright[0]:
if mouse[1] < self.rect.bottomright[1]:
return True
else: return False
else: return False
else: return False
else: return False
import os, shutil
theFont = None
py.init()
v.screen = py.display.set_mode((640, 480))
v.screen.fill((20, 20, 20))
textLabel("Checking For Updates...", (320, 240), (255, 255, 255), theFont, 50, False, True).update()
py.display.flip()
tries = 0
def reporthook(count, blockSize, totalSize):
if totalSize == -1:
print("FAILED TOTALSIZE")
raise Exception()
#Shows percentage of download
py.event.pump()
for event in py.event.get():
if event.type == py.QUIT:
sys.exit()
percent = int(count*blockSize*100/totalSize)
rect = py.Rect(100, 240, percent*4.4, 30)
v.screen.fill((20, 20, 20))
py.draw.rect(v.screen, (255, 0, 0), rect)
py.draw.rect(v.screen, (0, 0, 0), rect, 2)
py.draw.rect(v.screen, (0, 0, 0), (100, 240, 440, 30), 2)
textLabel("Downloading...", (320, 150), (255, 255, 255), theFont, 50, False, True).update()
textLabel(str(percent) + "%", (320, 255), (255, 255, 255), theFont, 20, False, True).update()
py.display.flip()
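# worked example (added): count=50 blocks of blockSize=8192 bytes out of
# totalSize=819200 gives percent = 50 * 8192 * 100 / 819200 = 50, i.e. the
# bar is drawn at half width (percent * 4.4 of the 440-pixel track)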
def recursive_overwrite(src, dest, ignore=None):
if os.path.isdir(src):
if not os.path.isdir(dest):
os.makedirs(dest)
files = os.listdir(src)
if ignore is not None:
ignored = ignore(src, files)
else:
ignored = set()
for f in files:
if f not in ignored:
recursive_overwrite(os.path.join(src, f),
os.path.join(dest, f),
ignore)
else:
shutil.copyfile(src, dest)
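# usage sketch (added; the paths are hypothetical): mirror the unpacked update
# over the install directory while skipping VCS metadata
#
#     recursive_overwrite("Update/Aiopa-Battles-master", ".",
#                         ignore=shutil.ignore_patterns(".git*"))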
def updateCheck():
global latest
page = urllib.request.urlopen('https://api.github.com/repos/lightopa/aiopa-battles/git/refs/heads/master')
#print(page.read().decode("utf-8"))
#data = json.loads(page.read().decode("utf-8"))
data = ast.literal_eval(page.read().decode("utf-8"))
latest = data["object"]["sha"]
#ind = page.find('class="sha btn btn-outline"')
#latest = page[ind + 38:ind + 45]
#print(latest)
#CHECK IF LATEST IS PROPER
try:
f = open("Update/current.version", "rb")
current = pickle.load(f)
f.close()
except:
print("create new file")
try:
os.mkdir("Update")
except:
pass
f = open("Update/current.version", "wb")
current = 0000
pickle.dump(current, f)
f.close()
print(current, "vs", latest)
if current != latest:
from os import remove
try:
remove("Update/download.zip")
except:
pass
print("downloading latest")
buttons = py.sprite.Group()
buttons.add(Button("Update", (220, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "Y", centred=True))
buttons.add(Button("Ignore", (420, 240), 60, (100, 100, 100), (255, 255, 255), theFont, "N", centred=True))
buttons.add(Button("Skip Update", (320, 300), 40, (100, 100, 100), (255, 255, 255), theFont, "S", centred=True))
labels = py.sprite.Group()
labels.add(textLabel("An Update Is Available:", (320, 150), (255, 255, 255), theFont, 50, False, True))
labels.add(textLabel(str(str(current) + " ==> " + str(latest)), (320, 180), (255, 255, 255), theFont, 20, False, True))
while True:
py.event.pump()
v.screen.fill((20, 20, 20))
buttons.update()
labels.update()
for event in py.event.get():
if event.type == py.QUIT:
sys.exit()
elif event.type == py.MOUSEBUTTONDOWN:
for button in buttons:
if button.pressed():
id = button.ID
if id == "Y":
global tries
tries = 0
download()
return
if id == "N":
return
if id == "S":
f = open("Saves/current.version", "wb")
current = latest
pickle.dump(current, f)
f.close()
return
py.display.flip()
else:
v.screen.fill((20, 20, 20))
t = textLabel("No Update!", (320, 250), (255, 0, 0), theFont, 70, False, True)
v.current = current
t.update()
py.display.update()
if __name__ == "__main__":
py.time.wait(2000)
def download():
global tries
try:
try:
os.mkdir("Update")
except:
pass
urllib.request.urlretrieve("https://github.com/lightopa/Aiopa-Battles/archive/master.zip", "Update/download.zip", reporthook)
f = open("Update/current.version", "wb")
current = latest
pickle.dump(current, f)
f.close()
unzip()
|
gaeun/open-event-orga-server
|
app/views/users/export.py
|
Python
|
gpl-3.0
| 2,211
| 0.003166
|
from flask import Blueprint
from flask import flash
from flask import make_response, render_template
from flask_login import current_user
from markupsafe import Markup
from app.helpers.data_getter import DataGetter
from app.helpers.auth import AuthManager
from app.helpers.exporters.ical import ICalExporter
from app.helpers.exporters.pentabarfxml import PentabarfExporter
from app.helpers.exporters.xcal import XCalExporter
from app.helpers.permission_decorators import can_access
event_export = Blueprint('event_export', __name__, url_prefix='/events/<int:event_id>/export')
@event_export.route('/')
@can_access
def display_export_view(event_id):
event = DataGetter.get_event(event_id)
export_jobs = DataGetter.get_export_jobs(event_id)
user = current_user
if not AuthManager.is_verified_user():
flash(Markup("Your account is unverified. "
"Please verify by clicking on the confirmation link that has been emailed to you."
'<br>Did not get the email? Please <a href="/resend_email/" class="alert-link"> '
'click here to resend the confirmation.</a>'))
return render_template(
'gentelella/admin/event/export/export.html', event=event, export_jobs=export_jobs,
current_user=user
)
@event_export.route('/pentabarf.xml')
@can_access
def pentabarf_export_view(event_id):
response = make_response(PentabarfExporter.export(event_id))
response.headers["Content-Type"] = "application/xml"
response.headers["Content-Disposition"] = "attachment; filename=pentabarf.xml"
return response
@event_export.route('/calendar.ical')
@can_access
def ical_export_view(event_id):
response = make_response(ICalExporter.export(event_id))
response.headers["Content-Type"] = "text/calendar"
    response.headers["Content-Disposition"] = "attachment; filename=calendar.ics"
return response
@event_export.route('/calendar.xcs')
@can_access
def xcal_export_view(event_id):
response = make_response(XCalExporter.export(event_id))
response.headers["Content-Type"] = "text/calendar"
response.headers["Content-Disposition"] = "attachment; filename=calendar.xcs"
return response
|
rutherford/tikitiki
|
tikitiki/__init__.py
|
Python
|
bsd-3-clause
| 255
| 0.003922
|
from .working_gif import working_encoded
from .splash import SplashScreen, Spinner, CheckProcessor
from .multilistbox import MultiListbox
from .utils import set_widget_state, set_binding, set_button_action, set_tab_order
from .tooltip import ToolTip
|
cbeck88/fifengine
|
tests/extension_tests/loaders_tests.py
|
Python
|
lgpl-2.1
| 2,394
| 0.012949
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ####################################################################
# Copyright (C) 2005-2013 by the FIFE team
# http://www.fifengine.net
# This file is part of FIFE.
#
# FIFE is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the
# Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
# ####################################################################
from extension_test_utils import *
from loaders import *
class TestLoaders(unittest.TestCase):
def setUp(self):
self.engine = getEngine()
self.model = self.engine.getModel()
self.metamodel = self.model.getMetaModel()
def tearDown(self):
del self.engine
def testLoading(self):
loadMapFile("content/maps/new_official_map.xml", self.engine)
query = self.metamodel.getObjects("id", "15001")
self.assertEqual(len(query), 1)
        query = self.metamodel.getObjects("id", "15201")
self.assertEqual(len(query), 1)
query = self.model.getMaps("id", "OfficialMap")
        self.assertEqual(len(query), 1)
self.map = query[0]
# self.assertEqual(self.map.get("Name"), "official_map.xml")
self.assertEqual(self.map.get("Version"), '1')
self.assertEqual(self.map.get("Author"), "barra")
query = self.map.getElevations("id", "OfficialMapElevation")
self.assertEqual(len(query), 1)
self.elevation = query[0]
query = self.elevation.getLayers("id", "OfficialMapTileLayer")
self.assertEqual(len(query), 1)
self.layer = query[0]
self.assertEqual(self.layer.hasInstances(), True)
instances = self.layer.getInstances()
# removed from test set now due to switch to new directory structure -> content moved to clients
# to be considered if this should be taken into use again
TEST_CLASSES = []
if __name__ == '__main__':
unittest.main()
|
treverhines/ModEst
|
modest/pymls/init.py
|
Python
|
mit
| 7,780
| 0.002571
|
"""
Created on Thu May 05 20:02:00 2011
@author: Tillsten
"""
import numpy as np
from scipy.linalg import qr
eps = np.finfo(float).eps
def mls(B, v, umin, umax, Wv=None, Wu=None, ud=None, u=None, W=None, imax=100):
"""
mls - Control allocation using minimal least squares.
[u,W,iter] = mls_alloc(B,v,umin,umax,[Wv,Wu,ud,u0,W0,imax])
Solves the bounded sequential least-squares problem
min ||Wu(u-ud)|| subj. to u in M
where M is the set of control signals solving
min ||Wv(Bu-v)|| subj. to umin <= u <= umax
using a two stage active set method. Wu must be diagonal since the
problem is reformulated as a minimal least squares problem. The
implementation does not handle the case of coplanar controls.
Inputs:
-------
B control effectiveness matrix (k x m)
v commanded virtual control (k x 1)
umin lower position limits (m x 1)
umax upper position limits (m x 1)
Wv virtual control weighting matrix (k x k) [I]
Wu control weighting matrix (m x m), diagonal [I]
ud desired control (m x 1) [0]
u0 initial point (m x 1)
W0 initial working set (m x 1) [empty]
imax max no. of iterations [100]
Outputs:
-------
u optimal control
W optimal active set
iter no. of iterations (= no. of changes in the working set + 1)
    Active set syntax: W_i =  0 if u_i not saturated
                             -1 if u_i = umin_i
                             +1 if u_i = umax_i
Directly Based on the code from:
Ola Harkegard, www.control.isy.liu.se/~ola
    see licence.
"""
#k = number of virtual controls
#m = number of variables (actuators)
k, m = B.shape
    if u is None:
        u = np.mean(umin + umax, 0)[:, None]
    if W is None:
        W = np.zeros((m, 1))
    if ud is None:
        ud = np.zeros((m, 1))
    if Wu is None:
        Wu = np.eye(m)
    if Wv is None:
        Wv = np.eye(k)
phase = 1
#Reformulate as a minimal least squares problem. See 2002-03-08 (1).
A = Wv.dot(B).dot(np.linalg.pinv(Wu))
b = Wv.dot(v - B.dot(ud))
xmin = (umin - ud).flatten()
xmax = (umax - ud).flatten()
# Compute initial point and residual.
x = Wu.dot(u - ud)
r = np.atleast_2d(A.dot(x) - b)
#Determine indeces of free variables
i_free = (W == 0).flatten()
m_free = np.sum(i_free)
for i in range(imax):
#print 'Iter: ', i
if phase == 1:
A_free = A[:, i_free]
if m_free <= k:
if m_free > 0:
p_free = np.linalg.lstsq(-A_free, r)[0]
else:
q1, r1 = qr(A_free.T)
            p_free = -q1.dot(np.linalg.solve(r1.T, r))
p = np.zeros((m, 1))
if A.shape[1] > 1:
p[i_free] = p_free
else:
p[i_free] = p_free.flatten()
else:
i_fixed = np.logical_not(i_free)
m_fixed = m - m_free
if m_fixed > 0:
HT = U[i_fixed.squeeze(), :].T
V, Rtot = qr(np.atleast_2d(HT))
V1 = V[:, :m_fixed]
V2 = V[:, m_fixed + 1:]
R = Rtot[:, m_fixed]
else:
V, Rtot = np.array([[]]), np.array([[]])
V1 = V2 = R = V.T
s = -V2.T.dot(z)
pz = V2.dot(s)
p = U.dot(pz)
x_opt = x + p
infeasible = np.logical_or(x_opt < xmin, x_opt > xmax)
if not np.any(infeasible[i_free]):
x = x_opt
if phase == 1:
r = r + A.dot(p)
else:
z = z + pz
if phase == 1 and m_free >= k:
phase = 2
Utot, Stot = qr(A.T)
U = Utot[:, k:]
z = U.T.dot(x)
else:
lam = np.zeros((m, 1))
if m_free < m:
if phase == 1:
g = A.T.dot(r)
lam = -W * g
else:
lam[i_fixed] = -W[i_fixed] * np.linalg.solve(R, V1.T.dot(z))
if np.all(lam >= -eps):
u = np.linalg.solve(Wu, x) + ud
return u
lambda_neg, i_neg = np.min(lam), np.argmin(lam)
W[i_neg] = 0
i_free[i_neg] = True
m_free += 1
else:
dist = np.ones(m)
i_min = np.logical_and(i_free, p.flat < 0).flatten()
i_max = np.logical_and(i_free, p.flat > 0).flatten()
dist[i_min] = (xmin[i_min] - x[i_min]) / p[i_min]
dist[i_max] = (xmax[i_max] - x[i_max]) / p[i_max]
alpha, i_alpha = np.min(dist), np.argmin(dist)
x = x + alpha * p
if phase == 1:
r = r + A.dot(alpha * p) #!!
else:
z = z + alpha * pz
W[i_alpha] = np.sign(p[i_alpha])
if i_free[i_alpha]:
i_free[i_alpha] = False
m_free -= 1
u = np.linalg.solve(Wu, x) + ud
return u
def bounded_lsq(A, b, lower_lim, upper_lim):
"""
Minimizes:
|Ax-b|_2
for lower_lim<x<upper_lim.
"""
return mls(A, b, lower_lim, upper_lim)
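# --- minimal usage sketch (added): in 1-D the bounded minimiser is the
# unconstrained least-squares solution clipped to the box, cf. the test below ---
#
#     bounded_lsq(np.array([[2.0]]), np.array([[10.0]]),
#                 np.atleast_2d(0.0), np.atleast_2d(3.0))   # -> approx. [[3.]]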
def test_bounded_lsq():
from numpy.core.umath_tests import matrix_multiply
s = np.linspace(0, 10, 100)
A = np.exp(-((s - 5) ** 2) / 20)
A = A[:, None]
b = 16 * A
x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(15))
np.testing.assert_almost_equal(x, 15)
A = np.array([[1, -3], [5, 7]])
b = np.array([[-50], [50]])
ll = np.array(([[-10], [-10]]))
ul = np.array(([[10], [10]]))
x0 = bounded_lsq(A, b, ll, ul)
np.testing.assert_array_almost_equal(x0, np.array([[-4.61538462], [10.]]))
if __name__ == '__main__':
from numpy.core.umath_tests import matrix_multiply
import matplotlib.pyplot as plt
test_bounded_lsq()
s = np.linspace(0, 10, 100)
A = np.exp(-((s - 5) ** 2) / 20)
A = A[:, None]
b = 16 * A
x = bounded_lsq(A, b, np.atleast_2d(0), np.atleast_2d(4))
plt.plot(A.dot(x))
plt.plot(b)
plt.figure()
plt.rcParams['font.family'] = 'serif'
A = np.array([[1, -3], [5, 7]])
b = np.array([[-50], [50]])
ll = np.array(([[-10], [-10]]))
ul = np.array(([[10], [10]]))
Ud = np.array(([0, 0]))
gamma = 1000
x0 = bounded_lsq(A, b, ll, ul)
x = np.linspace(-30, 30, 500)
y = np.linspace(-30, 30, 500)
X, Y = np.meshgrid(x, y)
S = np.dstack((X, Y))
SN = matrix_multiply(S, A.T)
plt.clf()
    plt.contourf(x, y, np.sqrt(((SN - b.T) ** 2).sum(-1)), 30,
                 cmap=plt.cm.PuBu_r)
cmap=plt.cm.PuBu_r)
plt.colorbar()
#plt.axhline(ll[0])
    #plt.axhline(ul[0])
#plt.axvline(ll[1])
#plt.axvline(ul[1])
rect = np.vstack((ll, ul - ll))
patch = plt.Rectangle(ll, *(ul - ll), facecolor=(0.0, 0., 0., 0))
plt.gca().add_patch(patch)
plt.annotate("Bounded Min",
xy=x0, xycoords='data',
xytext=(-5, 5), textcoords='data',
arrowprops=dict(arrowstyle="->",
connectionstyle="arc3"),
)
plt.annotate("Lsq Min",
xy=np.linalg.lstsq(A, b)[0], xycoords='data',
xytext=(20, 10), textcoords='offset points',
arrowprops=dict(arrowstyle="->",
connectionstyle="arc3"),
)
plt.scatter(*x0)
plt.scatter(*np.linalg.lstsq(A, b)[0])
plt.show()
|
mitnk/letsencrypt
|
letsencrypt/renewal.py
|
Python
|
apache-2.0
| 15,711
| 0.000827
|
"""Functionality for autorenewal and associated juggling of configurations"""
from __future__ import print_function
import copy
import glob
import logging
import os
import traceback
import six
import zope.component
import OpenSSL
from letsencrypt import configuration
from letsencrypt import cli
from letsencrypt import constants
from letsencrypt import crypto_util
from letsencrypt import errors
from letsencrypt import interfaces
from letsencrypt import le_util
from letsencrypt import hooks
from letsencrypt import storage
from letsencrypt.plugins import disco as plugins_disco
logger = logging.getLogger(__name__)
# These are the items which get pulled out of a renewal configuration
# file's renewalparams and actually used in the client configuration
# during the renewal process. We have to record their types here because
# the renewal configuration process loses this information.
STR_CONFIG_ITEMS = ["config_dir", "logs_dir", "work_dir", "user_agent",
"server", "account", "authenticator", "installer",
"standalone_supported_challenges
|
"]
INT_CONFIG_ITEMS = ["rsa_key_size", "tls_sni_01_port", "http01_port"]
def renewal_conf_files(config):
"""Return /path/to/*.conf in the renewal conf directory"""
    return glob.glob(os.path.join(config.renewal_configs_dir, "*.conf"))
def _reconstitute(config, full_path):
"""Try to instantiate a RenewableCert, updating config with relevant items.
This is specifically for use in renewal and enforces several checks
    and policies to ensure that we can try to proceed with the renewal
request. The config argument is modified by including relevant options
read from the renewal configuration file.
:param configuration.NamespaceConfig config: configuration for the
current lineage
:param str full_path: Absolute path to the configuration file that
defines this lineage
:returns: the RenewableCert object or None if a fatal error occurred
:rtype: `storage.RenewableCert` or NoneType
"""
try:
renewal_candidate = storage.RenewableCert(
full_path, configuration.RenewerConfiguration(config))
except (errors.CertStorageError, IOError):
logger.warning("Renewal configuration file %s is broken. Skipping.", full_path)
logger.debug("Traceback was:\n%s", traceback.format_exc())
return None
if "renewalparams" not in renewal_candidate.configuration:
logger.warning("Renewal configuration file %s lacks "
"renewalparams. Skipping.", full_path)
return None
renewalparams = renewal_candidate.configuration["renewalparams"]
if "authenticator" not in renewalparams:
logger.warning("Renewal configuration file %s does not specify "
"an authenticator. Skipping.", full_path)
return None
# Now restore specific values along with their data types, if
# those elements are present.
try:
_restore_required_config_elements(config, renewalparams)
_restore_plugin_configs(config, renewalparams)
except (ValueError, errors.Error) as error:
logger.warning(
"An error occurred while parsing %s. The error was %s. "
"Skipping the file.", full_path, error.message)
logger.debug("Traceback was:\n%s", traceback.format_exc())
return None
try:
config.domains = [le_util.enforce_domain_sanity(d)
for d in renewal_candidate.names()]
except errors.ConfigurationError as error:
logger.warning("Renewal configuration file %s references a cert "
"that contains an invalid domain name. The problem "
"was: %s. Skipping.", full_path, error)
return None
return renewal_candidate
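# illustrative sketch (added): how a renewal pass would drive the helpers
# above; the surrounding loop is an assumption, not taken from this module
#
#     for conf_file in renewal_conf_files(config):
#         lineage = _reconstitute(copy.deepcopy(config), conf_file)
#         if lineage is None:
#             continue  # broken or incomplete renewal config, already logged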
def _restore_webroot_config(config, renewalparams):
"""
webroot_map is, uniquely, a dict, and the general-purpose configuration
restoring logic is not able to correctly parse it from the serialized
form.
"""
if "webroot_map" in renewalparams:
if not cli.set_by_cli("webroot_map"):
config.namespace.webroot_map = renewalparams["webroot_map"]
elif "webroot_path" in renewalparams:
logger.info("Ancient renewal conf file without webroot-map, restoring webroot-path")
wp = renewalparams["webroot_path"]
if isinstance(wp, str): # prior to 0.1.0, webroot_path was a string
wp = [wp]
config.namespace.webroot_path = wp
def _restore_plugin_configs(config, renewalparams):
"""Sets plugin specific values in config from renewalparams
:param configuration.NamespaceConfig config: configuration for the
current lineage
:param configobj.Section renewalparams: Parameters from the renewal
configuration file that defines this lineage
"""
# Now use parser to get plugin-prefixed items with correct types
# XXX: the current approach of extracting only prefixed items
# related to the actually-used installer and authenticator
# works as long as plugins don't need to read plugin-specific
# variables set by someone else (e.g., assuming Apache
# configurator doesn't need to read webroot_ variables).
# Note: if a parameter that used to be defined in the parser is no
# longer defined, stored copies of that parameter will be
# deserialized as strings by this logic even if they were
# originally meant to be some other type.
if renewalparams["authenticator"] == "webroot":
_restore_webroot_config(config, renewalparams)
plugin_prefixes = []
else:
plugin_prefixes = [renewalparams["authenticator"]]
if renewalparams.get("installer", None) is not None:
plugin_prefixes.append(renewalparams["installer"])
for plugin_prefix in set(plugin_prefixes):
for config_item, config_value in six.iteritems(renewalparams):
if config_item.startswith(plugin_prefix + "_") and not cli.set_by_cli(config_item):
# Values None, True, and False need to be treated specially,
# As their types aren't handled correctly by configobj
if config_value in ("None", "True", "False"):
# bool("False") == True
# pylint: disable=eval-used
setattr(config.namespace, config_item, eval(config_value))
else:
cast = cli.argparse_type(config_item)
setattr(config.namespace, config_item, cast(config_value))
def _restore_required_config_elements(config, renewalparams):
"""Sets non-plugin specific values in config from renewalparams
:param configuration.NamespaceConfig config: configuration for the
current lineage
:param configobj.Section renewalparams: parameters from the renewal
configuration file that defines this lineage
"""
# string-valued items to add if they're present
for config_item in STR_CONFIG_ITEMS:
if config_item in renewalparams and not cli.set_by_cli(config_item):
value = renewalparams[config_item]
# Unfortunately, we've lost type information from ConfigObj,
# so we don't know if the original was NoneType or str!
if value == "None":
value = None
setattr(config.namespace, config_item, value)
# int-valued items to add if they're present
for config_item in INT_CONFIG_ITEMS:
if config_item in renewalparams and not cli.set_by_cli(config_item):
config_value = renewalparams[config_item]
# the default value for http01_port was None during private beta
if config_item == "http01_port" and config_value == "None":
logger.info("updating legacy http01_port value")
int_value = cli.flag_default("http01_port")
else:
try:
int_value = int(config_value)
except ValueError:
raise errors.Error(
"Expected a numeric value for {0}".f
|
testmana2/test
|
Plugins/VcsPlugins/vcsMercurial/RebaseExtension/rebase.py
|
Python
|
gpl-3.0
| 4,473
| 0.00313
|
# -*- coding: utf-8 -*-
# Copyright (c) 2011 - 2015 Detlev Offenbach <detlev@die-offenbachs.de>
#
"""
Module implementing the rebase extension interface.
"""
from __future__ import unicode_literals
import os
from PyQt5.QtWidgets import QDialog
from ..HgExtension import HgExtension
from ..HgDialog import HgDialog
class Rebase(HgExtension):
"""
Class implementing the rebase extension interface.
"""
def __init__(self, vcs):
"""
Constructor
@param vcs reference to the Mercurial vcs object
"""
super(Rebase, self).__init__(vcs)
def hgRebase(self, path):
"""
Public method to rebase changesets to a different branch.
@param path directory name of the project (string)
@return flag indicating that the project should be reread (boolean)
"""
# find the root of the repo
repodir = self.vcs.splitPath(path)[0]
while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
repodir = os.path.dirname(repodir)
if os.path.splitdrive(repodir)[1] == os.sep:
return False
res = False
from .HgRebaseDialog import HgRebaseDialog
dlg = HgRebaseDialog(self.vcs.hgGetTagsList(repodir),
self.vcs.hgGetBranchesList(repodir),
self.vcs.hgGetBookmarksList(repodir))
if dlg.exec_() == QDialog.Accepted:
(indicator, sourceRev, destRev, collapse, keep, keepBranches,
detach) = dlg.getData()
args = self.vcs.initCommand("rebase")
if indicator == "S":
args.append("--source")
args.append(sourceRev)
elif indicator == "B":
args.append("--base")
args.append(sourceRev)
if destRev:
args.append("--dest")
args.append(destRev)
if collapse:
args.append("--collapse")
if keep:
args.append("--keep")
if keepBranches:
args.append("--keepbranches")
if detach:
args.append("--detach")
args.append("--verbose")
dia = HgDialog(self.tr('Rebase Changesets'), self.vcs)
res = dia.startProcess(args, repodir)
if res:
dia.exec_()
res = dia.hasAddOrDelete()
self.vcs.checkVCSStatus()
return res
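    # illustrative note (added): with indicator "S", sourceRev "1234" and
    # destRev "default" (hypothetical values), the args assembled above
    # amount to running: hg rebase --source 1234 --dest default --verbose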
def hgRebaseContinue(self, path):
"""
Public method to continue rebasing changesets from another branch.
@param path directory name of the project (string)
@return flag indicating that the project should be reread (boolean)
"""
# find the root of the repo
repodir = self.vcs.splitPath(path)[0]
while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
repodir = os.path.dirname(repodir)
if os.path.splitdrive(repodir)[1] == os.sep:
return False
args = self.vcs.initCommand("rebase")
args.append("--continue")
args.append("--verbose")
dia = HgDialog(self.tr('Rebase Changesets (Continue)'), self.vcs)
res = dia.startProcess(args, repodir)
if res:
dia.exec_()
res = dia.hasAddOrDelete()
self.vcs.checkVCSStatus()
return res
def hgRebaseAbort(self, path):
"""
Public method to abort rebasing changesets from another branch.
@param path directory name of the project (string)
@return flag indicating that the project should be reread (boolean)
"""
# find the root of the repo
repodir = self.vcs.splitPath(path)[0]
        while not os.path.isdir(os.path.join(repodir, self.vcs.adminDir)):
repodir = os.path.dirname(repodir)
if os.path.splitdrive(repodir)[1] == os.sep:
return False
        args = self.vcs.initCommand("rebase")
args.append("--abort")
args.append("--verbose")
dia = HgDialog(self.tr('Rebase Changesets (Abort)'), self.vcs)
res = dia.startProcess(args, repodir)
if res:
dia.exec_()
res = dia.hasAddOrDelete()
self.vcs.checkVCSStatus()
return res
|
aronsky/home-assistant
|
tests/components/tasmota/test_sensor.py
|
Python
|
apache-2.0
| 33,360
| 0.0006
|
"""The tests for the Tasmota sensor platform."""
import copy
import datetime
from datetime import timedelta
import json
from unittest.mock import Mock, patch
import hatasmota
from hatasmota.utils import (
get_topic_stat_status,
get_topic_tele_sensor,
get_topic_tele_will,
)
import pytest
from homeassistant import config_entries
from homeassistant.components import sensor
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import ATTR_ASSUMED_STATE, STATE_UNKNOWN
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt
from .test_common import (
DEFAULT_CONFIG,
help_test_availability,
help_test_availability_discovery_update,
help_test_availability_poll_state,
help_test_availability_when_connection_lost,
help_test_discovery_device_remove,
help_test_discovery_removal,
help_test_discovery_update_unchanged,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
)
from tests.common import async_fire_mqtt_message, async_fire_time_changed
DEFAULT_SENSOR_CONFIG = {
"sn": {
"Time": "2020-09-25T12:47:15",
"DHT11": {"Temperature": None},
"TempUnit": "C",
}
}
BAD_INDEXED_SENSOR_CONFIG_3 = {
"sn": {
"Time": "2020-09-25T12:47:15",
"ENERGY": {
"ApparentPower": [7.84, 1.23, 2.34],
},
}
}
INDEXED_SENSOR_CONFIG = {
"sn": {
"Time": "2020-09-25T12:47:15",
"ENERGY": {
"TotalStartTime": "2018-11-23T15:33:47",
"Total": 0.017,
"TotalTariff": [0.000, 0.017],
"Yesterday": 0.000,
"Today": 0.002,
"ExportActive": 0.000,
"ExportTariff": [0.000, 0.000],
"Period": 0.00,
"Power": 0.00,
"ApparentPower": 7.84,
"ReactivePower": -7.21,
"Factor": 0.39,
"Frequency": 50.0,
"Voltage": 234.31,
"Current": 0.039,
"ImportActive": 12.580,
"ImportReactive": 0.002,
"ExportReactive": 39.131,
"PhaseAngle": 290.45,
},
}
}
INDEXED_SENSOR_CONFIG_2 = {
"sn": {
"Time": "2020-09-25T12:47:15",
"ENERGY": {
"TotalStartTime": "2018-11-23T15:33:47",
"Total": [0.000, 0.017],
"TotalTariff": [0.000, 0.017],
"Yesterday": 0.000,
"Today": 0.002,
"ExportActive": 0.000,
"ExportTariff": [0.000, 0.000],
"Period": 0.00,
"Power": 0.00,
"ApparentPower": 7.84,
"ReactivePower": -7.21,
"Factor": 0.39,
"Frequency": 50.0,
"Voltage": 234.31,
"Current": 0.039,
"ImportActive": 12.580,
"ImportReactive": 0.002,
"ExportReactive": 39.131,
"PhaseAngle": 290.45,
},
}
}
NESTED_SENSOR_CONFIG = {
"sn": {
"Time": "2020-03-03T00:00:00+00:00",
"TX23": {
"Speed": {"Act": 14.8, "Avg": 8.5, "Min": 12.2, "Max": 14.8},
"Dir": {
"Card": "WSW",
"Deg": 247.5,
"Avg": 266.1,
"AvgCard": "W",
"Range": 0,
},
},
"SpeedUnit": "km/h",
}
}
async def test_controlling_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(DEFAULT_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
entity_reg = er.async_get(hass)
entry = entity_reg.async_get("sensor.tasmota_dht11_temperature")
assert entry.disabled is False
assert entry.disabled_by is None
assert entry.entity_category is None
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"DHT11":{"Temperature":20.5}}'
)
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "20.5"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS10",
'{"StatusSNS":{"DHT11":{"Temperature":20.0}}}',
)
state = hass.states.get("sensor.tasmota_dht11_temperature")
assert state.state == "20.0"
async def test_nested_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
sensor_config = copy.deepcopy(NESTED_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"TX23":{"Speed":{"Act":"12.3"}}}'
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "12.3"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS10",
'{"StatusSNS":{"TX23":{"Speed":{"Act":"23.4"}}}}',
)
state = hass.states.get("sensor.tasmota_tx23_speed_act")
assert state.state == "23.4"
async def test_indexed_sensor_state_via_mqtt(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT."""
config = copy.deepcopy(DEFAULT_CONFIG)
    sensor_config = copy.deepcopy(INDEXED_SENSOR_CONFIG)
mac = config["mac"]
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/config",
json.dumps(config),
)
|
await hass.async_block_till_done()
async_fire_mqtt_message(
hass,
f"{DEFAULT_PREFIX}/{mac}/sensors",
json.dumps(sensor_config),
)
await hass.async_block_till_done()
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "unavailable"
assert not state.attributes.get(ATTR_ASSUMED_STATE)
async_fire_mqtt_message(hass, "tasmota_49A3BC/tele/LWT", "Online")
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == STATE_UNKNOWN
assert not state.attributes.get(ATTR_ASSUMED_STATE)
# Test periodic state update
async_fire_mqtt_message(
hass, "tasmota_49A3BC/tele/SENSOR", '{"ENERGY":{"TotalTariff":[1.2,3.4]}}'
)
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "3.4"
# Test polled state update
async_fire_mqtt_message(
hass,
"tasmota_49A3BC/stat/STATUS10",
'{"StatusSNS":{"ENERGY":{"TotalTariff":[5.6,7.8]}}}',
)
state = hass.states.get("sensor.tasmota_energy_totaltariff_1")
assert state.state == "7.8"
async def test_indexed_sensor_state_via_mqtt2(hass, mqtt_mock, setup_tasmota):
"""Test state update via MQTT for sensor wi
|
chriszs/redash
|
tests/test_authentication.py
|
Python
|
bsd-2-clause
| 13,446
| 0.002157
|
import os
import time
from six.moves import reload_module
from flask import request
from mock import patch
from redash import models, settings
from redash.authentication import (api_key_load_user_from_request,
get_login_url, hmac_load_user_from_request,
sign)
from redash.authentication.google_oauth import (create_and_login_user,
verify_profile)
from redash.utils import utcnow
from sqlalchemy.orm.exc import NoResultFound
from tests import BaseTestCase
class TestApiKeyAuthentication(BaseTestCase):
#
# This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestApiKeyAuthentication, self).setUp()
self.api_key = '10'
self.query = self.factory.create_query(api_key=self.api_key)
models.db.session.flush()
self.query_url = '/{}/api/queries/{}'.format(self.factory.org.slug, self.query.id)
self.queries_url = '/{}/api/queries'.format(self.factory.org.slug)
def test_no_api_key(self):
with self.app.test_client() as c:
rv = c.get(self.query_url)
self.assertIsNone(api_key_load_user_from_request(request))
def test_wrong_api_key(self):
with self.app.test_client() as c:
rv = c.get(self.query_url, query_string={'api_key': 'whatever'})
self.assertIsNone(api_key_load_user_from_request(request))
def test_correct_api_key(self):
with self.app.test_client() as c:
rv = c.get(self.query_url, query_string={'api_key': self.api_key})
self.assertIsNotNone(api_key_load_user_from_request(request))
def test_no_query_id(self):
with self.app.test_client() as c:
rv = c.get(self.queries_url, query_string={'api_key': self.api_key})
self.assertIsNone(api_key_load_user_from_request(request))
def test_user_api_key(self):
user = self.factory.create_user(api_key="user_key")
models.db.session.flush()
with self.app.test_client() as c:
rv = c.get(self.queries_url, query_string={'api_key': user.api_key})
self.assertEqual(user.id, api_key_load_user_from_request(request).id)
def test_disabled_user_api_key(self):
user = self.factory.create_user(api_key="user_key")
user.disable()
models.db.session.flush()
with self.app.test_client() as c:
rv = c.get(self.queries_url, query_string={'api_key': user.api_key})
self.assertEqual(None, api_key_load_user_from_request(request))
def test_api_key_header(self):
with self.app.test_client() as c:
rv = c.get(self.query_url, headers={'Authorization': "Key {}".format(self.api_key)})
self.assertIsNotNone(api_key_load_user_from_request(request))
def test_api_key_header_with_wrong_key(self):
with self.app.test_client() as c:
rv = c.get(self.query_url, headers={'Authorization': "Key oops"})
self.assertIsNone(api_key_load_user_from_request(request))
def test_api_key_for_wrong_org(self):
other_user = self.factory.create_admin(org=self.factory.create_org())
with self.app.test_client() as c:
rv = c.get(self.query_url, headers={'Authorization': "Key {}".format(other_user.api_key)})
self.assertEqual(404, rv.status_code)
class TestHMACAuthentication(BaseTestCase):
#
    # This is a bad way to write these tests, but the way Flask works doesn't make it easy to write them properly...
#
def setUp(self):
super(TestHMACAuthentication, self).setUp()
self.api_key = '10'
self.query = self.factory.create_query(api_key=self.api_key)
models.db.session.flush()
self.path = '/{}/api/queries/{}'.format(self.query.org.slug, self.query.id)
self.expires = time.time() + 1800
def signature(self, expires):
        return sign(self.query.api_key, self.path, expires)
def test_no_signature(self):
with self.app.test_client() as c:
rv = c.get(self.path)
self.assertIsNone(hmac_load_user_from_request(request))
def test_wrong_signature(self):
with self.app.test_client() as c:
rv = c.get(self.path, query_string={'signature': 'whatever', 'expires': self.expires})
self.assertIsNone(hmac_load_user_from_request(request))
def test_correct_signature(self):
with self.app.test_client() as c:
rv = c.get(self.path, query_string={'signature': self.signature(self.expires), 'expires': self.expires})
self.assertIsNotNone(hmac_load_user_from_request(request))
def test_no_query_id(self):
with self.app.test_client() as c:
rv = c.get('/{}/api/queries'.format(self.query.org.slug), query_string={'api_key': self.api_key})
self.assertIsNone(hmac_load_user_from_request(request))
def test_user_api_key(self):
user = self.factory.create_user(api_key="user_key")
path = '/api/queries/'
models.db.session.flush()
signature = sign(user.api_key, path, self.expires)
with self.app.test_client() as c:
rv = c.get(path, query_string={'signature': signature, 'expires': self.expires, 'user_id': user.id})
self.assertEqual(user.id, hmac_load_user_from_request(request).id)
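# Added illustration (not part of redash): a minimal sketch of the kind of
# signing scheme the tests above exercise -- an HMAC over the request path and
# an expiry timestamp, keyed by the API key. redash's real sign() may differ.
def _example_sign(key, path, expires):
    import hashlib
    import hmac
    h = hmac.new(str(key).encode('utf-8'), digestmod=hashlib.sha1)
    h.update(str(path).encode('utf-8'))
    h.update(str(expires).encode('utf-8'))
    return h.hexdigest()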
class TestCreateAndLoginUser(BaseTestCase):
def test_logins_valid_user(self):
user = self.factory.create_user(email=u'test@example.com')
with patch('redash.authentication.login_user') as login_user_mock:
create_and_login_user(self.factory.org, user.name, user.email)
login_user_mock.assert_called_once_with(user, remember=True)
    def test_creates_valid_new_user(self):
email = u'test@example.com'
name = 'Test User'
with patch('redash.authentication.login_user') as login_user_mock:
create_and_login_user(self.factory.org, name, email)
self.assertTrue(login_user_mock.called)
user = models.User.query.filter(models.User.email == email).one()
self.assertEqual(user.email, email)
def test_updates_user_name(self):
user = self.factory.create_user(email=u'test@example.com')
with patch('redash.authentication.login_user') as login_user_mock:
create_and_login_user(self.factory.org, "New Name", user.email)
login_user_mock.assert_called_once_with(user, remember=True)
class TestVerifyProfile(BaseTestCase):
def test_no_domain_allowed_for_org(self):
profile = dict(email=u'arik@example.com')
self.assertFalse(verify_profile(self.factory.org, profile))
def test_domain_not_in_org_domains_list(self):
profile = dict(email=u'arik@example.com')
self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
self.assertFalse(verify_profile(self.factory.org, profile))
def test_domain_in_org_domains_list(self):
profile = dict(email=u'arik@example.com')
self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.com']
self.assertTrue(verify_profile(self.factory.org, profile))
self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org', 'example.com']
self.assertTrue(verify_profile(self.factory.org, profile))
def test_org_in_public_mode_accepts_any_domain(self):
profile = dict(email=u'arik@example.com')
self.factory.org.settings[models.Organization.SETTING_IS_PUBLIC] = True
self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = []
self.assertTrue(verify_profile(self.factory.org, profile))
def test_user_not_in_domain_but_account_exists(self):
profile = dict(email=u'arik@example.com')
self.factory.create_user(email=u'arik@example.com')
self.factory.org.settings[models.Organization.SETTING_GOOGLE_APPS_DOMAINS] = ['example.org']
        self.assertTrue(verify_profile(self.factory.org, profile))
|
the-zebulan/CodeWars
|
tests/kyu_7_tests/test_complementary_dna.py
|
Python
|
mit
| 364
| 0
|
import unittest
from katas.kyu_7.complementary_dna import DNA_strand
class DNAStrandTestCase(unittest.TestCase):
def test_equals(self):
self.assertEqual(DNA_strand('AAAA'), 'TTTT')
def test_equals_2(self):
self.assertEqual(DNA_strand('ATTGC'), 'TAACG')
    def test_equals_3(self):
self.assertEqual(DNA_strand('GTAT'), 'CATA')
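# Added illustration (not part of the kata tests): one minimal implementation
# that satisfies these cases -- map each base to its complement. The real
# solution module may differ.
def _example_dna_strand(dna):
    complements = {'A': 'T', 'T': 'A', 'C': 'G', 'G': 'C'}
    return ''.join(complements[base] for base in dna)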
|
pathway27/games-prac
|
python3/tictactoe.py
|
Python
|
mit
| 6,138
| 0.006028
|
# Tic Tac Toe
import random
def drawBoard(board):
# This function prints out the board that it was passed.
# "board" is a list of 10 strings representing the board (ignore index 0)
print(' | |')
print(' ' + board[7] + ' | ' + board[8] + ' | ' + board[9])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[4] + ' | ' + board[5] + ' | ' + board[6])
print(' | |')
print('-----------')
print(' | |')
print(' ' + board[1] + ' | ' + board[2] + ' | ' + board[3])
print(' | |')
def inputPlayerLetter():
    # Lets the player type which letter they want to be.
    # Returns a list with the player's letter as the first item, and the computer's letter as the second.
letter = ''
while not (letter == 'X' or letter == 'O'):
print('Do you want to be X or O?')
letter = input().upper()
    # the first element in the list is the player's letter, the second is the computer's letter.
    if letter == 'X':
        return ['X', 'O']
else:
return ['O', 'X']
def whoGoesFirst():
# Randomly choose the player who goes first.
if random.randint(0, 1) == 0:
return 'computer'
else:
return 'player'
def playAgain():
# This function returns True if the player wants to play again, otherwise it returns False.
print('Do you want to play again? (yes or no)')
return input().lower().startswith('y')
def makeMove(board, letter, move):
board[move] = letter
def isWinner(bo, le):
# Given a board and a player's letter, this function returns True if that player has won.
# We use bo instead of board and le instead of letter so we don't have to type as much.
return ((bo[7] == le and bo[8] == le and bo[9] == le) or # across the top
(bo[4] == le and bo[5] == le and bo[6] == le) or # across the middle
(bo[1] == le and bo[2] == le and bo[3] == le) or # across the bottom
(bo[7] == le and bo[4] == le and bo[1] == le) or # down the left side
(bo[8] == le and bo[5] == le and bo[2] == le) or # down the middle
(bo[9] == le and bo[6] == le and bo[3] == le) or # down the right side
(bo[7] == le and bo[5] == le and bo[3] == le) or # diagonal
(bo[9] == le and bo[5] == le and bo[1] == le)) # diagonal
def getBoardCopy(board):
    # Make a duplicate of the board list and return the duplicate.
dupeBoard = []
for i in board:
dupeBoard.append(i)
return dupeBoard
def isSpaceFree(board, move):
# Return true if the passed move is free on the passed board.
return board[move] == ' '
def getPlayerMove(board):
# Let the player type in his move.
move = ' '
while move not in '1 2 3 4 5 6 7 8 9'.split() or not isSpaceFree(board, int(move)):
print('What is your next move? (1-9)')
move = input()
return int(move)
def chooseRandomMoveFromList(board, movesList):
# Returns a valid move from the passed list on the passed board.
# Returns None if there is no valid move.
possibleMoves = []
for i in movesList:
if isSpaceFree(board, i):
possibleMoves.append(i)
if len(possibleMoves) != 0:
return random.choice(possibleMoves)
else:
return None
def getComputerMove(board, computerLetter):
# Given a board and the computer's letter, determine where to move and return that move.
if computerLetter == 'X':
playerLetter = 'O'
else:
playerLetter = 'X'
# Here is our algorithm for our Tic Tac Toe AI:
# First, check if we can win in the next move
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, computerLetter, i)
if isWinner(copy, computerLetter):
return i
# Check if the player could win on his next move, and block them.
for i in range(1, 10):
copy = getBoardCopy(board)
if isSpaceFree(copy, i):
makeMove(copy, playerLetter, i)
if isWinner(copy, playerLetter):
return i
# Try to take one of the corners, if they are free.
move = chooseRandomMoveFromList(board, [1, 3, 7, 9])
    if move is not None:
return move
# Try to take the center, if it is free.
if isSpaceFree(board, 5):
return 5
# Move on one of the sides.
return chooseRandomMoveFromList(board, [2, 4, 6, 8])
def isBoardFull(board):
# Return True if every space on the board has been taken. Otherwise return False.
for i in range(1, 10):
if isSpaceFree(board, i):
return False
return True
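# Added illustration (not part of the original script): a quick check of the
# move priorities in getComputerMove -- with X threatening the 1-2-3 row, the
# blocking step should fire before the corner/center preferences.
def demoComputerBlock():
    demoBoard = [' ', 'X', 'X', ' ', ' ', 'O', ' ', ' ', ' ', ' ']
    assert getComputerMove(demoBoard, 'O') == 3  # blocks the player's win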
print('Welcome to Tic Tac Toe!')
while True:
# Reset the board
theBoard = [' '] * 10
playerLetter, computerLetter = inputPlayerLetter()
turn = whoGoesFirst()
print('The ' + turn + ' will go first.')
gameIsPlaying = True
while gameIsPlaying:
if turn == 'player':
# Player's turn.
drawBoard(theBoard)
move = getPlayerMove(theBoard)
makeMove(theBoard, playerLetter, move)
if isWinner(theBoard, playerLetter):
drawBoard(theBoard)
print('Hooray! You have won the game!')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'computer'
else:
# Computer's turn.
move = getComputerMove(theBoard, computerLetter)
makeMove(theBoard, computerLetter, move)
if isWinner(theBoard, computerLetter):
drawBoard(theBoard)
print('The computer has beaten you! You lose.')
gameIsPlaying = False
else:
if isBoardFull(theBoard):
drawBoard(theBoard)
print('The game is a tie!')
break
else:
turn = 'player'
if not playAgain():
break
|
blueyed/coveragepy
|
coverage/multiproc.py
|
Python
|
apache-2.0
| 3,478
| 0.00115
|
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
"""Monkey-patching to add multiprocessing support for coverage.py"""
import multiprocessing
import multiprocessing.process
import os
from coverage import env
from coverage.misc import contract
# An attribute that will be set on the module to indicate that it has been
# monkey-patched.
PATCHED_MARKER = "_coverage$patched"
if env.PYVERSION >= (3, 4):
OriginalProcess = multiprocessing.process.BaseProcess
else:
OriginalProcess = multiprocessing.Process
original_bootstrap = OriginalProcess._bootstrap
class ProcessWithCoverage(OriginalProcess):
"""A replacement for multiprocess.Process that starts coverage."""
def _bootstrap(self):
"""Wrapper around _bootstrap to start coverage."""
from coverage import Coverage # avoid circular import
cov = Coverage(data_suffix=True)
cov._warn_preimported_source = False
cov.start()
debug = cov._debug
try:
if debug.should("multiproc"):
debug.write("Calling multiprocessing bootstrap")
return original_bootstrap(self)
finally:
if debug.should("multiproc"):
debug.write("Finished multiprocessing bootstrap")
cov.stop()
cov.save()
if debug.should("multiproc"):
debug.write("Saved multiprocessing data")
class Stowaway(object):
"""An object to pickle, so when it is unpickled, it can apply the monkey-patch."""
def __init__(self, rcfile):
self.rcfile = rcfile
def __getstate__(self):
return {'rcfile': self.rcfile}
def __setstate__(self, state):
patch_multiprocessing(state['rcfile'])
@contract(rcfile=str)
def patch_multiprocessing(rcfile):
"""Monkey-patch the multiprocessing module.
This enables coverage measurement of processes started by multiprocessing.
This involves aggressive monkey-patching.
`rcfile` is the path to the rcfile being used.
"""
if hasattr(multiprocessing, PATCHED_MARKER):
return
if env.PYVERSION >= (3, 4):
OriginalProcess._bootstrap = ProcessWithCoverage._bootstrap
else:
multiprocessing.Process = ProcessWithCoverage
# Set the value in ProcessWithCoverage that will be pickled into the child
# process.
os.environ["COVERAGE_RCFILE"] = rcfile
# When spawning processes rather than forking them, we have no state in the
    # new process. We sneak in there with a Stowaway: we stuff one of our own
    # objects into the data that gets pickled and sent to the sub-process. When
    # the Stowaway is unpickled, its __setstate__ method is called, which
# re-applies the monkey-patch.
# Windows only spawns, so this is needed to keep Windows working.
try:
from multiprocessing import spawn
original_get_preparation_data = spawn.get_preparation_data
except (ImportError, AttributeError):
pass
else:
def get_preparation_data_with_stowaway(name):
"""Get the original preparation data, and also insert our stowaway."""
d = original_get_preparation_data(name)
d['stowaway'] = Stowaway(rcfile)
return d
spawn.get_preparation_data = get_preparation_data_with_stowaway
setattr(multiprocessing, PATCHED_MARKER, True)
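# Added illustration (not part of coverage.py): a standalone sketch of the
# pickle side-channel Stowaway exploits. Unpickling calls __setstate__, so
# state smuggled through the preparation data can run code in the child
# process. The class and function names here are hypothetical.
class _DemoStowaway(object):
    """Shows __setstate__ firing as a side effect of unpickling."""
    last_message = None

    def __getstate__(self):
        return {'msg': 'hello from the parent'}

    def __setstate__(self, state):
        # Stowaway re-applies patch_multiprocessing() at this point.
        _DemoStowaway.last_message = state['msg']


def _demo_stowaway_roundtrip():
    """Round-trip in-process; in real use the loads() happens in the child."""
    import pickle
    pickle.loads(pickle.dumps(_DemoStowaway()))
    return _DemoStowaway.last_message  # -> 'hello from the parent'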
|
dardevelin/rhythmbox-shuffle
|
plugins/context/ArtistTab.py
|
Python
|
gpl-2.0
| 13,087
| 0.015741
|
# -*- Mode: python; coding: utf-8; tab-width: 8; indent-tabs-mode: t; -*-
#
# Copyright (C) 2009 John Iacona
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# The Rhythmbox authors hereby grant permission for non-GPL compatible
# GStreamer plugins to be used and distributed together with GStreamer
# and Rhythmbox. This permission is above and beyond the permissions granted
# by the GPL license by which Rhythmbox is covered. If you modify this code
# you may extend this exception to your version of the code, but you are not
# obligated to do so. If you do not wish to do so, delete this exception
# statement from your version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
import re, os
import cgi
import urllib
import xml.dom.minidom as dom
from mako.template import Template
import rb
import LastFM
from gi.repository import WebKit
from gi.repository import GObject, Gtk
from gi.repository import RB
import gettext
gettext.install('rhythmbox', RB.locale_dir())
class ArtistTab (GObject.GObject):
__gsignals__ = {
'switch-tab' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE,
(GObject.TYPE_STRING,))
}
def __init__ (self, shell, buttons, ds, view):
GObject.GObject.__init__ (self)
self.shell = shell
self.sp = shell.props.shell_player
self.db = shell.props.db
self.buttons = buttons
self.button = Gtk.ToggleButton (label=_("Artist"))
self.datasource = ds
self.view = view
self.artist = None
self.active = False
self.button.show()
self.button.set_relief (Gtk.ReliefStyle.NONE)
self.button.set_focus_on_click(False)
self.button.connect ('clicked',
lambda button : self.emit('switch-tab', 'artist'))
buttons.pack_start (self.button, True, True, 0)
def activate (self):
print "activating Artist Tab"
self.button.set_active(True)
self.active = True
self.reload ()
def deactivate (self):
print "deactivating Artist Tab"
self.button.set_active(False)
self.active = False
def reload (self):
        entry = self.sp.get_playing_entry ()
if entry is None:
print "Nothing playing"
return None
artist = entry.get_string (RB.RhythmDBPropType.ARTIST)
if self.active and self.artist != artist:
self.datasource.fetch_artist_data (artist)
self.view.loading (artist)
else:
self.view.load_view()
self.artist = artist
class ArtistView (GObject.GObject):
    def __init__ (self, shell, plugin, webview, ds):
GObject.GObject.__init__ (self)
self.webview = webview
self.ds = ds
self.shell = shell
self.plugin = plugin
self.file = ""
plugindir = plugin.plugin_info.get_data_dir()
self.basepath = "file://" + urllib.pathname2url (plugindir)
self.load_tmpl ()
self.connect_signals ()
def load_view (self):
self.webview.load_string (self.file, 'text/html', 'utf-8', self.basepath)
def loading (self, current_artist):
self.loading_file = self.loading_template.render (
artist = current_artist,
info = _("Loading biography for %s") % current_artist,
song = "",
basepath = self.basepath)
self.webview.load_string (self.loading_file, 'text/html', 'utf-8', self.basepath)
def load_tmpl (self):
self.path = rb.find_plugin_file(self.plugin, 'tmpl/artist-tmpl.html')
self.loading_path = rb.find_plugin_file (self.plugin, 'tmpl/loading.html')
self.template = Template (filename = self.path, module_directory = self.plugin.tempdir)
self.loading_template = Template (filename = self.loading_path, module_directory = self.plugin.tempdir)
self.styles = self.basepath + '/tmpl/main.css'
def connect_signals (self):
self.air_id = self.ds.connect ('artist-info-ready', self.artist_info_ready)
def artist_info_ready (self, ds):
# Can only be called after the artist-info-ready signal has fired.
# If called any other time, the behavior is undefined
try:
info = ds.get_artist_info ()
small, med, big = info['images'] or (None, None, None)
summary, full_bio = info['bio'] or (None, None)
self.file = self.template.render (artist = ds.get_current_artist (),
error = ds.get_error (),
image = med,
fullbio = full_bio,
shortbio = summary,
datasource = LastFM.datasource_link (self.basepath),
stylesheet = self.styles )
self.load_view ()
except Exception, e:
print "Problem in info ready: %s" % e
class ArtistDataSource (GObject.GObject):
__gsignals__ = {
'artist-info-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-similar-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-top-tracks-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
'artist-top-albums-ready' : (GObject.SIGNAL_RUN_LAST, GObject.TYPE_NONE, ()),
}
def __init__ (self, info_cache, ranking_cache):
GObject.GObject.__init__ (self)
self.current_artist = None
self.error = None
self.artist = {
'info' : {
'data' : None,
'signal' : 'artist-info-ready',
'function' : 'getinfo',
'cache' : info_cache,
'parsed' : False,
},
'similar' : {
'data' : None,
'signal' : 'artist-similar-ready',
'function' : 'getsimilar',
'cache' : info_cache,
'parsed' : False,
},
'top_albums' : {
'data' : None,
'signal' : 'artist-top-albums-ready',
'function' : 'gettopalbums',
'cache' : ranking_cache,
'parsed' : False,
},
'top_tracks' : {
'data' : None,
'signal' : 'artist-top-tracks-ready',
'function' : 'gettoptracks',
'cache' : ranking_cache,
'parsed' : False,
},
}
def extract (self, data, position):
"""
Safely extract the data from an xml node. Returns data
at position or None if position does not exist
"""
try:
return data[position].firstChild.data
except Exception, e:
return None
def fetch_top_tracks (self, artist):
if LastFM.user_has_account() is False:
return
artist = urllib.quote_plus (artist)
function = self.artist['top_tracks']['function']
cache = self.artist['top_tracks']['cache']
cachekey = "lastfm:artist:%s:%s" % (function, artist)
url = '%sartist.%s&artist=%s&api_key=%s' % (LastFM.URL_PREFIX,
function, artist, LastFM.API_KEY)
cache.fetch(cachekey, url, self.fetch_artist_d
|
kurtrr/open-numismat
|
OpenNumismat/Settings.py
|
Python
|
gpl-3.0
| 14,504
| 0.000622
|
# -*- coding: utf-8 -*-
from PyQt5.QtCore import Qt, QLocale, QSettings, QMargins
from PyQt5.QtWidgets import *
import OpenNumismat
from OpenNumismat.EditCoinDialog.FormItems import NumberEdit
from OpenNumismat.Collection.CollectionFields import CollectionFieldsBase
from OpenNumismat.Reports import Report
from OpenNumismat.Tools.DialogDecorators import storeDlgSizeDecorator
class BaseSettings(dict):
def __init__(self, autoSave=False):
self.__autoSave = autoSave
self.__items = {}
def keys(self):
raise NotImplementedError
def items(self):
result = []
for key in self.keys():
result.append((key, self.__getitem__(key)))
return result
def values(self):
result = []
for key in self.keys():
result.append(self.__getitem__(key))
return result
def __getitem__(self, key):
if key in self.__items:
return self.__items[key]
if key in self.keys():
value = self._getValue(key)
self.__items[key] = value
return value
else:
raise KeyError(key)
def __setitem__(self, key, val):
if key in self.keys():
self.__items[key] = val
if self.__autoSave:
self._saveValue(key, val)
else:
raise KeyError(key)
def setAutoSave(self, autoSave):
self.__autoSave = autoSave
def autoSave(self):
return self.__autoSave
def save(self):
for key in self.keys():
self._saveValue(key, self.__getitem__(key))
def _getValue(self, key):
raise NotImplementedError
def _saveValue(self, key, val):
raise NotImplementedError
def _getLocale():
locale = QLocale.system().name()
if '_' in locale:
return locale.split('_')[0]
else:
return locale
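# Added illustration (not part of OpenNumismat): the minimal contract a
# BaseSettings subclass fulfils -- keys() plus the two storage hooks; the
# base class then provides per-key caching and optional auto-save.
class _DemoSettings(BaseSettings):
    _store = {'answer': 42}

    def keys(self):
        return self._store.keys()

    def _getValue(self, key):
        return self._store[key]

    def _saveValue(self, key, val):
        self._store[key] = val
# Usage: _DemoSettings(autoSave=True)['answer'] -> 42, cached on first read.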
class Settings(BaseSettings):
Default = {'locale': _getLocale(),
'backup': OpenNumismat.HOME_PATH + "/backup/",
'reference': OpenNumismat.HOME_PATH + "/reference.ref",
'error': False, 'updates': False,
'free_numeric': False,
'store_sorting': False,
'sort_filter': True,
'sort_tree': True,
'title_format': 'default',
'image_name': False,
'image_path': OpenNumismat.HOME_PATH + "/ONImages/",
'id_dates': False, #krr:todo: prompt for this on import & reset
'template': 'cbr',
'ImageSideLen': 1024}
def __init__(self, autoSave=False):
super(Settings, self).__init__(autoSave)
self.settings = QSettings()
def keys(self):
return self.Default.keys()
def _getValue(self, key):
value = self.settings.value('mainwindow/' + key)
if value:
if key in ('error', 'updates', 'free_numeric', 'store_sorting',
'sort_filter', 'sort_tree', 'image_name', 'id_dates'):
# Convert boolean value
value = (value == 'true')
else:
value = self.Default[key]
return value
def _saveValue(self, key, val):
self.settings.setValue('mainwindow/' + key, val)
class MainSettingsPage(QWidget):
Languages = [("Čeština", 'cs'), ("English", 'en'), ("Ελληνικά", 'el'),
("Español", 'es'), ("Deutsch", 'de'), ("Italiano", 'it'),
("Magyar", 'hu'), ("Polski", 'pl'), ("Português", 'pt'),
("Русский", 'ru'), ("Український", 'uk')]
def __init__(self, collection, parent=None):
super(MainSettingsPage, self).__init__(parent)
settings = Settings()
layout = QFormLayout()
layout.setRowWrapPolicy(QFormLayout.WrapLongRows)
current = 0
self.languageSelector = QComboBox(self)
for i, lang in enumerate(self.Languages):
self.languageSelector.addItem(lang[0], lang[1])
if settings['locale'] == lang[1]:
current = i
self.languageSelector.setCurrentIndex(current)
self.languageSelector.setSizePolicy(QSizePolicy.Fixed,
QSizePolicy.Fixed)
layout.addRow(self.tr("Language"), self.languageSelector)
self.backupFolder = QLineEdit(self)
self.backupFolder.setMinimumWidth(120)
self.backupFolder.setText(settings['backup'])
style = QApplication.style()
icon = style.standardIcon(QStyle.SP_DirOpenIcon)
self.backupFolderButton = QPushButton(icon, '', self)
self.backupFolderButton.clicked.connect(self.backupButtonClicked)
hLayout = QHBoxLayout()
hLayout.addWidget(self.backupFolder)
hLayout.addWidget(self.backupFolderButton)
hLayout.setContentsMargins(QMargins())
layout.addRow(self.tr("Backup folder"), hLayout)
        self.reference = QLineEdit(self)
self.reference.setMinimumWidth(120)
self.reference.setText(settings['reference'])
icon = style.standardIcon(QStyle.SP_DialogOpenButton)
        self.referenceButton = QPushButton(icon, '', self)
self.referenceButton.clicked.connect(self.referenceButtonClicked)
hLayout = QHBoxLayout()
hLayout.addWidget(self.reference)
hLayout.addWidget(self.referenceButton)
hLayout.setContentsMargins(QMargins())
layout.addRow(self.tr("Reference"), hLayout)
self.errorSending = QCheckBox(
self.tr("Send error info to author"), self)
self.errorSending.setChecked(settings['error'])
layout.addRow(self.errorSending)
self.checkUpdates = QCheckBox(
self.tr("Automatically check for updates"), self)
self.checkUpdates.setChecked(settings['updates'])
layout.addRow(self.checkUpdates)
self.imageSideLen = NumberEdit(self)
self.imageSideLen.setMaximumWidth(60)
layout.addRow(self.tr("Max image side len"), self.imageSideLen)
self.imageSideLen.setText(str(settings['ImageSideLen']))
self.freeNumeric = QCheckBox(
self.tr("Free format numeric fields"), self)
self.freeNumeric.setChecked(settings['free_numeric'])
layout.addRow(self.freeNumeric)
self.storeSorting = QCheckBox(
self.tr("Store column sorting"), self)
self.storeSorting.setChecked(settings['store_sorting'])
layout.addRow(self.storeSorting)
self.sortFilter = QCheckBox(
self.tr("Sort items in filters (slow)"), self)
self.sortFilter.setChecked(settings['sort_filter'])
layout.addRow(self.sortFilter)
self.sortTree = QCheckBox(
self.tr("Sort items in tree (slow)"), self)
self.sortTree.setChecked(settings['sort_tree'])
layout.addRow(self.sortTree)
self.imageName = QCheckBox(
self.tr("Point to image file rather than storing image"), self)
self.imageName.setChecked(settings['image_name'])
layout.addRow(self.imageName)
self.imagePath = QLineEdit(self)
self.imagePath.setMinimumWidth(120)
self.imagePath.setText(settings['image_path'])
icon = style.standardIcon(QStyle.SP_DialogOpenButton)
self.imagePathButton = QPushButton(icon, '', self)
self.imagePathButton.clicked.connect(self.imagePathButtonClicked)
hLayout = QHBoxLayout()
hLayout.addWidget(self.imagePath)
hLayout.addWidget(self.imagePathButton)
hLayout.setContentsMargins(QMargins())
layout.addRow(self.tr("Image Path"), hLayout)
self.idDates = QCheckBox(
self.tr("Tellico import: keep ID and created/modified dates"), self)
self.idDates.setChecked(settings['id_dates'])
layout.addRow(self.idDates)
current = 0
self.templateSelector = QComboBox(self)
for i, template in enumerate(Report.scanTemplates()):
self.templateSelector.addItem(template)
|
Salamek/DwaPython
|
tests/AllTests.py
|
Python
|
gpl-3.0
| 973
| 0.00925
|
# Copyright (C) 2014 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
__author__ = "Adam Schubert <adam.schubert@sg1-game.net>"
__date__ = "$12.10.2014 1:49:02$"
from tests.UserTest import *
from tests.ApiTest import *
from tests.ClanTest import *
from tests.CharacterTest import *
from tests.ServerTest import *
from tests.FactionTest import *
|
xlvector/yoyo-migrations
|
yoyo/tests/test_parse_uri.py
|
Python
|
bsd-3-clause
| 1,061
| 0.001885
|
from yoyo.connections import parse_uri, unparse_uri
def _test_parse_uri(connection_string, expected_uri_tuple):
uri_tuple = parse_uri(connection_string)
assert isinstance(uri_tuple, tuple)
assert (uri_tuple == expected_uri_tuple)
def _test_unparse_uri(uri_tuple, expected_connection_string):
connection_string = unparse_uri(uri_tuple)
    assert isinstance(connection_string, str)
    assert (connection_string == expected_connection_string)
def test_uri_without_db_params():
connection_string = 'postgres://user:password@server:7777/database'
uri_tuple = ('postgres', 'user', 'password', 'server', 7777, 'database', None)
_test_parse_uri(connection_string, uri_tuple)
_test_unparse_uri(uri_tuple, connection_string)
def test_parse_uri_with_db_params():
connection_string = 'odbc://user:password@server:7777/database?DSN=dsn'
uri_tuple = ('odbc', 'user', 'password', 'server', 7777, 'database', {'DSN': 'dsn'})
_test_parse_uri(connection_string, uri_tuple)
_test_unparse_uri(uri_tuple, connection_string)
|
DigitalCampus/oppia-data-scripts-hews-2015
|
scripts/hews-quiz-progress-threshold.py
|
Python
|
gpl-3.0
| 5,647
| 0.022844
|
import argparse
import json
import datetime
import os
import sys
from codecs import open
def run(cohort_id, threshold, period, course_range):
from django.contrib.auth.models import User
from django.db.models import Sum, Max, Min, Avg
from django.utils.html import strip_tags
from oppia.models import Activity, Course, Cohort, CourseCohort, Participant, Tracker
from oppia.quiz.models import Quiz, QuizQuestion, QuizAttempt, QuizAttemptResponse
print "Cohort:" + str(cohort_id)
print "Threshold: " + str (threshold)
print "Period: " + period
print "Course Range: " + course_range
if period == 'project':
START_DATE = datetime.datetime(2015,4,01,0,0,0)
END_DATE = datetime.datetime(2016,10,31,23,59,59)
elif period == 'training':
START_DATE = datetime.datetime(2015,4,01,0,0,0)
END_DATE = datetime.datetime(2015,7,31,23,59,59)
elif period == 'cpd':
START_DATE = datetime.datetime(2015,8,01,0,0,0)
END_DATE = datetime.datetime(2016,10,31,23,59,59)
elif period == 'op3.4-mar16':
START_DATE = datetime.datetime(2015,8,01,0,0,0)
END_DATE = datetime.datetime(2016,03,31,23,59,59)
else:
print "Invalid period supplied"
sys.exit()
students = User.objects.filter(participant__cohort_id=cohort_id, participant__role=Participant.STUDENT).order_by('username')
if course_range == 'ancpnc':
courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['anc1-et','anc2-et','pnc-et'])
elif course_range == 'anc':
courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['anc1-et','anc2-et'])
    elif course_range == 'pnc':
        courses = Course.objects.filter(coursecohort__cohort_id = cohort_id, shortname__in=['pnc-et'])
    elif course_range == 'all':
courses = Course.objects.filter(coursecohort__cohort_id = cohort_id)
else:
print "Invalid course range supplied"
sys.exit()
filename = 'hew-quiz-progress-' + period + '-' + course_range + '-' + str(threshold) + '.html'
    output_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '_output', filename)
out_file = open(output_file, 'w', 'utf-8')
out_file.write("<html>")
out_file.write("<head>")
out_file.write('<meta http-equiv="Content-Type" content="text/html;charset=utf-8" />')
out_file.write("<style> td {text-align:center;} #footer { font-size:small; font-style:italic; } </style>")
out_file.write("</head>")
out_file.write("<body>")
out_file.write("<h3>Courses: %s</h3>" % ','.join(courses.values_list('shortname', flat=True)))
out_file.write("<h3>Quiz pass threshold set at: %d%%</h3>" % threshold)
out_file.write("<h3>Date range: %s to %s</h3>" % (START_DATE.strftime('%d-%b-%Y'), END_DATE.strftime('%d-%b-%Y')))
out_file.write("<table>")
out_file.write("<tr>")
out_file.write("<th>Student</th>")
out_file.write("<th>No Quizzes</th>")
out_file.write("<th>No Attempted</th>")
out_file.write("<th>No Passed</th>")
out_file.write("</tr>")
for s in students:
print s.first_name + " " + s.last_name
out_file.write("<tr>")
out_file.write("<td>%s %s</td>" % (s.first_name, s.last_name))
no_quizzes = 0
no_attempted = 0
no_passed = 0
for c in courses:
# other quizzes - no times taken, max score, min score, first score, most recent score, average score
act_quizzes = Activity.objects.filter(section__course=c, baseline=False, type="quiz")
no_quizzes += act_quizzes.count()
quiz_digests = act_quizzes.values_list('digest', flat=True).distinct()
quizzes = Quiz.objects.filter(quizprops__name='digest', quizprops__value__in=quiz_digests)
for q in quizzes:
qas = QuizAttempt.objects.filter(quiz=q,user=s).aggregate(user_max_score=Max('score'), max_score=Max('maxscore'))
print qas
if qas['user_max_score'] is not None:
no_attempted += 1
if qas['user_max_score'] * 100/ qas['max_score'] >= threshold:
no_passed += 1
out_file.write("<td>%d</td>" % no_quizzes)
out_file.write("<td>%d</td>" % no_attempted)
out_file.write("<td>%d</td>" % no_passed)
out_file.write("</tr>\n")
out_file.write("</table>")
out_file.write("<div id='footer'>Report generated at %s by script %s</div>" % (datetime.datetime.now().strftime('%Y-%m-%d %H:%M'),os.path.realpath(__file__)))
out_file.write("</body></html>")
out_file.close()
def title_lang(title,lang):
try:
titles = json.loads(title)
if lang in titles:
return titles[lang]
else:
for l in titles:
return titles[l]
except:
pass
return title
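# Added illustration (not part of the original script): title_lang picks the
# requested language, falls back to any available one, and returns the raw
# string for non-JSON input.
def _demo_title_lang():
    assert title_lang('{"en": "ANC", "am": "ANC-am"}', 'en') == 'ANC'
    assert title_lang('{"am": "ANC-am"}', 'en') == 'ANC-am'
    assert title_lang('plain title', 'en') == 'plain title'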
if __name__ == "__main__":
import django
django.setup()
parser = argparse.ArgumentParser()
parser.add_argument("--cohort_id", help="", type=int)
parser.add_argument("--threshold", help="", type=int)
parser.add_argument("--period", help="", choices=['project','training','cpd','op3.4-mar16'])
parser.add_argument("--course_range", help="", choices=['all','ancpnc','anc', 'pnc'])
args = parser.parse_args()
run(args.cohort_id, args.threshold, args.period, args.course_range)
|
monikagrabowska/osf.io
|
kinto/tests/core/resource/test_pagination.py
|
Python
|
apache-2.0
| 10,656
| 0
|
import random
from base64 import b64encode, b64decode
import mock
from six.moves.urllib.parse import parse_qs, urlparse
from pyramid.httpexceptions import HTTPBadRequest
from kinto.core.utils import json
from . import BaseTest
class PaginationTest(BaseTest):
def setUp(self):
super(PaginationTest, self).setUp()
self.patch_known_field.start()
indices = list(range(20))
random.shuffle(indices)
for i in indices:
record = {
'title': 'MoFo #{0:02}'.format(i),
'status': i % 4,
'unread': (i % 2 == 0)
}
self.model.create_record(record)
def _setup_next_page(self):
next_page = self.last_response.headers['Next-Page']
url_fragments = urlparse(next_page)
queryparams = parse_qs(url_fragments.query)
self.resource.request.GET['_token'] = queryparams['_token'][0]
self.resource.request.GET['_limit'] = queryparams['_limit'][0]
self.last_response.headers = {}
return queryparams
def test_return_data(self):
result = self.resource.collection_get()
self.assertEqual(len(result['data']), 20)
def test_handle_limit(self):
self.resource.request.GET = {'_limit': '10'}
result = self.resource.collection_get()
self.assertEqual(len(result['data']), 10)
def test_handle_forced_limit(self):
with mock.patch.dict(self.resource.request.registry.settings, [
                ('paginate_by', 10)]):
            result = self.resource.collection_get()
            self.assertEqual(len(result['data']), 10)
def test_forced_limit_has_precedence_over_provided_limit(self):
with mock.patch.dict(self.resource.request.registry.settings, [
('paginate_by', 5)]):
self.resource.request.GET = {'_limit': '10'}
result = self.resource.collection_get()
self.assertEqual(len(result['data']), 5)
def test_return_next_page_url_is_given_in_headers(self):
self.resource.request.GET = {'_limit': '10'}
self.resource.collection_get()
self.assertIn('Next-Page', self.last_response.headers)
def test_next_page_url_has_got_querystring(self):
self.resource.request.GET = {'_limit': '10'}
self.resource.collection_get()
queryparams = self._setup_next_page()
self.assertIn('_limit', queryparams)
self.assertIn('_token', queryparams)
def test_next_page_url_gives_distinct_records(self):
self.resource.request.GET = {'_limit': '10'}
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
results_id1 = set([x['id'] for x in results1['data']])
results_id2 = set([x['id'] for x in results2['data']])
self.assertFalse(results_id1.intersection(results_id2))
def test_next_page_url_gives_distinct_records_with_forced_limit(self):
with mock.patch.dict(self.resource.request.registry.settings, [
('paginate_by', 5)]):
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
results_id1 = set([x['id'] for x in results1['data']])
results_id2 = set([x['id'] for x in results2['data']])
self.assertFalse(results_id1.intersection(results_id2))
def test_twice_the_same_next_page(self):
self.resource.request.GET = {'_limit': '10'}
self.resource.collection_get()
first_next = self.last_response.headers['Next-Page']
self.resource.collection_get()
second_next = self.last_response.headers['Next-Page']
self.assertEqual(first_next, second_next)
def test_stops_giving_next_page_at_the_end_of_first_page(self):
self.resource.collection_get()
self.assertNotIn('Next-Page', self.last_response.headers)
def test_stops_giving_next_page_at_the_end_sets(self):
self.resource.request.GET = {'_limit': '11'}
self.resource.collection_get()
self._setup_next_page()
self.resource.collection_get()
self.assertNotIn('Next-Page', self.last_response.headers)
def test_stops_giving_next_page_at_the_end_sets_on_exact_limit(self):
self.resource.request.GET = {'_limit': '10'}
self.resource.collection_get()
self._setup_next_page()
self.resource.collection_get()
self.assertNotIn('Next-Page', self.last_response.headers)
def test_handle_simple_sorting(self):
self.resource.request.GET = {'_sort': '-status', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_multiple_sorting(self):
self.resource.request.GET = {'_sort': '-status,title', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_filtering_sorting(self):
self.resource.request.GET = {'_sort': '-status,title', 'status': '2',
'_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '3'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_sorting_desc(self):
self.resource.request.GET = {'_sort': 'status,-title', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_handle_since(self):
self.resource.request.GET = {'_since': '123', '_limit': '20'}
expected_results = self.resource.collection_get()
self.resource.request.GET['_limit'] = '10'
results1 = self.resource.collection_get()
self._setup_next_page()
results2 = self.resource.collection_get()
self.assertEqual(expected_results['data'],
results1['data'] + results2['data'])
def test_wrong_limit_raise_400(self):
self.resource.request.GET = {'_since': '123', '_limit': 'toto'}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)
def test_token_wrong_base64(self):
self.resource.request.GET = {'_since': '123', '_limit': '20',
'_token': '123'}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)
def test_token_wrong_json(self):
self.resource.request.GET = {
'_since': '123', '_limit': '20',
'_token': b64encode('{"toto":'.encode('ascii')).decode('ascii')}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)
def test_token_wrong_json_fields(self):
badtoken = '{"toto": {"tutu": 1}}'
self.resource.request.GET = {
'_since': '123', '_limit': '20',
'_token': b64encode(badtoken.encode('ascii')).decode('ascii')}
self.assertRaises(HTTPBadRequest, self.resource.collection_get)
def test_raises_bad_request_if_token_has_bad_data_structure(self):
invalid_token = json.dumps([[('last_modified', 0, '>')]])
self.resource.request.GET = {
'_since':
|
simsong/grr-insider
|
gui/views.py
|
Python
|
apache-2.0
| 7,359
| 0.009512
|
#!/usr/bin/env python
"""Main Django renderer."""
import os
import pdb
import time
from django import http
from django import shortcuts
from django import template
from django.views.decorators import csrf
import logging
from grr import gui
from grr.gui import renderers
from grr.gui import webauth
from grr.lib import access_control
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import flags
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
config_lib.DEFINE_string("AdminUI.page_title",
"GRR Admin Console",
"Page title of the Admin UI.")
config_lib.DEFINE_string("AdminUI.heading",
"GRR Rapid Response",
"Dashboard heading displayed in the Admin UI.")
config_lib.DEFINE_string("AdminUI.report_url",
"http://code.google.com/p/grr/issues/list",
"URL of the 'Report a problem' link.")
config_lib.DEFINE_string("AdminUI.help_url",
"https://code.google.com/p/grr/",
"URL of the 'Help' link.")
DOCUMENT_ROOT = os.path.join(os.path.dirname(gui.__file__), "static")
class ViewsInit(registry.InitHook):
pre = ["StatsInit"]
def RunOnce(self):
"""Run this once on init."""
# Renderer-aware metrics
stats.STATS.RegisterEventMetric(
"ui_renderer_latency", fields=[("renderer", str)])
stats.STATS.RegisterEventMetric(
"ui_renderer_response_size", fields=[("renderer", str)],
units=stats.MetricUnits.BYTES)
stats.STATS.RegisterCounterMetric(
"ui_renderer_failure", fields=[("renderer", str)])
# General metrics
stats.STATS.RegisterCounterMetric("ui_unknown_renderer")
stats.STATS.RegisterCounterMetric("http_access_denied")
stats.STATS.RegisterCounterMetric("http_server_error")
@webauth.SecurityCheck
@csrf.ensure_csrf_cookie # Set the csrf cookie on the homepage.
def Homepage(request):
"""Basic handler to render the index page."""
# We build a list of all js files to include by looking at the list
# of renderers modules. JS files are always named in accordance with
# renderers modules names. I.e. if there's a renderers package called
# grr.gui.plugins.acl_manager, we expect a js files called acl_manager.js.
renderers_js_files = set()
for cls in renderers.Renderer.classes.values():
if aff4.issubclass(cls, renderers.Renderer) and cls.__module__:
renderers_js_files.add(cls.__module__.split(".")[-1] + ".js")
context = {"page_title": config_lib.CONFIG["AdminUI.page_title"],
"heading": config_lib.CONFIG["AdminUI.heading"],
"report_url": config_lib.CONFIG["AdminUI.report_url"],
"help_url": config_lib.CONFIG["AdminUI.help_url"],
"renderers_js": renderers_js_files}
return shortcuts.render_to_response(
"base.html", context, context_instance=template.RequestContext(request))
@webauth.SecurityCheck
def RenderBinaryDownload(request):
"""Basic handler to allow downloads of aff4:/config/executables files."""
path, filename = request.path.split("/", 2)[-1].rsplit("/", 1)
if not path or not filename:
return AccessDenied("Error: Invalid path.")
request.REQ = request.REQUEST
def Generator():
with aff4.FACTORY.Open(aff4_path, aff4_type="GRRSignedBlob",
token=BuildToken(request, 60)) as fd:
while True:
data = fd.Read(1000000)
if not data: break
yield data
base_path = rdfvalue.RDFURN("aff4:/config/executables")
aff4_path = base_path.Add(path).Add(filename)
if not aff4_path.RelativeName(base_path):
# Check for path traversals.
return AccessDenied("Error: Invalid path.")
filename = aff4_path.Basename()
response = http.HttpResponse(content=Generator(),
content_type="binary/octet-stream")
response["Content-Disposition"] = ("attachment; filename=%s" % filename)
return response
@webauth.SecurityCheck
@renderers.ErrorHandler()
def RenderGenericRenderer(request):
"""Django handler for rendering registered GUI Elements."""
try:
action, renderer_name = request.path.split("/")[-2:]
renderer_cls = renderers.Renderer.GetPlugin(name=renderer_name)
except KeyError:
stats.STATS.IncrementCounter("ui_unknown_renderer")
return AccessDenied("Error: Renderer %s not found" % renderer_name)
  # Check that the action is valid
["Layout", "RenderAjax", "Download", "Validate"].index(action)
renderer = renderer_cls()
result = http.HttpResponse(content_type="text/html")
# Pass the request only from POST parameters. It is much more convenient to
  # deal with normal dicts than Django's Query objects so we convert here.
if flags.FLAGS.debug:
# Allow both POST and GET for debugging
request.REQ = request.POST.dict()
request.REQ.update(request.GET.dict())
else:
# Only POST in production for CSRF protections.
request.REQ = request.POST.dict()
# Build the security token for this request
request.token = BuildToken(request, renderer.max_execution_time)
# Allow the renderer to check its own ACLs.
renderer.CheckAccess(request)
try:
# Does this renderer support this action?
method = getattr(renderer, action)
start_time = time.time()
try:
result = method(request, result) or result
finally:
total_time = time.time() - start_time
stats.STATS.RecordEvent("ui_renderer_latency",
total_time, fields=[renderer_name])
except access_control.UnauthorizedAccess, e:
result = http.HttpResponse(content_type="text/html")
result = renderers.Renderer.GetPlugin("UnauthorizedRenderer")().Layout(
request, result, exception=e)
except Exception:
stats.STATS.IncrementCounter("ui_renderer_failure",
fields=[renderer_name])
if flags.FLAGS.debug:
pdb.post_mortem()
raise
if not isinstance(result, http.HttpResponse):
raise RuntimeError("Renderer returned invalid response %r" % result)
# Prepend bad json to break json script inclusion attacks.
content_type = result.get("Content-Type", 0)
if content_type and "json" in content_type.lower():
result.content = ")]}\n" + result.content
return result
def BuildToken(request, execution_time):
"""Build an ACLToken from the request."""
token = access_control.ACLToken(
username=request.user,
reason=request.REQ.get("reason", ""),
process="GRRAdminUI",
expiry=rdfvalue.RDFDatetime().Now() + execution_time)
for field in ["REMOTE_ADDR", "HTTP_X_FORWARDED_FOR"]:
remote_addr = request.META.get(field, "")
if remote_addr:
token.source_ips.append(remote_addr)
return token
def AccessDenied(message):
"""Return an access denied Response object."""
response = shortcuts.render_to_response("404.html", {"message": message})
logging.warn(message)
response.status_code = 403
stats.STATS.IncrementCounter("http_access_denied")
return response
def ServerError(unused_request, template_name="500.html"):
"""500 Error handler."""
stats.STATS.IncrementCounter("http_server_error")
response = shortcuts.render_to_response(template_name)
response.status_code = 500
return response
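# Added illustration (not part of GRR): what a cooperating client does with
# the ")]}\n" prefix prepended above -- strips it before JSON parsing. The
# helper name is hypothetical.
def StripXssiPrefix(raw_json):
  """Remove the anti-JSON-hijacking prefix so the payload parses cleanly."""
  prefix = ")]}\n"
  if raw_json.startswith(prefix):
    raw_json = raw_json[len(prefix):]
  return raw_json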
|
jbowes/yselect
|
test/fakes/repository.py
|
Python
|
gpl-2.0
| 1,532
| 0.001305
|
# yselect - An RPM/Yum package handling frontend.
# Copyright (C) 2006 James Bowes <jbowes@redhat.com>
# Copyright (C) 2006 Devan Goodwin <dg@fnordia.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
"""
Fake Repository object.
Copyright (C) 2006 James Bowes <jbowes@redhat.com>
"""
__revision__ = "$Rev$"
class Repository:
"""
Fake Repository object.
"""
def __init__(self):
pass
def getPackage(self):
"""
Pretend to download a package.
Return the location of the downloaded package.
"""
pass
def getHeader(self):
"""
Pretend to download a package header.
Return the location of the downloaded header.
"""
pass
def getPackageSack(self):
""" Return the PackageSack for this Repository. """
pass
|
eisen-dev/eisen_engine
|
core/mysql_config.py
|
Python
|
gpl-3.0
| 1,454
| 0.003439
|
# (c) 2015, Alice Ferrazzi <alice.ferrazzi@gmail.com>
#
# This file is part of Eisen
#
# Eisen is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eisen is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Eisen. If not, see <http://www.gnu.org/licenses/>.
from sqlalchemy import *
from bin.config_module import Config
import time
def start_engine():
engine = create_engine(Config.SQLALCHEMY_DATABASE_URI,
echo=True)
metadata = MetaData(bind=engine)
return engine, metadata
def sendTaskToDb(engine, metadata, connection, task, target_host):
while task.ready() is False:
time.sleep(1)
tasks_result = str(task.get())
repository_package = Table('task_result', metadata, autoload=True,
autoload_with=engine)
stmt = repository_package.insert()
    # execution_options() belongs on the connection, not on the result of
    # execute(); set autocommit first. task.id (the string identifier) is
    # stored rather than the AsyncResult object itself.
    connection.execution_options(autocommit=True).execute(
        stmt,
        task_id=task.id,
        task_result=tasks_result,
        target_host=target_host,
    )
connection.close()
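# Added usage sketch (not part of Eisen): how the helpers above fit together.
# `async_task` stands in for a Celery-style result object exposing
# ready()/get()/id -- a hypothetical name.
#
#     engine, metadata = start_engine()
#     connection = engine.connect()
#     sendTaskToDb(engine, metadata, connection, async_task, 'target.example')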
|
elationfoundation/PyOrgMode
|
build/lib/PyOrgMode/test_clock.py
|
Python
|
gpl-3.0
| 593
| 0.003373
|
import PyOrgMode
import time
import unittest
class TestClockElement(unittest.TestCase):
def test_duration_format(self):
"""Durations are formatted identically to org-mode"""
for hour in '0', '1', '5', '10', '12', '13', '19', '23':
for minute in '00', '01', '29', '40', '59':
orig_str = '%s:%s' % (hour, minute)
orgdate_element = PyOrgMode.OrgDate(orig_str)
formatted_str = orgdate_element.get_value()
                self.assertEqual(formatted_str, orig_str)
if __name__ == '__main__':
unittest.main()
|
nikste/visualizationDemo
|
zeppelin-web/node/npm/node_modules/node-gyp/gyp/pylib/gyp/win_tool.py
|
Python
|
apache-2.0
| 6,979
| 0.006018
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions for Windows builds.
These functions are executed via gyp-win-tool when using the ninja generator.
"""
from ctypes import windll, wintypes
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def main(args):
executor = WinTool()
exit_code = executor.Dispatch(args)
if exit_code is not None:
sys.exit(exit_code)
class LinkLock(object):
"""A flock-style lock to limit the number of concurrent links to one.
Uses a session-local mutex based on the file's directory.
"""
def __enter__(self):
name = 'Local\\%s' % BASE_DIR.replace('\\', '_').replace(':', '_')
self.mutex = windll.kernel32.CreateMutexW(
wintypes.c_int(0),
wintypes.c_int(0),
wintypes.create_unicode_buffer(name))
assert self.mutex
result = windll.kernel32.WaitForSingleObject(
self.mutex, wintypes.c_int(0xFFFFFFFF))
# 0x80 means another process was killed without releasing the mutex, but
# that this process has been given ownership. This is fine for our
# purposes.
assert result in (0, 0x80), (
"%s, %s" % (result, windll.kernel32.GetLastError()))
def __exit__(self, type, value, traceback):
windll.kernel32.ReleaseMutex(self.mutex)
windll.kernel32.CloseHandle(self.mutex)
class WinTool(object):
"""This class performs all the Windows tooling steps. The methods can either
be executed directly, or dispatched from an argument list."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
return getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like recursive-mirror to RecursiveMirror."""
return name_string.title().replace('-', '')
def _GetEnv(self, arch):
"""Gets the saved environment from a file for a given architecture."""
# The environment is saved as an "environment block" (see CreateProcess
# and msvs_emulation for details). We convert to a dict here.
# Drop last 2 NULs, one for list terminator, one for trailing vs. separator.
pairs = open(arch).read()[:-2].split('\0')
kvs = [item.split('=', 1) for item in pairs]
return dict(kvs)
def ExecStamp(self, path):
"""Simple stamp command."""
open(path, 'w').close()
def ExecRecursiveMirror(self, source, dest):
"""Emulation of rm -rf out && cp -af in out."""
if os.path.exists(dest):
if os.path.isdir(dest):
shutil.rmtree(dest)
else:
os.unlink(dest)
if os.path.isdir(source):
shutil.copytree(source, dest)
else:
shutil.copy2(source, dest)
def ExecLinkWrapper(self, arch, *args):
"""Filter diagnostic output from link that looks like:
' Creating library ui.dll.lib and object ui.dll.exp'
This happens when there are exports from the dll or exe.
"""
with LinkLock():
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if not line.startswith(' Creating library '):
print line
return popen.returncode
def ExecManifestWrapper(self, arch, *args):
"""Run manifest tool with environment set. Strip out undesirable warning
(some XML blocks are recognized by the OS loader, but not the manifest
tool)."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if line and 'manifest authoring warning 81010002' not in line:
print line
return popen.returncode
def ExecMidlWrapper(self, arch, outdir, tlb, h, dlldata, iid, proxy, idl,
*flags):
"""Filter noisy filenames output from MIDL compile step that isn't
quietable via command line flags.
"""
args = ['midl', '/nologo'] + list(flags) + [
'/out', outdir,
'/tlb', tlb,
'/h', h,
'/dlldata', dlldata,
'/iid', iid,
'/proxy', proxy,
idl]
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
# Filter junk out of stdout, and write filtered versions. Output we want
# to filter is pairs of lines that look like this:
    # Processing C:\Program Files (x86)\Microsoft SDKs\...\include\objidl.idl
# objidl.idl
lines = out.splitlines()
prefix = 'Processing '
processing = set(os.path.basename(x) for x in lines if x.startswith(prefix))
for line in lines:
if not line.startswith(prefix) and line not in processing:
print line
return popen.returncode
def ExecAsmWrapper(self, arch, *args):
"""Fi
|
lter logo banner from invocations of asm.exe."""
env = self._GetEnv(arch)
# MSVS doesn't assemble x64 asm files.
if arch == 'environment.x64':
return 0
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Copyright (C) Microsoft Corporation') and
not line.startswith('Microsoft (R) Macro Assembler') and
not line.startswith(' Assembling: ') and
line):
print line
return popen.returncode
def ExecRcWrapper(self, arch, *args):
"""Filter logo banner from invocations of rc.exe. Older versions of RC
don't support the /nologo flag."""
env = self._GetEnv(arch)
popen = subprocess.Popen(args, shell=True, env=env,
stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
out, _ = popen.communicate()
for line in out.splitlines():
if (not line.startswith('Microsoft (R) Windows (R) Resource Compiler') and
not line.startswith('Copyright (C) Microsoft Corporation') and
line):
print line
return popen.returncode
def ExecActionWrapper(self, arch, rspfile, *dir):
"""Runs an action command line from a response file using the environment
for |arch|. If |dir| is supplied, use that as the working directory."""
env = self._GetEnv(arch)
args = open(rspfile).read()
dir = dir[0] if dir else None
popen = subprocess.Popen(args, shell=True, env=env, cwd=dir)
popen.wait()
return popen.returncode
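# Added illustration (not part of gyp): a standalone sketch of the
# environment-block format _GetEnv() consumes -- NUL-separated KEY=VALUE
# pairs terminated by two NULs. The sample values are made up.
def _DemoParseEnvironmentBlock():
  """Parse a saved environment block into a dict, mirroring _GetEnv."""
  block = 'PATH=C:\\tools\0ARCH=x64\0\0'
  pairs = block[:-2].split('\0')
  return dict(item.split('=', 1) for item in pairs)  # {'PATH': 'C:\\tools', 'ARCH': 'x64'}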
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
south-coast-science/scs_dfe_eng
|
src/scs_dfe/interface/component/cat24c32.py
|
Python
|
mit
| 2,699
| 0.005187
|
"""
Created on 5 Sep 2016
@author: Bruno Beloff (bruno.beloff@southcoastscience.com)
in /boot/config.txt
# RPi...
# Uncomment for i2c-0 & i2c-3 access (EEPROM programming)
# dtparam=i2c_vc=on
dtoverlay i2c-gpio i2c_gpio_sda=0 i2c_gpio_scl=1
"""
import time
from scs_core.sys.eeprom_image import EEPROMImage
from scs_host.bus.i2c import I2C
from scs_host.sys.host import Host
# --------------------------------------------------------------------------------------------------------------------
class CAT24C32(object):
"""
    Semiconductor Components Industries CAT24C32 32-Kb Serial EEPROM
"""
SIZE = 0x1000 # 4096 bytes
__BUFFER_SIZE = 32
__TWR = 0.005 # seconds
# ----------------------------------------------------------------------------------------------------------------
@classmethod
def __read_image(cls, addr, count):
try:
I2C.EEPROM.start_tx(Host.DFE_EEPROM_ADDR)
content = I2C.EEPROM.read_cmd16(addr, count)
return EEPROMImage(content)
finally:
I2C.EEPROM.end_tx()
@classmethod
def __write_image(cls, addr, values): # max 32 values
try:
I2C.EEPROM.start_tx(Host.DFE_EEPROM_ADDR)
I2C.EEPROM.write_addr16(addr, *values)
time.sleep(cls.__TWR)
finally:
I2C.EEPROM.end_tx()
# ----------------------------------------------------------------------------------------------------------------
def __init__(self):
"""
initialise with current EEPROM contents
"""
self.__image = self.__read_image(0, CAT24C32.SIZE)
# ----------------------------------------------------------------------------------------------------------------
def write(self, image):
# verify...
if len(image) != CAT24C32.SIZE:
raise ValueError("CAT24C32.write: image has incorrect length.")
addr = 0
# write...
while addr < len(image.content):
values = image.content[addr: addr + CAT24C32.__BUFFER_SIZE]
self.__write_image(addr, values)
addr += CAT24C32.__BUFFER_SIZE
# reload...
self.__image = self.__read_image(0, CAT24C32.SIZE)
# ----------------------------------------------------------------------------------------------------------------
@property
def image(self):
return self.__image
# ----------------------------------------------------------------------------------------------------------------
def __str__(self, *args, **kwargs):
return "CAT24C32:{image:%s}" % self.image
|
NGSegovia/wsgi-intercept
|
wsgi_intercept/mechanize_intercept/wsgi_browser.py
|
Python
|
mit
| 1,068
| 0.005618
|
"""
A mechanize browser that redirects specified HTTP connections to a WSGI
object.
"""
from httplib import HTTP
from mechanize import Browser as MechanizeBrowser
from wsgi_intercept.urllib2_intercept import install_opener, uninstall_opener
try:
from mechanize import HTTPHandler
except ImportError:
# pre mechanize 0.1.0 it was a separate package
# (this will break if it is combined with a newer mechanize)
from ClientCookie import HTTPHandler
import sys, os.path
from wsgi_intercept.urllib2_intercept import WSGI_HTTPHandler, WSGI_HTTPSHandler
class Browser(MechanizeBrowser):
"""
A version of the mechanize browser class that
installs the WSGI intercept handler
"""
handler_classes = MechanizeBrowser.handler_classes.copy()
handler_classes['http'] = WSGI_HTTPHandler
handler_classes['https'] = WSGI_HTTPSHandler
def __init__(self, *args, **kwargs):
# install WSGI intercept handler.
        install(self)
        MechanizeBrowser.__init__(self, *args, **kwargs)
def install(browser):
install_opener()
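# Usage sketch (hedged): assumes the classic wsgi_intercept API, where
# add_wsgi_intercept(host, port, app_factory) maps a host to an in-process WSGI app;
# 'app.test' and my_wsgi_app are illustrative.
#
#     import wsgi_intercept
#     wsgi_intercept.add_wsgi_intercept('app.test', 80, lambda: my_wsgi_app)
#     b = Browser()                 # handler_classes above route HTTP through WSGI
#     b.open('http://app.test/')    # served by my_wsgi_app, no sockets involved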
|
xflows/textflows
|
mothra/wsgi.py
|
Python
|
mit
| 1,305
| 0.002299
|
"""
WSGI config for mothra project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
import site
import djcelery
djcelery.setup_loader()
project_path = '/var/www/textflows'
if project_path not in sys.path:
sys.path.append(project_path)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mothra.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
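# A hedged illustration of the middleware pattern described in the docstring above;
# the header name is illustrative, not part of mothra.
#
# def add_served_by(app):
#     def middleware(environ, start_response):
#         def sr(status, headers, exc_info=None):
#             headers.append(('X-Served-By', 'mothra'))
#             return start_response(status, headers, exc_info)
#         return app(environ, sr)
#     return middleware
# application = add_served_by(application)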
|
ljwolf/pysal
|
pysal/spreg/error_sp_hom.py
|
Python
|
bsd-3-clause
| 57,745
| 0.001039
|
'''
Hom family of models based on: [Drukker2013]_
Following: [Anselin2011]_
'''
__author__ = "Luc Anselin luc.anselin@asu.edu, Daniel Arribas-Bel darribas@asu.edu"
from scipy import sparse as SP
import numpy as np
from numpy import linalg as la
import ols as OLS
from pysal import lag_spatial
from utils import power_expansion, set_endog, iter_msg, sp_att
from utils import get_A1_hom, get_A2_hom, get_A1_het, optim_moments
from utils import get_spFilter, get_lags, _moments2eqs
from utils import spdot, RegressionPropsY, set_warn
import twosls as TSLS
import user_output as USER
import summary_output as SUMMARY
__all__ = ["GM_Error_Hom", "GM_Endog_Error_Hom", "GM_Combo_Hom"]
class BaseGM_Error_Hom(RegressionPropsY):
'''
GMM method for a spatial error model with homoskedasticity (note: no
consistency checks, diagnostics or constant added); based on
Drukker et al. (2013) [Drukker2013]_, following Anselin (2011) [Anselin2011]_.
Parameters
----------
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, excluding the constant
w : Sparse matrix
Spatial weights sparse matrix
max_iter : int
Maximum number of iterations of steps 2a and 2b from Arraiz
et al. Note: epsilon provides an additional stop condition.
epsilon : float
Minimum change in lambda required to stop iterations of
                   steps 2a and 2b from Arraiz et al. Note: max_iter provides
an additional stop condition.
A1 : string
If A1='het', then the matrix A1 is defined as in Arraiz et
                   al. If A1='hom', then as in Anselin (2011). If
A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
and Drukker, Prucha and Raciborski (2010).
Attributes
----------
    betas        : array
                   kx1 array of estimated coefficients
u : array
nx1 array of residuals
e_filtered : array
nx1 array of spatially filtered residuals
predy : array
nx1 array of predicted y values
n : integer
Number of observations
k : integer
Number of variables for which coefficients are estimated
(including the constant)
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, including the constant
iter_stop : string
Stop criterion reached during iteration of steps 2a and 2b
from Arraiz et al.
iteration : integer
Number of iterations of steps 2a and 2b from Arraiz et al.
mean_y : float
Mean of dependent variable
std_y : float
Standard deviation of dependent variable
vm : array
Variance covariance matrix (kxk)
sig2 : float
Sigma squared used in computations
xtx : float
X'X
Examples
--------
>>> import numpy as np
>>> import pysal
>>> db = pysal.open(pysal.examples.get_path('columbus.dbf'),'r')
>>> y = np.array(db.by_col("HOVAL"))
>>> y = np.reshape(y, (49,1))
>>> X = []
>>> X.append(db.by_col("INC"))
>>> X.append(db.by_col("CRIME"))
>>> X = np.array(X).T
>>> X = np.hstack((np.ones(y.shape),X))
>>> w = pysal.rook_from_shapefile(pysal.examples.get_path("columbus.shp"))
>>> w.transform = 'r'
Model commands
>>> reg = BaseGM_Error_Hom(y, X, w=w.sparse, A1='hom_sc')
>>> print np.around(np.hstack((reg.betas,np.sqrt(reg.vm.diagonal()).reshape(4,1))),4)
[[ 47.9479 12.3021]
[ 0.7063 0.4967]
[ -0.556 0.179 ]
[ 0.4129 0.1835]]
>>> print np.around(reg.vm, 4) #doctest: +SKIP
[[ 1.51340700e+02 -5.29060000e+00 -1.85650000e+00 -2.40000000e-03]
[ -5.29060000e+00 2.46700000e-01 5.14000000e-02 3.00000000e-04]
[ -1.85650000e+00 5.14000000e-02 3.21000000e-02 -1.00000000e-04]
[ -2.40000000e-03 3.00000000e-04 -1.00000000e-04 3.37000000e-02]]
'''
def __init__(self, y, x, w,
max_iter=1, epsilon=0.00001, A1='hom_sc'):
if A1 == 'hom':
wA1 = get_A1_hom(w)
elif A1 == 'hom_sc':
wA1 = get_A1_hom(w, scalarKP=True)
elif A1 == 'het':
wA1 = get_A1_het(w)
wA2 = get_A2_hom(w)
# 1a. OLS --> \tilde{\delta}
ols = OLS.BaseOLS(y=y, x=x)
self.x, self.y, self.n, self.k, self.xtx = ols.x, ols.y, ols.n, ols.k, ols.xtx
# 1b. GM --> \tilde{\rho}
moments = moments_hom(w, wA1, wA2, ols.u)
lambda1 = optim_moments(moments)
lambda_old = lambda1
self.iteration, eps = 0, 1
while self.iteration < max_iter and eps > epsilon:
# 2a. SWLS --> \hat{\delta}
x_s = get_spFilter(w, lambda_old, self.x)
y_s = get_spFilter(w, lambda_old, self.y)
ols_s = OLS.BaseOLS(y=y_s, x=x_s)
self.predy = spdot(self.x, ols_s.betas)
self.u = self.y - self.predy
# 2b. GM 2nd iteration --> \hat{\rho}
moments = moments_hom(w, wA1, wA2, self.u)
psi = get_vc_hom(w, wA1, wA2, self, lambda_old)[0]
lambda2 = optim_moments(moments, psi)
eps = abs(lambda2 - lambda_old)
lambda_old = lambda2
self.iteration += 1
self.iter_stop = iter_msg(self.iteration, max_iter)
# Output
self.betas = np.vstack((ols_s.betas, lambda2))
self.vm, self.sig2 = get_omega_hom_ols(
w, wA1, wA2, self, lambda2, moments[0])
self.e_filtered = self.u - lambda2 * w * self.u
self._cache = {}
class GM_Error_Hom(BaseGM_Error_Hom):
'''
GMM method for a spatial error model with homoskedasticity, with results
and diagnostics; based on Drukker et al. (2013) [Drukker2013]_, following Anselin
(2011) [Anselin2011]_.
Parameters
----------
y : array
nx1 array for dependent variable
x : array
Two dimensional array with n rows and one column for each
independent (exogenous) variable, excluding the constant
w : pysal W object
Spatial weights object
max_iter : int
Maximum number of iterations of steps 2a and 2b from Arraiz
et al. Note: epsilon provides an additional stop condition.
epsilon : float
Minimum change in lambda required to stop iterations of
steps 2a and 2b from Arraiz et al. Note: max_iter provides
an additional stop condition.
A1 : string
If A1='het', then the matrix A1 is defined as in Arraiz et
al. If A1='hom', then as in Anselin (2011). If
A1='hom_sc' (default), then as in Drukker, Egger and Prucha (2010)
and Drukker, Prucha and Raciborski (2010).
vm : boolean
If True, include variance-covariance matrix in summary
results
name_y : string
Name of dependent variable for use in output
name_x : list of strings
Names of independent variables for use in output
name_w : string
Name of weights matrix for use in output
name_ds : string
Name of dataset for use in output
Attributes
----------
summary : string
Summary of regression results and
|
sangwook236/general-development-and-testing
|
sw_dev/python/rnd/test/machine_learning/keras/keras_visualization.py
|
Python
|
gpl-2.0
| 3,637
| 0.02887
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from __future__ import print_function
import numpy as np
import tensorflow as tf
import matplotlib.pyplot as plt
# REF [site] >> https://machinelearningmastery.com/how-to-visualize-filters-and-feature-maps-in-convolutional-neural-networks/
def visualize_filters_in_CNN():
# Load the model.
model = tf.keras.applications.vgg16.VGG16()
# Summarize the model.
model.summary()
# Summarize filter shapes.
for layer in model.layers:
# Check for convolutional layer.
if 'conv' not in layer.name:
continue
# Get filter weights.
filters, biases = layer.get_weights()
print(layer.name, filters.shape)
#--------------------
# Retrieve weights from the second hidden layer.
filters, biases = model.layers[1].get_weights()
# Normalize filter values to 0-1 so we can visualize them.
f_min, f_max = filters.min(), filters.max()
filters = (filters - f_min) / (f_max - f_min)
# Plot first few filters.
n_filters, ix = 6, 1
for i in range(n_filters):
# Get the filter.
f = filters[:, :, :, i]
# Plot each channel separately.
for j in range(3):
			# Specify subplot and turn off axis.
ax = plt.subplot(n_filters, 3, ix)
ax.set_xticks([])
ax.set_yticks([])
# Plot filter channel in grayscale.
plt.imshow(f[:, :, j], cmap='gray')
ix += 1
# Show the figure.
plt.show()
# REF [site] >> https://machinelearningmastery.com/how-to-visualize-filters-and-feature-maps-in-convolutional-neural-networks/
def visualize_feature_maps_in_CNN():
# Load the image with the required shape.
img = tf.keras.preprocessing.image.load_img('./bird.jpg', target_size=(224, 224))
# Convert the image to an array.
img = tf.keras.preprocessing.image.img_to_array(img)
# Expand dimensions so that it represents a single 'sample'.
img = np.expand_dims(img, axis=0)
# Prepare the image (e.g. scale pixel values for the vgg).
img = tf.keras.applications.vgg16.preprocess_input(img)
#--------------------
# Load the model.
model = tf.keras.applications.vgg16.VGG16()
# Redefine model to output right after the first hidden layer.
model = tf.keras.models.Model(inputs=model.inputs, outputs=model.layers[1].output)
model.summary()
# Get feature map for first hidden layer.
	feature_maps = model.predict(img)
# Plot all 64 maps in an 8x8 squares.
square = 8
ix = 1
for _ in range(square):
for _ in range(square):
			# Specify subplot and turn off axis.
ax = plt.subplot(square, square, ix)
ax.set_xticks([])
ax.set_yticks([])
# Plot filter channel in grayscale.
plt.imshow(feature_maps[0, :, :, ix-1], cmap='gray')
ix += 1
# Show the figure.
plt.show()
#--------------------
# Load the model.
model = tf.keras.applications.vgg16.VGG16()
# Redefine model to output right after hidden layers.
ixs = [2, 5, 9, 13, 17]
outputs = [model.layers[i].output for i in ixs]
model = tf.keras.models.Model(inputs=model.inputs, outputs=outputs)
# Get feature map for the hidden layers.
feature_maps = model.predict(img)
# Plot the output from each block.
square = 8
for fmap in feature_maps:
# Plot all 64 maps in an 8x8 squares.
ix = 1
for _ in range(square):
for _ in range(square):
				# Specify subplot and turn off axis.
ax = plt.subplot(square, square, ix)
ax.set_xticks([])
ax.set_yticks([])
# Plot filter channel in grayscale.
plt.imshow(fmap[0, :, :, ix-1], cmap='gray')
ix += 1
# Show the figure.
plt.show()
def main():
#visualize_filters_in_CNN()
visualize_feature_maps_in_CNN()
#--------------------------------------------------------------------
if '__main__' == __name__:
main()
|
stkyle/libtaxii
|
docs/conf.py
|
Python
|
bsd-3-clause
| 945
| 0
|
import os
import libtaxii
project = u'libtaxii'
copyright = u'2014, The MITRE Corporation'
version = libtaxii.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.ifconfig',
'sphinx.ext.intersphinx',
'sphinx.ext.viewcode',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {
'python': ('http://docs.python.org/', None),
}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
]
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'libtaxii.tex', u'libtaxii Documentation',
u'The MITRE Corporation', 'manual'),
]
|
jcatw/scnn
|
scnn/graph_proportion_baseline_experiment.py
|
Python
|
mit
| 6,889
| 0.019016
|
__author__ = 'jatwood'
import sys
import numpy as np
from sklearn.metrics import f1_score, accuracy_score
from sklearn.linear_model import LogisticRegression
import data
import util
import kernel
import structured
from baseline_graph_experiment import GraphDecompositionModel
def graph_proportion_baseline_experiment(model_fn, data_fn, data_name, model_name, prop_valid, prop_test):
    print 'Running graph experiment (%s)...' % (data_name,)
A, X, Y = data_fn()
n_graphs = len(A)
A = np.asarray(A)
X = np.asarray(X)
Y = np.asarray(Y)
indices = np.arange(n_graphs)
np.random.seed(4)
np.random.shuffle(indices)
print indices
valid_start = int(n_graphs * (1 - (prop_valid + prop_test)))
test_start = int(n_graphs * (1 - prop_test))
valid_indices = indices[valid_start:test_start]
test_indices = indices[test_start:]
for train_prop in [x / 10.0 for x in range(1, 11)]:
train_end = int(valid_start * train_prop)
train_indices = indices[:train_end]
m = model_fn()
m.fit_with_validation(A, X, Y, train_indices, valid_indices)
preds = m.predict(A, X, test_indices)
actuals = Y[test_indices,:]
accuracy = accuracy_score(actuals, preds)
f1_micro = f1_score(actuals, preds, average='micro')
f1_macro = f1_score(actuals, preds, average='macro')
        print 'form: name,model,train_prop,micro_f,macro_f,accuracy'
print '###RESULTS###: %s,%s,%.6f,%.8f,%.8f,%.8f' % (data_name, model_name, train_prop, f1_micro, f1_macro, accuracy)
def graph_proportion_kernel_experiment(model, data_fn, data_name, model_name, prop_valid, prop_test):
print 'Running graph experiment (%s)...' % (data_name,)
print 'parsing data...'
A, X, Y = data_fn()
print 'done'
n_graphs = len(A)
A = np.asarray(A)
X = np.asarray(X)
Y = np.asarray(Y)
indices = np.arange(n_graphs)
np.random.seed(4)
np.random.shuffle(indices)
print indices
valid_start = int(n_graphs * (1 - (prop_valid + prop_test)))
test_start = int(n_graphs * (1 - prop_test))
valid_indices = indices[valid_start:test_start]
test_indices = indices[test_start:]
for train_prop in [x / 10.0 for x in range(1, 11)[::-1]]:
print 'train prop %s' % (train_prop,)
train_end = int(valid_start * train_prop)
train_indices = indices[:train_end]
model.fit_with_validation(Y, train_indices, valid_indices, test_indices)
preds = model.predict(Y, valid_indices, test_indices)
actuals = Y[test_indices,:]
accuracy = accuracy_score(actuals, preds)
f1_micro = f1_score(actuals, preds, average='micro')
f1_macro = f1_score(actuals, preds, average='macro')
        print 'form: name,model,train_prop,micro_f,macro_f,accuracy'
print '###RESULTS###: %s,%s,%.6f,%.8f,%.8f,%.8f' % (data_name, model_name, train_prop, f1_micro, f1_macro, accuracy)
if __name__ == '__main__':
np.random.seed()
args = sys.argv[1:]
name_to_data = {
'nci1': lambda: data.parse_nci(graph_name='nci1.graph'),
'nci109': lambda: data.parse_nci(graph_name='nci109.graph'),
'mutag': lambda : data.parse_nci(graph_name='mutag.graph'),
'ptc': lambda : data.parse_nci(graph_name='ptc.graph'),
'enzymes': lambda : data.parse_nci(graph_name='enzymes.graph'),
'nci1struct': lambda: data.parse_nci(graph_name='nci1.graph', with_structural_features=True),
'nci109struct': lambda: data.parse_nci(graph_name='nci109.graph', with_structural_features=True),
}
transform_lookup = {
'id': None,
'rwl': util.rw_laplacian,
'l': util.laplacian,
}
name_to_parameters = {
'nci1': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
                 'ds_name':'nci1',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci109': {'num_dimensions':5,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci109',
'window_size':10,
'ngram_type':0,
'sampling_type':0,
'graphlet_size':0,
'sample_size':2
},
'mutag': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'mutag',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'enzymes': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'enzymes',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'ptc': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'ptc',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci1struct': {'num_dimensions':2,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci1',
'window_size':2,
'ngram_type':0,
'sampling_type':1,
'graphlet_size':0,
'sample_size':2
},
'nci109struct': {'num_dimensions':5,
'kernel_type':1,
'feature_type':3,
'ds_name':'nci109',
'window_size':10,
'ngram_type':0,
'sampling_type':0,
'graphlet_size':0,
'sample_size':2
},
}
data_name = args[0]
data_fn = name_to_data[data_name]
model_name = args[1]
baseline_models = {
'logisticl1': lambda: GraphDecompositionModel(reg='l1'),
'logisticl2': lambda: GraphDecompositionModel(reg='l2')
}
kernel_models = {
'deepwl': kernel.DeepWL(**name_to_parameters[data_name]),
}
if model_name in baseline_models:
graph_proportion_baseline_experiment(baseline_models[model_name], data_fn, data_name, model_name, 0.1, 0.1)
elif model_name in kernel_models:
graph_proportion_kernel_experiment(kernel_models[model_name], data_fn, data_name, model_name, 0.1, 0.1)
else:
print '%s not recognized' % (model_name,)
|
bcso/CS_234_Assignments
|
Assignment 2/Stock.py
|
Python
|
apache-2.0
| 3,006
| 0.024285
|
"""CS 234 Assignment 2 Question 1a - Testing assumptions: Stock.py
"""
class Stock:
"""
A data type representing a single stock
Fields: name - str: the company name as a string
symbol - str: a string uniquely identifying the stock
price - non-negative float: last/current price
low - non-negative float: lowest price of the day
high - non-negative float: highest price of the day
volume - non-negative int: number of shares traded
Floats are represented to two decimal places.
The constraint low <= price <= high is always satisfied.
"""
def __init__(self, aName, aSymbol, price = 0.0,\
low = 0.0, high = 0.0, volume = 0):
"""
Preconditions: aName and aSymbol are strs
Assumptions: if supplied price, low and high are non-negative floats
if supplied volume is a non-negative int
Strings aName and aSymbol cannot be empty
Variables aName and aSymbol have to be Strings
        It is assumed that: low <= price <= high
Postconditions: construct a stock data type with aName and aSymbol
"""
if type(aSymbol) != type("s"):
raise TypeError, "The input symbol must be a string!"
if len(aSymbol) == 0:
raise TypeError, "The input symbol cannot be empty!"
elif type(aName) != type("s"):
raise TypeError, "The input name must be a string!"
if len(aName) == 0:
raise TypeError, "The input name cannot be empty!"
elif (price < 0) or (low < 0) or (high < 0) or (volume < 0):
raise ValueError, "Price, low, high and volume inputs must be non-negative!"
elif (low <= price) == False:
raise ValueError, "Low must be lower than price!"
elif (price <= high) == False:
raise ValueError, "Price must be lower than high!"
elif (low <= high) == False:
raise ValueError, "Low must be lower than high!"
elif (type(price) != type(0.0)) or (type(low) != type(0.0)) or (type(high) != type(0.0)):
raise ValueError, "Price, low and high inputs must be float inputs!"
self.name = aName
self.symbol = aSymbol
self.price = price
self.low = low
        self.high = high
self.volume = volume
def __repr__(self):
"""
Postcondition: return a string representation of Stock
"""
return "Stock(%s, %s, %.2f, %.2f, %.2f, %d)" % \
(self.name, self.symbol, self.price, \
self.low, self.high, self.volume)
def __eq__(self, rhs):
"""
Precondition: rhs is another Stock
Postcondition: returns True iff both stock symbols are identical
"""
return self.symbol == rhs.symbol
def __ne__(self, rhs):
"""
Precondition: rhs is another Stock
Postcondition: returns True iff both stock symbols are different
"""
return not(self==rhs)
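# Usage sketch (values are illustrative):
#
#     s = Stock("Acme Corp", "ACME", 10.00, 9.50, 10.25, 1000)
#     t = Stock("Acme Corp", "ACME")          # price/low/high default to 0.0
#     s == t                                  # True: equality compares symbols only
#     Stock("Bad", "BAD", 5.0, 6.0, 7.0)      # raises ValueError (low > price)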
|
6809/MC6809
|
MC6809/core/memory_info.py
|
Python
|
gpl-3.0
| 1,835
| 0
|
#!/usr/bin/env python
"""
DragonPy - base memory info
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:created: 2013 by Jens Diemer - www.jensdiemer.de
:copyleft: 2013 by the MC6809 team, see AUTHORS for more details.
:license: GNU GPL v3 or above, see LICENSE for more details.
"""
import sys
class BaseMemoryInfo:
def __init__(self, out_func):
self.out_func = out_func
def get_shortest(self, addr):
shortest = None
size = sys.maxsize
for start, end, txt in self.MEM_INFO:
if not start <= addr <= end:
continue
current_size = abs(end - start)
if current_size < size:
size = current_size
shortest = start, end, txt
if shortest is None:
return f"${addr:x}: UNKNOWN"
start, end, txt = shortest
if start == end:
return f"${addr:x}: {txt}"
else:
return f"${addr:x}: ${start:x}-${end:x} - {txt}"
def __call__(self, addr, info="", shortest=True):
if shortest:
mem_info = self.get_shortest(addr)
if info:
self.out_func(f"{info}: {mem_info}")
else:
self.out_func(mem_info)
return
mem_info = []
for start, end, txt in self.MEM_INFO:
if start <= addr <= end:
mem_info.append(
(start, end, txt)
)
if not mem_info:
self.out_func(f"{info} ${addr:x}: UNKNOWN")
else:
self.out_func(f"{info} ${addr:x}:")
for start, end, txt in mem_info:
if start == end:
self.out_func(f" * ${
|
start:x} - {txt}")
else:
self.out_func(f" * ${start:x}-${end:x} - {txt}")
|
charleswhchan/huey
|
huey/api.py
|
Python
|
mit
| 16,245
| 0.000616
|
import datetime
import json
import pickle
import re
import time
import traceback
import uuid
from functools import wraps
from huey.backends.dummy import DummySchedule
from huey.exceptions import DataStoreGetException
from huey.exceptions import DataStorePutException
from huey.exceptions import DataStoreTimeout
from huey.exceptions import QueueException
from huey.exceptions import QueueReadException
from huey.exceptions import QueueRemoveException
from huey.exceptions import QueueWriteException
from huey.exceptions import ScheduleAddException
from huey.exceptions import ScheduleReadException
from huey.registry import registry
from huey.utils import EmptyData
from huey.utils import local_to_utc
from huey.utils import wrap_exception
class Huey(object):
"""
Huey executes tasks by exposing function decorators that cause the function
call to be enqueued for execution by the consumer.
Typically your application will only need one Huey instance, but you can
have as many as you like -- the only caveat is that one consumer process
must be executed for each Huey instance.
:param queue: a queue instance, e.g. ``RedisQueue()``
:param result_store: a place to store results, e.g. ``RedisResultStore()``
:param schedule: a place to store pending tasks, e.g. ``RedisSchedule()``
:param events: channel to send events on, e.g. ``RedisEventEmitter()``
:param store_none: Flag to indicate whether tasks that return ``None``
should store their results in the result store.
:param always_eager: Useful for testing, this will execute all tasks
immediately, without enqueueing them.
Example usage::
from huey.api import Huey, crontab
from huey.backends.redis_backend import RedisQueue, RedisDataStore, RedisSchedule
queue = RedisQueue('my-app')
result_store = RedisDataStore('my-app')
schedule = RedisSchedule('my-app')
huey = Huey(queue, result_store, schedule)
# This is equivalent to the previous 4 lines:
# huey = RedisHuey('my-app', {'host': 'localhost', 'port': 6379})
@huey.task()
def slow_function(some_arg):
# ... do something ...
return some_arg
@huey.periodic_task(crontab(minute='0', hour='3'))
def backup():
# do a backup every day at 3am
return
"""
def __init__(self, queue, result_store=None, schedule=None, events=None,
store_none=False, always_eager=False):
self.queue = queue
self.result_store = result_store
self.schedule = schedule or DummySchedule(self.queue.name)
self.events = events
self.blocking = self.queue.blocking
self.store_none = store_none
self.always_eager = always_eager
def task(self, retries=0, retry_delay=0, retries_as_argument=False,
include_task=False, name=None):
def decorator(func):
"""
Decorator to execute a function out-of-band via the consumer.
"""
klass = create_task(
QueueTask,
func,
retries_as_argument,
name,
include_task)
def schedule(args=None, kwargs=None, eta=None, delay=None,
convert_utc=True, task_id=None):
if delay and eta:
raise ValueError('Both a delay and an eta cannot be '
'specified at the same time')
if delay:
eta = (datetime.datetime.now() +
datetime.timedelta(seconds=delay))
if convert_utc and eta:
eta = local_to_utc(eta)
cmd = klass(
(args or (), kwargs or {}),
execute_time=eta,
retries=retries,
retry_delay=retry_delay,
task_id=task_id)
return self.enqueue(cmd)
func.schedule = schedule
func.task_class = klass
@wraps(func)
def inner_run(*args, **kwargs):
cmd = klass(
(args, kwargs),
retries=retries,
retry_delay=retry_delay)
return self.enqueue(cmd)
inner_run.call_local = func
return inner_run
return decorator
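    # Usage sketch for the decorator above (names are illustrative):
    #
    #     @huey.task(retries=2, retry_delay=10)
    #     def add(a, b):
    #         return a + b
    #
    #     add(1, 2)                              # enqueue for the consumer
    #     add.schedule(args=(1, 2), delay=60)    # enqueue to run ~60s from now
    #     add.call_local(1, 2)                   # run in-process, bypassing the queue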
def periodic_task(self, validate_datetime, name=None):
"""
Decorator to execute a function on a specific schedule.
"""
def decorator(func):
def method_validate(self, dt):
return validate_datetime(dt)
klass = create_task(
PeriodicQueueTask,
func,
task_name=name,
validate_datetime=method_validate,
)
func.task_class = klass
def _revoke(revoke_until=None, revoke_once=False):
self.revoke(klass(), revoke_until, revoke_once)
func.revoke = _revoke
def _is_revoked(dt=None, peek=True):
return self.is_revoked(klass(), dt, peek)
func.is_revoked = _is_revoked
def _restore():
return self.restore(klass())
func.restore = _restore
return func
return decorator
def _wrapped_operation(exc_class):
def decorator(fn):
def inner(*args, **kwargs):
try:
return fn(*args, **kwargs)
except:
wrap_exception(exc_class)
return inner
return decorator
@_wrapped_operation(QueueWriteException)
def _write(self, msg):
self.queue.write(msg)
@_wrapped_operation(QueueReadException)
def _read(self):
return self.queue.read()
@_wrapped_operation(QueueRemoveException)
def _remove(self, msg):
return self.queue.remove(msg)
@_wrapped_operation(DataStoreGetException)
def _get(self, key, peek=False):
if peek:
return self.result_store.peek(key)
else:
return self.result_store.get(key)
@_wrapped_operation(DataStorePutException)
    def _put(self, key, value):
return self.result_store.put(key, value)
@_wrapped_operation(ScheduleAddException)
def _add_schedule(self, data, ts):
if self.schedule is None:
raise AttributeError('Schedule not specified.')
self.schedule.add(data, ts)
@_wrapped_operation(ScheduleReadException)
def _read_schedule(self, ts):
if self.schedule is None:
raise AttributeError('Schedule not specified.')
return self.schedule.read(ts)
def emit(self, message):
"""Events should always fail silently."""
try:
self.events.emit(message)
except:
pass
def enqueue(self, task):
if self.always_eager:
return task.execute()
self._write(registry.get_message_for_task(task))
if self.result_store:
return AsyncData(self, task)
def dequeue(self):
message = self._read()
if message:
return registry.get_task_for_message(message)
def _format_time(self, dt):
if dt is None:
return None
return time.mktime(dt.timetuple())
def emit_task(self, status, task, error=False):
if self.events:
message_data = {'status': status}
message_data.update({
'id': task.task_id,
'task': type(task).__name__,
'retries': task.retries,
'retry_delay': task.retry_delay,
'execute_time': self._format_time(task.execute_time),
'error': error})
if error:
message_data['traceback'] = traceback.format_exc()
self.emit(json.dumps(message_data))
def execute(self, task):
if not isinstance(task, QueueTask):
raise TypeError('Unknown object: %s' % task)
result = task.execute()
|
DOV-Vlaanderen/pydov
|
tests/test_error_path.py
|
Python
|
mit
| 12,376
| 0
|
import datetime
import gzip
import os
import sys
import tempfile
import time
from importlib import reload
from subprocess import Popen
import numpy as np
import pytest
from owslib.fes import PropertyIsEqualTo
import pydov
from pydov.search.boring import BoringSearch
from pydov.search.grondwaterfilter import GrondwaterFilterSearch
from pydov.util.caching import GzipTextFileCache
from pydov.util.dovutil import build_dov_url
from pydov.util.errors import XmlFetchWarning, XmlStaleWarning, XsdFetchWarning
from pydov.util.hooks import Hooks
from tests.abstract import ServiceCheck
from tests.test_util_hooks import HookCounter
@pytest.fixture(scope="module", autouse=True)
def dov_proxy_no_xdov():
"""Fixture to start the DOV proxy and set PYDOV_BASE_URL to route
traffic through it.
    The DOV proxy behaves as if the XDOV server would be unavailable.
"""
process = Popen([sys.executable,
os.path.join(os.path.dirname(os.path.abspath(__file__)),
'stub', 'dov_proxy.py'),
'--dov-base-url', build_dov_url('/'),
'--no-xdov'])
time.sleep(2)
orig_base_url = os.environ.get('PYDOV_BASE_URL', None)
os.environ['PYDOV_BASE_URL'] = 'http://localhost:1337/'
yield
if orig_base_url is not None:
os.environ['PYDOV_BASE_URL'] = orig_base_url
else:
del(os.environ['PYDOV_BASE_URL'])
process.terminate()
process.communicate()
@pytest.fixture(scope="module", autouse=True)
def reload_modules(dov_proxy_no_xdov):
"""Reload the boring and grondwaterfilter modules after setting
PYDOV_BASE_URL.
These need to be reloaded because they use the PYDOV_BASE_URL at import
time to set the location of XSD schemas.
Parameters
----------
dov_proxy_no_xdov : pytest.fixture
Fixture starting the DOV proxy and setting PYDOV_BASE_URL accordingly.
"""
reload(pydov.types.boring)
reload(pydov.types.grondwaterfilter)
yield
reload(pydov.types.boring)
reload(pydov.types.grondwaterfilter)
@pytest.fixture(scope="function", autouse=True)
def reset_cache(dov_proxy_no_xdov):
"""Reset the cache to a temporary folder to remove influence from other
tests.
The cache needs to be reset after setting the PYDOV_BASE_URL variable
because at initialisation this URL is used to construct a regex for
determining the datatype of an XML request.
Parameters
----------
dov_proxy_no_xdov : pytest.fixture
Fixture starting the DOV proxy and setting PYDOV_BASE_URL accordingly.
"""
gziptext_cache = GzipTextFileCache(
cachedir=os.path.join(tempfile.gettempdir(), 'pydov_tests_error'),
max_age=datetime.timedelta(seconds=0.1))
gziptext_cache.remove()
orig_cache = pydov.cache
pydov.cache = gziptext_cache
yield
gziptext_cache.remove()
pydov.cache = orig_cache
@pytest.fixture
def test_hook_count():
"""PyTest fixture temporarily disabling default hooks and installing
HookCounter."""
orig_hooks = pydov.hooks
pydov.hooks = Hooks(
(HookCounter(),)
)
yield
pydov.hooks = orig_hooks
class TestNoXDOV(object):
"""Class grouping tests related failing DOV services."""
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_do_not_cache_error(self):
"""Test whether the 404 error page does not end up being cached."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert not os.path.exists(os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
))
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_do_not_overwrite_stale_cache(self):
"""Test whether a stale copy of the data which exists in the cache is
not overwritten by the 404 error page."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'types', 'boring', 'boring.xml')
cache_path = os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
)
os.makedirs(os.path.dirname(cache_path))
with open(testdata_path, 'r') as testdata:
with gzip.open(cache_path, 'wb') as cached_data:
cached_data.write(testdata.read().encode('utf8'))
time.sleep(0.5)
bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
with gzip.open(cache_path, 'rb') as cached_data:
assert 'GEO-04/169-BNo-B1' in cached_data.read().decode('utf8')
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_stale_warning(self):
"""Test whether a stale version of the data from the cache is used in
case of a service error, and if a warning is issued to the user."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'types', 'boring', 'boring.xml')
cache_path = os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
)
os.makedirs(os.path.dirname(cache_path))
with open(testdata_path, 'r') as testdata:
with gzip.open(cache_path, 'wb') as cached_data:
cached_data.write(testdata.read().encode('utf8'))
time.sleep(0.5)
with pytest.warns(XmlStaleWarning):
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert not df.iloc[0].boorgatmeting
assert df.iloc[0].boormethode == 'spade'
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_stale_disabled(self):
"""Test whether no stale version of the data from the cache is used
when disabled, and if a warning is issued to the user."""
pydov.cache.stale_on_error = False
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
testdata_path = os.path.join(
'tests', 'data', 'types', 'boring', 'boring.xml')
cache_path = os.path.join(
pydov.cache.cachedir, 'boring', '2004-103984.xml.gz'
)
os.makedirs(os.path.dirname(cache_path))
with open(testdata_path, 'r') as testdata:
with gzip.open(cache_path, 'wb') as cached_data:
cached_data.write(testdata.read().encode('utf8'))
time.sleep(0.5)
with pytest.warns(XmlFetchWarning):
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2004-103984')))
assert np.isnan(df.iloc[0].boorgatmeting)
assert np.isnan(df.iloc[0].boormethode)
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_wfs_data_present(self):
"""Test whether data available in the WFS is present in the dataframe
in case of a service error in XDOV."""
bs = BoringSearch(objecttype=pydov.types.boring.Boring)
df = bs.search(query=PropertyIsEqualTo(
'pkey_boring', build_dov_url('data/boring/2016-122561')))
assert df.iloc[0].gemeente == 'Wortegem-Petegem'
@pytest.mark.online
@pytest.mark.skipif(not ServiceCheck.service_ok(),
reason="DOV service is unreachable")
def test_nan_and_fetch_warning(self):
"""Test whether the XML data is set tot NaN in case of an error and
no stale cache is available. Also test if a warning is given to the
user."""
bs = BoringSearch(objecttype=pydov.types.boring.Bori
|
Pysellus/streaming-api-test
|
api-reader/APIReaderSmartvel.py
|
Python
|
mit
| 381
| 0.005249
|
#!/usr/bin/env python3
from AbstractAPIReader import AbstractAPIReader
from Smartvel import Smartvel
from smartvel_auth import TOKEN
class APIReaderSmartvel(AbstractAPIReader):
def __init__(self, token=TOKEN, endpoint='events'):
self._token = token
self._endpoint = endpoint
def get_iterable(self):
return Smartvel(self._token, self._endpoint)
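# Usage sketch (hedged): TOKEN comes from smartvel_auth; the endpoint name is
# illustrative and Smartvel is assumed to yield API records when iterated.
#
#     reader = APIReaderSmartvel(endpoint='events')
#     for record in reader.get_iterable():
#         process(record)                # process() is a placeholder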
|
jonhadfield/fval
|
fval/plan.py
|
Python
|
mit
| 3,930
| 0.002036
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from os.path import splitext
import yaml
def build_plan(discovery_result, config=None):
""" Extract content and determine which checks need to be run
Checks are assigned in the following order:
1. <filename>.fval
2. <dir>.fval
3. default config
Note: As soon as a matching set of tests is found, the build is complete.
Merging/inheritance is not currently supported.
Args:
discovery_result: The paths discovered
config: Options provided in configuration file and
through command line options
Returns:
list: The checks to perform against the discovered paths
"""
logger = config['logger']
plan = list()
for item in discovery_result:
fval_path = item.get('fval_path')
dir_fval_path = item.get('dir_fval_path')
unit_path = item.get('unit_path')
fval_dir_config = None
unit_ext = splitext(unit_path)[1]
unit_tests = list()
        # If a 'unit specific' fval file was found, then load its checks
if fval_path:
try:
with open(fval_path.encode('utf-8')) as fval_file:
cf_yaml_content = yaml.load(fval_file.read())
unit_tests = cf_yaml_content
except IOError:
logger.debug('No unit specific file found for: {0}'.format(fval_path))
# If a fval file was found in the directory, then load its checks
elif dir_fval_path:
try:
with open(
dir_fval_path.encode(
'utf-8'), 'r') as dir_fval_file:
fval_dir_config = yaml.load(dir_fval_file.read())
except IOError:
logger.debug('No dir specific file found for: {0}'.format(fval_path))
except:
raise
# If no tests (specific to the unit or the directory) are found,
# then fall back to master config
if not unit_tests and fval_dir_config and fval_dir_config.get('mappings'):
# CHECK IF UNIT FILE HAS RECOGNISED EXTENSION IN MAPPINGS
matching_templates = list()
for mapping in fval_dir_config.get('mappings'):
if mapping.get('extension') == unit_ext[1:]:
matching_templates = mapping.get('templates')
# IF MATCHING TEMPLATES WERE FOUND, THEN EXTRACT THE CHECKS
if matching_templates:
extracted_checks = dict()
for matching_template in matching_templates:
if matching_template in fval_dir_config.get('templates'):
matched_template = fval_dir_config.get('templates').get(matching_template)
extracted_checks.update(matched_template)
unit_tests = extracted_checks
elif not unit_tests and (config.get('all') or dir_fval_path) and config.get('mappings'):
# CHECK IF UNIT FILE HAS RECOGNISED EXTENSION IN MAPPINGS
matching_templates = list()
            for mapping in config.get('mappings'):
if mapping.get('extension') == unit_ext[1:]:
matching_templates = mapping.get('templates')
# IF MATCHING TEMPLATES WERE FOUND, THEN EXTRACT THE CHECKS
            if matching_templates:
extracted_checks = dict()
for matching_template in matching_templates:
if config.get('templates') and matching_template in config.get('templates'):
matched_template = config.get('templates').get(matching_template)
extracted_checks.update(matched_template)
unit_tests = extracted_checks
if unit_tests:
plan.append(dict(unit_path=unit_path, unit_checks=unit_tests))
return plan
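# A hedged sketch of the directory-level fval YAML that build_plan() consumes above:
# 'mappings' ties file extensions to template names, 'templates' holds the checks.
# Keys and check names below are illustrative only.
#
#     mappings:
#       - extension: yaml
#         templates: [basic]
#     templates:
#       basic:
#         syntax_valid: true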
|
agry/NGECore2
|
scripts/mobiles/yavin4/hooded_crystal_snake.py
|
Python
|
lgpl-3.0
| 1,650
| 0.026667
|
import sys
from services.spawn import MobileTemplate
from services.spawn import WeaponTemplate
from resources.datatables import WeaponType
from resources.datatables import Difficulty
from resources.datatables import Options
from java.util import Vector
def addTemplate(core):
mobileTemplate = MobileTemplate()
mobileTemplate.setCreatureName('crystal_snake_hooded')
mobileTemplate.setLevel(62)
mobileTemplate.setDifficulty(Difficulty.NORMAL)
mobileTemplate.setMinSpawnDistance(4)
mobileTemplate.setMaxSpawnDistance(8)
mobileTemplate.setDeathblow(False)
mobileTemplate.setScale(1)
mobileTemplate.setMeatType("Carnivore Meat")
mobileTemplate.setMeatAmount(5)
	mobileTemplate.setHideType("Scaley Hide")
mobileTemplate.setHideAmount(2)
mobileTemplate.setSocialGroup("crystal snake")
mobileTemplate.setAssistRange(12)
mobileTemplate.setStalker(False)
mobileTemplate.setOptionsBitmask(Options.AGGRESSIVE | Options.ATTACKABLE)
templates = Vector()
templates.add('object/mobile/shared_crystal_snake.iff')
mobileTemplate.setTemplates(templates)
weaponTemplates = Vector()
weapontemplate = WeaponTemplate('object/weapon/melee/unarmed/shared_unarmed_default.iff', WeaponType.UNARMED, 1.0, 6, 'kinetic')
weaponTemplates.add(weapontemplate)
mobileTemplate.setWeaponTemplateVector(weaponTemplates)
attacks = Vector()
attacks.add('bm_bite_4')
attacks.add('bm_hamstring_4')
attacks.add('bm_puncture_2')
mobileTemplate.setDefaultAttack('creatureMeleeAttack')
mobileTemplate.setAttacks(attacks)
core.spawnService.addMobileTemplate('hooded_crystal_snake', mobileTemplate)
return
|
wxgeo/geophar
|
wxgeometrie/sympy/printing/tests/test_rcode.py
|
Python
|
gpl-2.0
| 14,177
| 0.003597
|
from sympy.core import (S, pi, oo, Symbol, symbols, Rational, Integer,
GoldenRatio, EulerGamma, Catalan, Lambda, Dummy, Eq)
from sympy.functions import (Piecewise, sin, cos, Abs, exp, ceiling, sqrt,
gamma, sign, Max, Min, factorial, beta)
from sympy.sets import Range
from sympy.logic import ITE
from sympy.codegen import For, aug_assign, Assignment
from sympy.utilities.pytest import raises
from sympy.printing.rcode import RCodePrinter
from sympy.utilities.lambdify import implemented_function
from sympy.tensor import IndexedBase, Idx
from sympy.matrices import Matrix, MatrixSymbol
from sympy import rcode
from difflib import Differ
from pprint import pprint
x, y, z = symbols('x,y,z')
def test_printmethod():
class fabs(Abs):
def _rcode(self, printer):
return "abs(%s)" % printer._print(self.args[0])
assert rcode(fabs(x)) == "abs(x)"
def test_rcode_sqrt():
assert rcode(sqrt(x)) == "sqrt(x)"
assert rcode(x**0.5) == "sqrt(x)"
assert rcode(sqrt(x)) == "sqrt(x)"
def test_rcode_Pow():
assert rcode(x**3) == "x^3"
assert rcode(x**(y**3)) == "x^(y^3)"
g = implemented_function('g', Lambda(x, 2*x))
assert rcode(1/(g(x)*3.5)**(x - y**x)/(x**2 + y)) == \
"(3.5*2*x)^(-x + y^x)/(x^2 + y)"
assert rcode(x**-1.0) == '1.0/x'
assert rcode(x**Rational(2, 3)) == 'x^(2.0/3.0)'
_cond_cfunc = [(lambda base, exp: exp.is_integer, "dpowi"),
(lambda base, exp: not exp.is_integer, "pow")]
assert rcode(x**3, user_functions={'Pow': _cond_cfunc}) == 'dpowi(x, 3)'
assert rcode(x**3.2, user_functions={'Pow': _cond_cfunc}) == 'pow(x, 3.2)'
def test_rcode_Max():
# Test for gh-11926
assert rcode(Max(x,x*x),user_functions={"Max":"my_max", "Pow":"my_pow"}) == 'my_max(x, my_pow(x, 2))'
def test_rcode_constants_mathh():
p=rcode(exp(1))
assert rcode(exp(1)) == "exp(1)"
assert rcode(pi) == "pi"
assert rcode(oo) == "Inf"
assert rcode(-oo) == "-Inf"
def test_rcode_constants_other():
assert rcode(2*GoldenRatio) == "GoldenRatio = 1.61803398874989;\n2*GoldenRatio"
assert rcode(
2*Catalan) == "Catalan = 0.915965594177219;\n2*Catalan"
assert rcode(2*EulerGamma) == "EulerGamma = 0.577215664901533;\n2*EulerGamma"
def test_rcode_Rational():
assert rcode(Rational(3, 7)) == "3.0/7.0"
assert rcode(Rational(18, 9)) == "2"
assert rcode(Rational(3, -7)) == "-3.0/7.0"
assert rcode(Rational(-3, -7)) == "3.0/7.0"
assert rcode(x + Rational(3, 7)) == "x + 3.0/7.0"
assert rcode(Rational(3, 7)*x) == "(3.0/7.0)*x"
def test_rcode_Integer():
assert rcode(Integer(67)) == "67"
assert rcode(Integer(-1)) == "-1"
def test_rcode_functions():
assert rcode(sin(x) ** cos(x)) == "sin(x)^cos(x)"
assert rcode(factorial(x) + gamma(y)) == "factorial(x) + gamma(y)"
assert rcode(beta(Min(x, y), Max(x, y))) == "beta(min(x, y), max(x, y))"
def test_rcode_inline_function():
x = symbols('x')
g = implemented_function('g', Lambda(x, 2*x))
assert rcode(g(x)) == "2*x"
g = implemented_function('g', Lambda(x, 2*x/Catalan))
assert rcode(
g(x)) == "Catalan = %s;\n2*x/Catalan" % Catalan.n()
A = IndexedBase('A')
i = Idx('i', symbols('n', integer=True))
g = implemented_function('g', Lambda(x, x*(1 + x)*(2 + x)))
res=rcode(g(A[i]), assign_to=A[i])
ref=(
"for (i in 1:n){\n"
" A[i] = (A[i] + 1)*(A[i] + 2)*A[i];\n"
"}"
)
assert res == ref
def test_rcode_exceptions():
assert rcode(ceiling(x)) == "ceiling(x)"
assert rcode(Abs(x)) == "abs(x)"
assert rcode(gamma(x)) == "gamma(x)"
def test_rcode_user_functions():
x = symbols('x', integer=False)
n = symbols('n', integer=True)
custom_functions = {
"ceiling": "myceil",
"Abs": [(lambda x: not x.is_integer, "fabs"), (lambda x: x.is_integer, "abs")],
}
assert rcode(ceiling(x), user_functions=custom_functions) == "myceil(x)"
assert rcode(Abs(x), user_functions=custom_functions) == "fabs(x)"
assert rcode(Abs(n), user_functions=custom_functions) == "abs(n)"
def test_rcode_boolean():
assert rcode(True) == "True"
assert rcode(S.true) == "True"
assert rcode(False) == "False"
assert rcode(S.false) == "False"
assert rcode(x & y) == "x & y"
assert rcode(x | y) == "x | y"
assert rcode(~x) == "!x"
assert rcode(x & y & z) == "x & y & z"
assert rcode(x | y | z) == "x | y | z"
    assert rcode((x & y) | z) == "z | x & y"
assert rcode((x | y) & z) =
|
= "z & (x | y)"
def test_rcode_Relational():
from sympy import Eq, Ne, Le, Lt, Gt, Ge
assert rcode(Eq(x, y)) == "x == y"
assert rcode(Ne(x, y)) == "x != y"
assert rcode(Le(x, y)) == "x <= y"
assert rcode(Lt(x, y)) == "x < y"
assert rcode(Gt(x, y)) == "x > y"
assert rcode(Ge(x, y)) == "x >= y"
def test_rcode_Piecewise():
expr = Piecewise((x, x < 1), (x**2, True))
res=rcode(expr)
ref="ifelse(x < 1,x,x^2)"
assert res == ref
tau=Symbol("tau")
res=rcode(expr,tau)
ref="tau = ifelse(x < 1,x,x^2);"
assert res == ref
expr = 2*Piecewise((x, x < 1), (x**2, x<2), (x**3,True))
assert rcode(expr) == "2*ifelse(x < 1,x,ifelse(x < 2,x^2,x^3))"
res = rcode(expr, assign_to='c')
assert res == "c = 2*ifelse(x < 1,x,ifelse(x < 2,x^2,x^3));"
# Check that Piecewise without a True (default) condition error
#expr = Piecewise((x, x < 1), (x**2, x > 1), (sin(x), x > 0))
#raises(ValueError, lambda: rcode(expr))
expr = 2*Piecewise((x, x < 1), (x**2, x<2))
assert(rcode(expr))== "2*ifelse(x < 1,x,ifelse(x < 2,x^2,NA))"
def test_rcode_sinc():
from sympy import sinc
expr = sinc(x)
res = rcode(expr)
ref = "ifelse(x != 0,sin(x)/x,1)"
assert res == ref
def test_rcode_Piecewise_deep():
p = rcode(2*Piecewise((x, x < 1), (x + 1, x < 2), (x**2, True)))
assert p == "2*ifelse(x < 1,x,ifelse(x < 2,x + 1,x^2))"
expr = x*y*z + x**2 + y**2 + Piecewise((0, x < 0.5), (1, True)) + cos(z) - 1
p = rcode(expr)
ref="x^2 + x*y*z + y^2 + ifelse(x < 0.5,0,1) + cos(z) - 1"
assert p == ref
ref="c = x^2 + x*y*z + y^2 + ifelse(x < 0.5,0,1) + cos(z) - 1;"
p = rcode(expr, assign_to='c')
assert p == ref
def test_rcode_ITE():
expr = ITE(x < 1, y, z)
p = rcode(expr)
ref="ifelse(x < 1,y,z)"
assert p == ref
def test_rcode_settings():
raises(TypeError, lambda: rcode(sin(x), method="garbage"))
def test_rcode_Indexed():
from sympy.tensor import IndexedBase, Idx
from sympy import symbols
n, m, o = symbols('n m o', integer=True)
i, j, k = Idx('i', n), Idx('j', m), Idx('k', o)
p = RCodePrinter()
p._not_r = set()
x = IndexedBase('x')[j]
assert p._print_Indexed(x) == 'x[j]'
A = IndexedBase('A')[i, j]
assert p._print_Indexed(A) == 'A[i, j]'
B = IndexedBase('B')[i, j, k]
assert p._print_Indexed(B) == 'B[i, j, k]'
assert p._not_r == set()
def test_rcode_Indexed_without_looking_for_contraction():
len_y = 5
y = IndexedBase('y', shape=(len_y,))
x = IndexedBase('x', shape=(len_y,))
Dy = IndexedBase('Dy', shape=(len_y-1,))
i = Idx('i', len_y-1)
e=Eq(Dy[i], (y[i+1]-y[i])/(x[i+1]-x[i]))
code0 = rcode(e.rhs, assign_to=e.lhs, contract=False)
assert code0 == 'Dy[i] = (y[%s] - y[i])/(x[%s] - x[i]);' % (i + 1, i + 1)
def test_rcode_loops_matrix_vector():
n, m = symbols('n m', integer=True)
A = IndexedBase('A')
x = IndexedBase('x')
y = IndexedBase('y')
i = Idx('i', m)
j = Idx('j', n)
s = (
'for (i in 1:m){\n'
' y[i] = 0;\n'
'}\n'
'for (i in 1:m){\n'
' for (j in 1:n){\n'
' y[i] = A[i, j]*x[j] + y[i];\n'
' }\n'
'}'
)
c = rcode(A[i, j]*x[j], assign_to=y[i])
assert c == s
def test_dummy_loops():
# the following line could also be
# [Dummy(s, integer=True) for s in 'im']
# or [Dummy(integer=True) for s in 'im']
i, m = symbols('i m', integer=True, cls=Dummy)
x = IndexedBase('x')
y = IndexedBase('y')
|
usnistgov/corr
|
corr-cloud/cloud/views/admin_generation.py
|
Python
|
mit
| 3,973
| 0.004782
|
from corrdb.common.models import UserModel
from corrdb.common.models import ProfileModel
from corrdb.common import get_or_create
import hashlib
import datetime
import simplejson as json
import os
import re
def password_check(password):
"""
Verify the strength of 'password'
    Returns a boolean indicating whether the password is strong
A password is considered strong if:
12 characters length or more
1 digit or more
1 symbol or more
1 uppercase letter or more
1 lowercase letter or more
"""
# calculating the length
length_error = len(password) < 12
# searching for digits
digit_error = re.search(r"\d", password) is None
# searching for uppercase
uppercase_error = re.search(r"[A-Z]", password) is None
# searching for lowercase
lowercase_error = re.search(r"[a-z]", password) is None
# searching for symbols
# ]\;',./!@#$%^&*()_+-=
    symbol_error = not any(i in "]\;',./!@#$%^&*()_+-=" for i in password)
# overall result
password_ok = not ( length_error or digit_error or uppercase_error or lowercase_error or symbol_error )
return password_ok
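# Usage sketch for password_check():
#
#     password_check('short')             # False: too short, no digit/upper/symbol
#     password_check('Str0ng&Secure!!')   # True: 15 chars, digit, symbol, both cases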
def check_admin(email=None):
"""
Check that admin account does not already exist
Returns boolean to indicate if it is true or false
"""
if email:
account = UserModel.objects(email=email).first()
if account and account.group == "admin":
return True
else:
admin = UserModel.objects(group="admin").first()
if admin:
# We only want to allow the creation of one admin
# Only the original admin can add new admins.
# Once created another admin cannot be added this way
# for security purposes.
print("Admins already exist!")
return True
else:
return False
else:
# Fake admin existence to avoid attempt to create admin with void email.
return True
def create_admin(email, password, fname, lname):
"""
Creates the first admin user
Returns boolean to indicate if the account was created or not
"""
if not password_check(password):
return False
else:
hash_pwd = hashlib.sha256(('CoRRPassword_%s'%password).encode("ascii")).hexdigest()
(account, created) = get_or_create(document=UserModel, created_at=str(datetime.datetime.utcnow()), email=email, group='admin', api_token=hashlib.sha256(('CoRRToken_%s_%s'%(email, str(datetime.datetime.utcnow()))).encode("ascii")).hexdigest())
if created:
account.password = hash_pwd
account.save()
(profile_model, created) = get_or_create(document=ProfileModel, created_at=str(datetime.datetime.utcnow()), user=account, fname=fname, lname=lname)
if created:
return True
else:
return False
else:
return False
content = {}
# Loading admin user account information.
# The instance admin should make sure to securely backup this file.
if os.path.isfile("/home/corradmin/credentials/tmp_admin.json"):
with open("/home/corradmin/credentials/tmp_admin.json", "r") as admin_stuff:
content = json.loads(admin_stuff.read())
try:
if not check_admin(content['admin-email']):
print("Creating an admin account!")
create_admin(content['admin-email'], content['admin-password'], content['admin-fname'], content['admin-lname'])
except:
print("An error occured!")
else:
with open("/tmp/tmp_admin.json", "r") as admin_stuff:
content = json.loads(admin_stuff.read())
try:
if not check_admin(content['admin-email']):
print("Creating an admin account!")
create_admin(content['admin-email'], content['admin-password'], content['admin-fname'], content['admin-lname'])
except:
print("An error occured!")
|
phillxnet/rockstor-core
|
src/rockstor/cli/iscsi_console.py
|
Python
|
gpl-3.0
| 1,794
| 0
|
"""
Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from base_console import BaseConsole
from share_iscsi_console import ShareIscsiConsole
from rest_util import api_call
class IscsiConsole(BaseConsole):
def __init__(self, prompt):
BaseConsole.__init__(self)
self.prompt = prompt + " Iscsi>"
self.url = BaseConsole.url + "sm/services/iscsi/"
def do_status(self, args):
iscsi_info = api_call(self.url)
print(iscsi_info)
def put_wrapper(self, args, command):
input_data = {
"command": command,
}
iscsi_info = api_call(self.url, data=input_data, calltype="put")
print(iscsi_info)
def do_start(self, args):
return self.put_wrapper(args, "start")
def do_stop(self, args):
return self.put_wrapper(args, "stop")
def do_share(self, args):
input_share = args.split()
if len(input_share) > 0:
si_console = ShareIscsiConsole(input_share[0])
if len(input_share) > 1:
si_console.onecmd(" ".join(input_share[1:]))
else:
si_console.cmdloop()
|
google/llvm-propeller
|
lldb/test/API/lang/cpp/alignas_base_class/TestAlignAsBaseClass.py
|
Python
|
apache-2.0
| 473
| 0.004228
|
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
@no_debug_info_test
def test(self):
self.build()
self.dbg.CreateTarget(self.getBuildArtifact("a.out"))
# The offset of f2 should be 8 because of `alignas(8)`.
self.expect_expr("(intptr_t)&d3g.f2 - (intptr_t)&d3g", result_value="8")
| |
dkelsey/payment-remittance-processor
|
scripts/verifyHeader.py
|
Python
|
apache-2.0
| 2,156
| 0.014842
|
#!/usr/bin/env python
'''
headderTemplate: an immutable tuple listing, in order, the expected headders
'''
headderTemplate = (
"Payee Name: ",
"Payee ID: ",
"Payee Site: ",
"Payment Number: ",
"Payment Date: ",
)
'''
headderConverters: a dictionary key'd on the Headder title. The value is a lambda to format data values
'''
headderConverters = {
"Payee Name: " : (lambda x: x),
"Payee ID: " : (lambda x: x),
"Payee Site: " : (lambda x: x),
"Payment Number: " : (lambda x: x),
"Payment Date: " : (lambda x: x),
}
'''
Perform some preliminary validation of the input file
'''
csvfile = 'BCLDB_Payment_Remittance_74596_2014-7-31.csv'
#f = open ('BCLDB_Payment_Remittance_75249_2014-8-11.csv', 'r')
#f = open ('BCLDB_Payment_Remittance_73976_2014-7-15.csv', 'r')
f = open (csvfile, 'r')
headders = ()
for l in f.readlines()[0:5]:
# make a list and slice the first value out and append to another tuple
headders = headders + (''.join(l).replace('"','').strip().split(',')[0],)
'''
validate the input file has the correct number of data headders
'''
if (len(headderTemplate) == len(headders)) :
print "File has same number of headders"
else:
print "File has different headders"
raise NameError('wrong number of headders')
'''
validate that the input file has the correct headders in the correct order
'''
for f, b in zip(headderTemplate,headders):
if (f != b) :
raise NameError('Headder Problems f != b: {0}, {1}'.format(f, b))
print "File has correct Headder in correct order"
'''
Everything seems ok so proceed with processing the file so that it is ready for import
into MySQL.
'''
# preprocess file for import and write new file
# convert Transaction Date and Batch Date to usable date types.
f = open (csvfile, 'r')
data = []
for h, l in zip(headders,f.readlines()[0:5]):
# parse out the data from the line
d = ''.join(l).replace('"','').strip().split(',')[1]
    # apply any formatting from the headderConverters and append to data
data = data + [','.join([ headderConverters[h](d) ])]
# print ','.join([ headderConverters[h](d) ])
print ','.join(data)
|
albertz/music-player
|
mac/pyobjc-framework-Cocoa/PyObjCTest/test_nsxmlnodeoptions.py
|
Python
|
bsd-2-clause
| 2,358
| 0.001272
|
from Foundation import *
from PyObjCTools.TestSupport import *
class TestNSXMLNodeOptions (TestCase):
def testConstants(self):
self.assertEqual(NSXMLNodeOptionsNone, 0)
self.assertEqual(NSXMLNodeIsCDATA, 1 << 0)
self.assertEqual(NSXMLNodeExpandEmptyElement, 1 << 1)
self.assertEqual(NSXMLNodeCompactEmptyElement, 1 << 2)
self.assertEqual(NSXMLNodeUseSingleQuotes, 1 << 3)
self.assertEqual(NSXMLNodeUseDoubleQuotes, 1 << 4)
self.assertEqual(NSXMLDocumentTidyHTML, 1 << 9)
self.assertEqual(NSXMLDocumentTidyXML, 1 << 10)
self.assertEqual(NSXMLDocumentValidate, 1 << 13)
self.assertEqual(NSXMLNodeLoadExternalEntitiesAlways, 1 << 14)
self.assertEqual(NSXMLNodeLoadExternalEntitiesSameOriginOnly, 1 << 15)
self.assertEqual(NSXMLNodeLoadExternalEntitiesNever, 1 << 19)
self.assertEqual(NSXMLDocumentXInclude, 1 << 16)
self.assertEqual(NSXMLNodePrettyPrint, 1 << 17)
self.assertEqual(NSXMLDocumentIncludeContentTypeDeclaration, 1 << 18)
self.assertEqual(NSXMLNodePreserveNamespaceOrder, 1 << 20)
self.assertEqual(NSXMLNodePreserveAttributeOrder, 1 << 21)
self.assertEqual(NSXMLNodePreserveEntities, 1 << 22)
self.assertEqual(NSXMLNodePreservePrefixes, 1 << 23)
        self.assertEqual(NSXMLNodePreserveCDATA, 1 << 24)
self.assertEqual(NSXMLNodePreserveWhitespace, 1 << 25)
self.assertEqual(NSXMLNodePreserveDTD, 1 << 26)
        self.assertEqual(NSXMLNodePreserveCharacterReferences, 1 << 27)
self.assertEqual(NSXMLNodePreserveEmptyElements, (
NSXMLNodeExpandEmptyElement | NSXMLNodeCompactEmptyElement))
self.assertEqual(NSXMLNodePreserveQuotes, (NSXMLNodeUseSingleQuotes | NSXMLNodeUseDoubleQuotes))
self.assertEqual(NSXMLNodePreserveAll & 0xFFFFFFFF, 0xFFFFFFFF & (
NSXMLNodePreserveNamespaceOrder |
NSXMLNodePreserveAttributeOrder |
NSXMLNodePreserveEntities |
NSXMLNodePreservePrefixes |
NSXMLNodePreserveCDATA |
NSXMLNodePreserveEmptyElements |
NSXMLNodePreserveQuotes |
NSXMLNodePreserveWhitespace |
NSXMLNodePreserveDTD |
NSXMLNodePreserveCharacterReferences |
0xFFF00000))
if __name__ == "__main__":
main()
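The composite-flag assertions at the end verify that an aggregate option equals the OR of its parts; a minimal sketch of that property using a generic stdlib IntFlag (Python 3), not the Cocoa constants themselves:
import enum

class NodeOpts(enum.IntFlag):
    EXPAND_EMPTY = 1 << 1
    COMPACT_EMPTY = 1 << 2
    PRESERVE_EMPTY = EXPAND_EMPTY | COMPACT_EMPTY  # aggregate = OR of parts

assert NodeOpts.PRESERVE_EMPTY == NodeOpts.EXPAND_EMPTY | NodeOpts.COMPACT_EMPTY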
|
GovReady/readthedocs.org
|
readthedocs/rtd_tests/mocks/mock_api.py
|
Python
|
mit
| 2,893
| 0
|
from contextlib import contextmanager
import json
import mock
# Mock tastypi API.
class ProjectData(object):
def get(self):
return dict()
def mock_version(repo):
class MockVersion(object):
def __init__(self, x=None):
pass
def put(self, x=None):
return x
def get(self, **kwargs):
# SCIENTIST DOG
version = json.loads("""
{
"active": false,
"built": false,
"id": "12095",
"identifier": "remotes/origin/zip_importing",
"resource_uri": "/api/v1/version/12095/",
"slug": "zip_importing",
"uploaded": false,
"verbose_name": "zip_importing"
}""")
project = json.loads("""
{
"absolute_url": "/projects/docs/",
"analytics_code": "",
"copyright": "",
"default_branch": "",
"default_version": "latest",
"description": "Make docs.readthedocs.org work :D",
"django_packages_url": "",
"documentation_type": "sphinx",
"id": "2599",
"modified_date": "2012-03-12T19:59:09.130773",
"name": "docs",
"project_url": "",
"pub_date": "2012-02-19T18:10:56.582780",
"repo": "git://github.com/rtfd/readthedocs.org",
"repo_type": "git",
"requirements_file": "",
"resource_uri": "/api/v1/project/2599/",
"slug": "docs",
"subdomain": "http://docs.readthedocs.org/",
"suffix": ".rst",
"theme": "default",
"use_virtualenv": false,
"users": [
"/api/v1/user/1/"
],
"version": ""
}""")
version['project'] = project
project['repo'] = repo
if 'slug' in kwargs:
return {'objects': [version], 'project': project}
else:
return version
return MockVersion
class MockApi(object):
def __init__(self, repo):
self.version = mock_version(repo)
def project(self, x):
return ProjectData()
@contextmanager
def mock_api(repo):
api_mock = MockApi(repo)
    # The original `and`-chained form applied only the last patch (a chain of
    # truthy patcher objects evaluates to its final element); patch all four.
    with mock.patch('readthedocs.restapi.client.api', api_mock), \
            mock.patch('readthedocs.api.client.api', api_mock), \
            mock.patch('readthedocs.projects.tasks.api_v2', api_mock), \
            mock.patch('readthedocs.projects.tasks.api_v1', api_mock):
        yield api_mock
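A hedged usage sketch for the context manager above; mock.patch imports the patched modules on entry, so this only runs where the readthedocs package is importable, and the repo URL is illustrative:
def _demo_mock_api():
    with mock_api('git://example.com/demo.git') as api:
        data = api.version('docs').get(slug='latest')
        # The mocked project carries the repo URL passed to mock_api.
        assert data['objects'][0]['project']['repo'] == 'git://example.com/demo.git'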
|
Osmose/snippets-service-prototype
|
snippets/base/urls.py
|
Python
|
bsd-3-clause
| 836
| 0.001196
|
from django.conf.urls.defaults import patterns, url
from snippets.base import views
urlpatterns = patterns('',
url(r'^$', views.index, name='base.index'),
url(r'^(?P<startpage_version>[^/]+)/(?P<name>[^/]+)/(?P<version>[^/]+)/'
'(?P<appbuildid>[^/]+)/(?P<build_target>[^/]+)/(?P<locale>[^/]+)/'
        '(?P<channel>[^/]+)/(?P<os_version>[^/]+)/(?P<distribution>[^/]+)/'
'(?P<distribution_version>[^/]+)/$', views.fetch_snippets,
name='view_snippets'),
url(r'^admin/base/snippet/preview/', views.preview_empty,
name='base.admin.preview_empty'),
url(r'^admin/base/snippet/(\d+)/preview/', views.preview_snippet,
name='base.admin.preview_snippet'),
url(r'^admin/base/snippettemplate/(\d+)/variables/',
views.admin_template_json, name='base.admin.template_json'),
)
|
litex-hub/lxbe-tool
|
lxbe_tool/providers/tool/fpga/xilinx/vivado.py
|
Python
|
apache-2.0
| 844
| 0.004739
|
def check_vivado(args):
vivado_path = get_command("vivado")
if vivado_path == None:
# Look for the default Vivado install directory
if os.name == 'nt':
base_dir = r"C:\Xilinx\Vivado"
else:
base_dir = "/opt/Xilinx/Vivado"
if os.path.exists(base_dir):
for file in os.listdir(base_dir):
bin_dir = base_dir + os.path.sep + file + os.path.sep + "bin"
if os.path.exists(bin_dir + os.path.sep + "vivado"):
os.environ["PATH"] += os.pathsep + bin_dir
vivado_path = bin_dir
break
if vivado_path == None:
return (False, "toolchain not found i
|
n your PATH", "download it from https://www.xilinx.com/support/download.html")
return (True
|
, "found at {}".format(vivado_path))
|
ric2b/Vivaldi-browser
|
chromium/build/android/pylib/output/local_output_manager_test.py
|
Python
|
bsd-3-clause
| 930
| 0.004301
|
#! /usr/bin/env vpython3
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=protected-access
import tempfile
import shutil
import unittest
from pylib.base import output_manager
from pylib.base import output_manager_test_case
from pylib.output import local_output_manager
class LocalOutputManagerTest(output_manager_test_case.OutputManagerTestCase):
def setUp(self):
self._output_dir = tempfile.mkdtemp()
self._output_manager = local_output_manager.LocalOutputManager(
self._output_dir)
def testUsableTempFile(self):
self.assertUsableTempFile(
        self._output_manager._CreateArchivedFile(
'test_file', 'test_subdir', output_manager.Datatype.TEXT))
def tearDown(self):
shutil.rmtree(self._output_dir)
if __name__ == '__main__':
unittest.main()
|
dennisobrien/bokeh
|
bokeh/models/layouts.py
|
Python
|
bsd-3-clause
| 6,846
| 0.003944
|
''' Various kinds of layout components.
'''
from __future__ import absolute_import
import logging
logger = logging.getLogger(__name__)
from ..core.enums import SizingMode
from ..core.has_props import abstract
from ..core.properties import Bool, Enum, Int, Instance, List, Seq, String
from ..core.validation import warning
from ..core.validation.warnings import BOTH_CHILD_AND_ROOT, EMPTY_LAYOUT
from ..model import Model
@abstract
class LayoutDOM(Model):
''' An abstract base class for layout components.
'''
width = Int(help="""
An optional width for the component (in pixels).
""")
height = Int(help="""
An optional height for the component (in pixels).
""")
disabled = Bool(False, help="""
Whether the widget will be disabled when rendered. If ``True``,
the widget will be greyed-out, and not respond to UI events.
""")
sizing_mode = Enum(SizingMode, default="fixed", help="""
How the item being displayed should size itself. Possible values are
``"fixed"``, ``"scale_width"``, ``"scale_height"``, ``"scale_both"``, and
``"stretch_both"``.
``"stretch_both"`` elements are completely responsive (independently in width and height) and
will resize to occupy all available space, even if this changes the aspect ratio of the element.
This is sometimes called outside-in, and is a typical behavior for desktop applications.
``"fixed"`` elements are not responsive. They will retain their original width and height
regardless of any subsequent browser window resize events.
``"scale_width"`` elements will responsively resize to fit to the width available, *while
maintaining the original aspect ratio*. This is a typical behavior for modern websites. For a
``Plot``, the aspect ratio ``plot_width/plot_height`` is maintained.
``"scale_height"`` elements will responsively resize to fit to the height available, *while
maintaining the original aspect ratio*. For a ``Plot``, the aspect ratio
``plot_width/plot_height`` is maintained. A plot with ``"scale_height"`` mode needs
to be wrapped in a ``Row`` or ``Column`` to be responsive.
``"scale_both"`` elements will responsively resize to for both the width and height available,
*while maintaining the original aspect ratio*.
""")
# List in order for in-place changes to trigger changes, ref: https://github.com/bokeh/bokeh/issues/6841
css_classes = List(String, help="""
A list of css class names to add to this DOM element. Note: the class names are
simply added as-is, no other guarantees are provided.
It is also permissible to assign from tuples, however these are adapted -- the
property will always contain a list.
""").accepts(Seq(String), lambda x: list(x))
class Spacer(LayoutDOM):
''' A container for space used to fill an empty spot in a row or column.
'''
class WidgetBox(LayoutDOM):
''' A container for widgets that are part of a layout.
'''
def __init__(self, *args, **kwargs):
if len(args) > 0 and "children" in kwargs:
raise ValueError("'children' keyword cannot be used with positional arguments")
elif len(args) > 0:
kwargs["children"] = list(args)
super(WidgetBox, self).__init__(**kwargs)
@warning(EMPTY_LAYOUT)
def _check_empty_layout(self):
from itertools import chain
if not list(chain(self.children)):
return str(self)
@warning(BOTH_CHILD_AND_ROOT)
def _check_child_is_also_root(self):
problems = []
for c in self.children:
if c.document is not None and c in c.document.roots:
problems.append(str(c))
if problems:
return ", ".join(problems)
else:
return None
children = List(Instance('bokeh.models.widgets.Widget'), help="""
The list of widgets to put in the layout box.
""")
@abstract
class Box(LayoutDOM):
''' Abstract base class for Row and Column. Do not use directly.
'''
def __init__(self, *args, **kwargs):
if len(args) > 0 and "children" in kwargs:
raise ValueError("'children' keyword cannot be used with positional arguments")
elif len(args) > 0:
kwargs["children"] = list(args)
unwrapped_children = kwargs.get("children", [])
kwargs["children"] = self._wrap_children(unwrapped_children)
super(Box, self).__init__(**kwargs)
def _wrap_children(self, children):
''' Wrap any Widgets of a list of child layouts in a WidgetBox.
This allows for the convenience of just spelling Row(button1, button2).
'''
from .widgets.widget import Widget
wrapped_children = []
for child in children:
if isinstance(child, Widget):
child = WidgetBox(
children=[child],
sizing_mode=child.sizing_mode,
width=child.width,
height=child.height,
disabled=child.disabled
)
wrapped_children.append(child)
return wrapped_children
@warning(EMPTY_LAYOUT)
def _check_empty_layout(self):
from itertools import chain
if not list(chain(self.children)):
return str(self)
@warning(BOTH_CHILD_AND_ROOT)
def _check_child_is_also_root(self):
problems = []
for c in self.children:
if c.document is not None and c in c.document.roots:
problems.append(str(c))
if problems:
return ", ".join(problems)
else:
return None
#TODO Debating the following instead to prevent people adding just a plain
# widget into a box, which sometimes works and sometimes looks disastrous
#children = List(
# Either(
# Instance('bokeh.models.layouts.Row'),
# Instance('bokeh.models.layouts.Column'),
# Instance('bokeh.models.plots.Plot'),
# Instance('bokeh.models.layouts.WidgetBox')
# ), help="""
# The list of children, which can be other components including plots, rows, columns, and widgets.
#""")
children = List(Instance(LayoutDOM), help="""
The list of children, which can be other components including plots, rows, columns, and widgets.
""")
class Row(Box):
''' Lay out child components in a single horizontal row.
Children can be specified as positional arguments, as a single argument
that is a sequence, or using the ``children`` keyword argument.
'''
class Column(Box):
    ''' Lay out child components in a single vertical column.
Children can be specified as positional arguments, as a single argument
that is a sequence, or using the ``children`` keyword argument.
'''
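A minimal usage sketch against the Box behavior defined above (assuming a bokeh build matching this file is installed): widgets passed positionally get wrapped in a WidgetBox.
from bokeh.models.layouts import Row, WidgetBox
from bokeh.models.widgets import Button

row = Row(Button(label="a"), Button(label="b"), sizing_mode="scale_width")
# Box._wrap_children wrapped each raw Widget in a WidgetBox.
assert all(isinstance(child, WidgetBox) for child in row.children)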
|
tdyas/pants
|
src/python/pants/backend/jvm/tasks/coverage/manager.py
|
Python
|
apache-2.0
| 6,621
| 0.002568
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import logging
import os
import shutil
from pants.backend.jvm.subsystems.scoverage_platform import ScoveragePlatform
from pants.backend.jvm.tasks.coverage.cobertura import Cobertura
from pants.backend.jvm.tasks.coverage.engine import NoCoverage
from pants.backend.jvm.tasks.coverage.jacoco import Jacoco
from pants.backend.jvm.tasks.coverage.scoverage import Scoverage
from pants.subsystem.subsystem import Subsystem
from pants.util.dirutil import safe_mkdir
from pants.util.strutil import safe_shlex_split
logger = logging.getLogger(__name__)
class CodeCoverageSettings:
"""A class containing settings for code coverage tasks."""
def __init__(
self,
options,
context,
workdir,
tool_classpath,
confs,
log,
copy2=shutil.copy2,
copytree=shutil.copytree,
is_file=os.path.isfile,
safe_md=safe_mkdir,
):
self.options = options
self.context = context
self.workdir = workdir
self.tool_classpath = tool_classpath
self.confs = confs
self.log = log
self.coverage_dir = os.path.join(self.workdir, "coverage")
self.coverage_jvm_options = []
for jvm_option in options.coverage_jvm_options:
self.coverage_jvm_options.extend(safe_shlex_split(jvm_option))
self.coverage_open = options.coverage_open
self.coverage_force = options.coverage_force
# Injecting these methods to make unit testing cleaner.
self.copy2 = copy2
self.copytree = copytree
self.is_file = is_file
self.safe_makedir = safe_md
@classmethod
def from_task(cls, task, workdir=None):
return cls(
options=task.get_options(),
context=task.context,
workdir=workdir or task.workdir,
tool_classpath=task.tool_classpath,
confs=task.confs,
log=task.context.log,
)
class CodeCoverage(Subsystem):
"""Manages setup and construction of JVM code coverage engines."""
options_scope = "coverage"
@classmethod
def subsystem_dependencies(cls):
return super().subsystem_dependencies() + (
Cobertura.Factory,
Jacoco.Factory,
Scoverage.Factory,
)
# TODO(jtrobec): move these to subsystem scope after deprecating
@staticmethod
def register_junit_options(register, register_jvm_tool):
register("--coverage", type=bool, fingerprint=True, help="Collect code coverage data.")
register(
"--coverage-processor",
advanced=True,
fingerprint=True,
choices=["cobertura", "jacoco", "scoverage"],
default=None,
help="Which coverage processor to use if --coverage is enabled. If this option is "
"unset but coverage is enabled implicitly or explicitly, defaults to 'cobertura'. "
"If this option is explicitly set, implies --coverage. If this option is set to "
"scoverage, then first scoverage MUST be enabled by passing option "
"--scoverage-enable-scoverage.",
)
        # We need to fingerprint this even though it is nominally a UI-only option, since the
# presence of this option alone can implicitly flag on `--coverage`.
register(
"--coverage-open",
type=bool,
fingerprint=True,
help="Open the generated HTML coverage report in a browser. Implies --coverage ",
)
register(
"--coverage-jvm-options",
advanced=True,
type=list,
            fingerprint=True,
help="JVM flags to be added when running the coverage processor. For example: "
"{flag}=-Xmx4g {flag}=-Xms2g".format(flag="--coverage-jvm-options"),
)
register(
"--coverage-force",
advanced=True,
type=bool,
help="Attempt to run the reporting phase of coverage even if tests failed
|
"
"(defaults to False, as otherwise the coverage results would be unreliable).",
)
# register options for coverage engines
# TODO(jtrobec): get rid of these calls when engines are dependent subsystems
Cobertura.register_junit_options(register, register_jvm_tool)
class InvalidCoverageEngine(Exception):
"""Indicates an invalid coverage engine type was selected."""
def get_coverage_engine(self, task, output_dir, all_targets, execute_java):
options = task.get_options()
enable_scoverage = ScoveragePlatform.global_instance().get_options().enable_scoverage
processor = options.coverage_processor
if processor == "scoverage" and not enable_scoverage:
raise self.InvalidCoverageEngine(
"Cannot set processor to scoverage without first enabling "
"scoverage (by passing --scoverage-enable-scoverage option)"
)
if enable_scoverage:
if processor not in (None, "scoverage"):
raise self.InvalidCoverageEngine(
f"Scoverage is enabled. "
f"Cannot use {processor} as the engine. Set engine to scoverage "
f"(--test-junit-coverage-processor=scoverage)"
)
processor = "scoverage"
if options.coverage or processor or options.is_flagged("coverage_open"):
settings = CodeCoverageSettings.from_task(task, workdir=output_dir)
if processor in ("cobertura", None):
return Cobertura.Factory.global_instance().create(
settings, all_targets, execute_java
)
elif processor == "jacoco":
return Jacoco.Factory.global_instance().create(settings, all_targets, execute_java)
elif processor == "scoverage":
return Scoverage.Factory.global_instance().create(
settings, all_targets, execute_java
)
else:
# NB: We should never get here since the `--coverage-processor` is restricted by `choices`,
# but for clarity.
raise self.InvalidCoverageEngine(
"Unknown and unexpected coverage processor {!r}!".format(
options.coverage_processor
)
)
else:
return NoCoverage()
|
390910131/Misago
|
misago/users/avatars/dynamic.py
|
Python
|
gpl-2.0
| 2,147
| 0.000466
|
from importlib import import_module
import math
import os
from PIL import Image, ImageDraw, ImageColor, ImageFont, ImageFilter
from misago.conf import settings
from misago.users.avatars import store
def set_avatar(user):
name_bits = settings.MISAGO_DYNAMIC_AVATAR_DRAWER.split('.')
drawer_module = '.'.join(name_bits[:-1])
drawer_module = import_module(drawer_module)
drawer_function = getattr(drawer_module, name_bits[-1])
image = drawer_function(user)
store.store_new_avatar(user, image)
"""
Default drawer
"""
def draw_default(user):
image_size = max(settings.MISAGO_AVATARS_SIZES)
image = Image.new("RGBA", (image_size, image_size), 0)
image = draw_avatar_bg(user, image)
image = draw_avatar_flavour(user, image)
return image
COLOR_WHEEL = ('#d32f2f', '#c2185b', '#7b1fa2', '#512da8',
'#303f9f', '#1976d2', '#0288D1', '#0288d1',
'#0097a7', '#00796b', '#388e3c', '#689f38',
'#afb42b', '#fbc02d', '#ffa000', '#f57c00',
'#e64a19')
COLOR_WHEEL_LEN = len(COLOR_WHEEL)
def draw_avatar_bg(user, image):
image_size = image.size
color_index = user.pk - COLOR_WHEEL_LEN * (user.pk / COLOR_WHEEL_LEN)
main_color = COLOR_WHEEL[color_index]
rgb = ImageColor.getrgb(main_color)
bg_drawer = ImageDraw.Draw(image)
bg_drawer.rectangle([(0, 0), image_size], rgb)
return image
FONT_FILE = os.path.join(os.path.dirname(__file__), 'font.ttf')
def draw_avatar_flavour(user, image):
string = user.username[0]
image_size = image.size[0]
size = int(image_size * 0.7)
font = ImageFont.truetype(FONT_FILE, size=size)
text_size = font.getsize(string)
    text_pos = ((image_size - text_size[0]) / 2,
(image_size - text_size[1]) / 2)
writer = ImageDraw.Draw(image)
writer.text(text_pos, string, font=font)
return image
"""
Some utils for drawing avatars programmatically
"""
CHARS = 'qwertyuiopasdfghjklzxcvbnm1234567890'
def string_to_int(string):
value = 0
for p, c in enumerate(string.lower()):
value += p * (CHARS.find(c))
return value
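The color_index arithmetic in draw_avatar_bg is Python 2 integer division spelling out a modulo; a Python 3 sketch of the same deterministic pick (palette truncated here for illustration):
PALETTE = ('#d32f2f', '#c2185b', '#7b1fa2')  # any fixed palette works

def wheel_color(pk):
    # pk - n * (pk // n) is exactly pk % n.
    return PALETTE[pk % len(PALETTE)]

assert wheel_color(0) == wheel_color(len(PALETTE))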
|
skyoo/jumpserver
|
apps/common/thread_pools.py
|
Python
|
gpl-2.0
| 538
| 0
|
from concurrent.futures import ThreadPoolExecutor
class SingletonThreadPoolExecutor(ThreadPoolExecutor):
"""
    Do not instantiate this class directly.
"""
    def __new__(cls, max_workers=None, thread_name_prefix=None):
if cls is SingletonThreadPoolExecutor:
raise NotImplementedError
        if getattr(cls, '_object', None) is None:
cls._object = ThreadPoolExecutor(
max_workers=max_workers,
thread_name_prefix=thread_name_prefix
)
return cls._object
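A hedged usage sketch building on the class defined above (the subclass name is hypothetical): concrete subclasses share a single executor, while instantiating the base class directly raises NotImplementedError.
class AnsibleExecutor(SingletonThreadPoolExecutor):  # hypothetical subclass
    pass

first = AnsibleExecutor(max_workers=4)
second = AnsibleExecutor(max_workers=8)  # arguments ignored; instance reused
assert first is second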
|
lucperkins/heron
|
heron/shell/src/python/handlers/pmaphandler.py
|
Python
|
apache-2.0
| 1,144
| 0.004371
|
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright 2016 Twitter. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
''' pmaphandler.py '''
import json
import tornado.web
from heron.shell.src.python import utils
class PmapHandler(tornado.web.RequestHandler):
"""
Responsible for reporting memory map of a process given its pid.
"""
# pylint: disable=attribute-defined-outside-init
  @tornado.web.asynchronous
def get(self, pid):
''' get method '''
body = utils.str_cmd(['pmap', '-pXX', pid], None, None)
self.content_type = 'application/json'
self.write(json.dumps(body))
self.finish()
|
JPETTomography/j-pet-gate-tools
|
examples/python_scripts/plot_sf_and_necr.py
|
Python
|
apache-2.0
| 7,141
| 0.028567
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt
import matplotlib.pyplot as plt2
from numpy import *
from scipy import interpolate
from matplotlib import rcParams, rcParamsDefault
import argparse
from nema_common import *
outputformat = ""
def plot_rates(geometry,float_activity,N_true,N_dsca,N_psca,N_acci,time):
rcParams['font.size'] = 20
rcParams['legend.fontsize'] = 16
fig, axs = plt2.subplots(nrows=1, ncols=1, sharex=True)
plt2.subplots_adjust(left=0.2, right=0.95, top=0.95, bottom=0.17)
plt2.plot(float_activity,N_true/time/1000.,'o-',label="true", markersize=4)
plt2.plot(float_activity,N_dsca/time/1000.,'o-',label="dsca", markersize=4)
plt2.plot(float_activity,N_psca/time/1000.,'o-',label="psca", markersize=4)
plt2.plot(float_activity,N_acci/time/1000.,'o-',label="acci", markersize=4)
plt2.legend(loc=2)
plt2.xlim(0,90)
plt2.ylim(ymin=0)
plt2.xlabel("Activity concentration [kBq/cc]")
plt2.ylabel("Rate [kcps]")
plt2.savefig(workdir_NECR + geometry + "_rates." + outputformat)
plt2.clf()
plt2.close()
rcParams.update(rcParamsDefault)
def plot_necrs(float_activities, NECRs, colors, labels, necr_type, lstyles):
fig, axs = plt.subplots(nrows=1, ncols=1, sharex=True)
for i in xrange(len(NECRs)):
plt.plot(float_activities[i], NECRs[i], lstyles[i], color=colors[i], label=labels[i], markersize=4)
rcParams.update(rcParamsDefault)
rcParams['legend.fontsize'] = 11
rcParams['font.size'] = 20
FONTSIZE = 20
plt.subplots_adjust(left=0.2, right=0.95, top=0.95, bottom=0.17)
plt.legend(loc=1)
plt.xlim(0,90)
plt.ylim(0,1.1*NECR_sin_max)
plt.xticks(fontsize=FONTSIZE)
plt.yticks(fontsize=FONTSIZE)
plt.xlabel("Activity concentration [kBq/cc]", fontsize=FONTSIZE)
plt.ylabel("NECR [kcps]", fontsize=FONTSIZE)
plt.savefig(workdir_NECR + "NECR_all_geometries_" + necr_type + '.' + outputformat)
plt.clf()
plt.close()
def calculate_reduction_for_necr_simulations(necr_simulations):
for g in geometries_NECR:
sls_file = workdir_NECR + g + "/second_lvl_selection.txt"
if os.path.exists(sls_file):
os.system('rm ' + sls_file)
for a in activities_NECR:
coincidences_file = necr_simulations + "/" + g + "_" + a + "_NECR_COINCIDENCES_short"
tmp = loadtxt(coincidences_file)
posX1 = tmp[:,0]
posY1 = tmp[:,1]
times1 = tmp[:,3]
posX2 = tmp[:,4]
posY2 = tmp[:,5]
times2 = tmp[:,7]
[tim_diffs, ang_diffs] = calculate_differences(times1, times2, posX1, posY1, posX2, posY2)
[counter_above, counter_below] = calculate_counters(tim_diffs, ang_diffs)
with open(sls_file, "a") as myfile:
myfile.write("{0}\t{1}\t{2}\n".format(counter_above, counter_below, counter_above+counter_below))
print g + "\t" + a + "\t" + str(counter_above) + "\t" + str(counter_below) + "\t" + str(counter_above+counter_below)
def plot_reduction_for_necr_simulations():
rcParams['font.size'] = 24
rcParams['legend.fontsize'] = 18
activities = []
for a in activities_NECR:
activities.append(float(a)/22000.*1000) # in kBq/cc
new_activities = linspace(activities[0],activities[-1],100)
fig = plt.figure(figsize=(8, 6))
ax = fig.add_subplot(111)
plt.ylim(ymin=0,ymax=80)
plt.xlim(xmin=0,xmax=90)
for g in geometries_NECR:
lab = ""
c = ""
l = ""
if "1lay" in g:
lab += "1 layer, "
c = 'k'
elif "2lay" in g:
lab += "2 layers, "
c = 'r'
if "L020" in g:
lab += "L = 20 cm"
l = '--'
elif "L050" in g:
lab += "L = 50 cm"
l = '-'
elif "L100" in g:
lab += "L = 100 cm"
l = '-.'
elif "L200" in g:
lab += "L = 200 cm"
l = ':'
sls_file = workdir_NECR + g + "/second_lvl_selection.txt"
if os.path.exists(sls_file):
tmp = loadtxt(sls_file)
counter_above = tmp[:,0]
counter_below = tmp[:,1]
reduction = counter_below/(counter_above+counter_below)*100.
new_reduction = interpolate.splev(new_activities, interpolate.splrep(activities, reduction, s=5), der=0)
plt.plot(new_activities, new_reduction, linestyle=l, color=c, label=lab)
plt.legend(loc=4)
plt.xlabel("Activity concentration [kBq/cc]")
plt.ylabel("Reduction [%]")
plt.savefig(workdir_NECR + "second_lvl_selection" + outputformat, bbox_inches='tight')
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Plot NECR.',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-of', '--outputformat',
type=str,
default="png",
help='output format of images')
parser.add_argument('-r', '--reduction',
dest='reduction',
action='store_true',
help='set if you want to calculate and plot the reduction ' +
'given by the 2nd level of the reduction method in case ' +
'of the NECR simulations (must be used with -ns option)')
parser.add_argument('-ns', '--necr-simulations',
dest='necr_simulations',
type=str,
help='path to the base directory of the NECR simulations')
args = parser.parse_args()
outputformat = args.outputformat
if args.reduction:
create_work_directories()
calculate_reduction_for_necr_simulations(args.necr_simulations)
plot_reduction_for_necr_simulations()
else:
plt.subplots(nrows=1, ncols=1, sharex=True)
float_activities = []
NECRs_sin = []
NECRs_ctr = []
colors = []
labels = []
lstyles = []
for geometry in geometries_NECR:
tmp = loadtxt(workdir_NECR + geometry + "/necr_dependency.txt")
float_activity = tmp[:,0]
SF_sin = tmp[:,1]
SF_crt = tmp[:,2]
NECR_sin = tmp[:,3]
NECR_sin_max = max(NECR_sin)
        NECR_ctr = tmp[:,4]
T = tmp[:,5]
S = tmp[:,6]
N_true = tmp[:,7]
N_dsca = tmp[:,8]
N_psca = tmp[:,9]
N_acci = tmp[:,10]
        time = tmp[:,11]
plot_rates(geometry,float_activity,N_true,N_dsca,N_psca,N_acci,time)
new_label = ""
if "1lay" in geometry:
linestyle='o-'
new_label += "1 layer"
else:
linestyle = 'o--'
new_label += "2 layers"
if "L020" in geometry:
datacolor = 'r'
new_label += ", L = 20 cm"
elif "L050" in geometry:
datacolor = 'b'
new_label += ", L = 50 cm"
elif "L100" in geometry:
datacolor = 'y'
new_label += ", L = 100 cm"
elif "L200" in geometry:
datacolor = 'g'
new_label += ", L = 200 cm"
float_activities.append(float_activity)
NECRs_sin.append(NECR_sin)
NECRs_ctr.append(NECR_ctr)
colors.append(datacolor)
labels.append(new_label)
lstyles.append(linestyle)
plot_necrs(float_activities, NECRs_sin, colors, labels, "sin", lstyles)
plot_necrs(float_activities, NECRs_ctr, colors, labels, "ctr", lstyles)
|
thetomcraig/redwood
|
web/node_modules/hiredis/build/c4che/build.config.py
|
Python
|
isc
| 434
| 0.002304
|
version = 0x105016
tools = [{'tool': 'ar', 'tooldir': None, 'funs': None}, {'tool': 'cc', 'tooldir': None, 'funs': None}, {'tool': 'gcc', 'tooldir': None, 'funs': None}, {'tool': 'compiler_cc', 'tooldir': None, 'funs': None}, {'tool': 'cxx', 'tooldir': None, 'funs': None}, {'tool': 'gxx', 'tooldir': None, 'funs': None}, {'tool': 'compiler_cxx', 'tooldir': None, 'funs': None}, {'tool': 'node_addon', 'tooldir': None, 'funs': None}]
|
mushkevych/scheduler
|
synergy/mx/freerun_action_handler.py
|
Python
|
bsd-3-clause
| 4,312
| 0.001623
|
__author__ = 'Bohdan Mushkevych'
import json
from synergy.db.model.freerun_process_entry import FreerunProcessEntry
from synergy.db.dao.freerun_process_dao import FreerunProcessDao
from synergy.mx.base_request_handler import valid_action_request
from synergy.mx.abstract_action_handler import AbstractActionHandler
from synergy.scheduler.scheduler_constants import STATE_MACHINE_FREERUN
class FreerunActionHandler(AbstractActionHandler):
def __init__(self, request, **values):
super(FreerunActionHandler, self).__init__(request, **values)
self.process_name = self.request_arguments.get('process_name')
self.entry_name = self.request_arguments.get('entry_name')
self.freerun_process_dao = FreerunProcessDao(self.logger)
self.is_request_valid = True if self.process_name and self.entry_name else False
if self.is_request_valid:
self.process_name = self.process_name.strip()
self.entry_name = self.entry_name.strip()
self.is_requested_state_on = self.request_arguments.get('is_on') == 'on'
@AbstractActionHandler.thread_handler.getter
def thread_handler(self):
handler_key = (self.process_name, self.entry_name)
return self.scheduler.freerun_handlers[handler_key]
@AbstractActionHandler.process_entry.getter
def process_entry(self):
return self.thread_handler.process_entry
@AbstractActionHandler.uow_id.getter
def uow_id(self):
return self.process_entry.related_unit_of_work
@valid_action_request
def cancel_uow(self):
freerun_state_machine = self.scheduler.timetable.state_machines[STATE_MACHINE_FREERUN]
freerun_state_machine.cancel_uow(self.process_entry)
return self.reply_ok()
@valid_action_request
def get_event_log(self):
return {'event_log': self.process_entry.event_log}
@valid_action_request
def create_entry(self):
process_entry = FreerunProcessEntry()
process_entry.process_name = self.process_name
process_entry.entry_name = self.entry_name
if self.request_arguments['arguments']:
arguments = self.request_arguments['arguments']
if isinstance(arguments, bytes):
arguments = arguments.decode('unicode-escape')
process_entry.arguments = json.loads(arguments)
else:
process_entry.arguments = {}
process_entry.description = self.request_arguments['description']
process_entry.is_on = self.is_requested_state_on
process_entry.trigger_frequency = self.request_arguments['trigger_frequency']
self.freerun_process_dao.update(process_entry)
self.scheduler._register_process_entry(process_entry, self.scheduler.fire_freerun_worker)
return self.reply_ok()
@valid_action_request
def delete_entry(self):
handler_key = (self.process_name, self.entry_name)
self.thread_handler.deactivate()
self.freerun_process_dao.remove(handler_key)
del self.scheduler.freerun_handlers[handler_key]
self.logger.info(f'MX: Deleted FreerunThreadHandler for {handler_key}')
return self.reply_ok()
@valid_action_request
def update_entry(self):
is_interval_changed = self.process_entry.trigger_frequency != self.request_arguments['trigger_frequency']
if self.request_arguments['arguments']:
arguments = self.request_arguments['arguments']
if isinstance(arguments, bytes):
arguments = arguments.decode('unicode-escape')
self.process_entry.arguments = json.loads(arguments)
else:
self.process_entry.arguments = {}
self.process_entry.description = self.request_arguments['description']
self.process_entry.is_on = self.is_requested_state_on
self.process_entry.trigger_frequency = self.request_arguments['trigger_frequency']
self.freerun_process_dao.update(self.process_entry)
if is_interval_changed:
self.change_interval()
if self.process_entry.is_on != self.is_requested_state_on:
if self.is_requested_state_on:
self.activate_trigger()
else:
self.deactivate_trigger()
return self.reply_ok()
|
txtbits/daw-python
|
primeros ejercicios/Ejercicios entradasalida/ejercicio6.py
|
Python
|
mit
| 445
| 0.008989
|
# Compute the area of a rectangle (aligned with the x and y axes) given its coordinates x1, x2, y1, y2.
from math import pi
print 'Exercise 6'
print '-'*60
x1 = float(raw_input('Enter x1: '))
x2 = float(raw_input('Enter x2: '))
y1 = float(raw_input('Enter y1: '))
y2 = float(raw_input('Enter y2: '))
base = x2 - x1
altura = y2 - y1
print 'The area of the rectangle is: ', base * altura
raw_input('Press enter to finish')
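Note that base * altura goes negative whenever the coordinates arrive reversed; a hedged variant that is robust to argument order:
def rect_area(x1, x2, y1, y2):
    # abs() makes the result independent of which corner comes first.
    return abs(x2 - x1) * abs(y2 - y1)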
|
Wittlich/DAT210x-Python
|
Module6/assignment5.py
|
Python
|
mit
| 2,871
| 0.00418
|
import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn import tree
from subprocess import call
# https://archive.ics.uci.edu/ml/machine-learning-databases/mushroom/agaricus-lepiota.names
#
# TODO: Load up the mushroom dataset into dataframe 'X'
# Verify you did it properly.
# Indices shouldn't be doubled.
# Header information is on the dataset's website at the UCI ML Repo
# Check NA Encoding
X = pd.read_csv('Datasets/agaricus-lepiota.data', names=['label', 'cap-shape', 'cap-surface', 'cap-color',
                                                          'bruises', 'odor', 'gill-attachment',
                                                          'gill-spacing', 'gill-size', 'gill-color',
                                                          'stalk-shape', 'stalk-root',
                                                          'stalk-surface-above-ring',
                                                          'stalk-surface-below-ring', 'stalk-color-above-ring',
                                                          'stalk-color-below-ring', 'veil-type', 'veil-color',
                                                          'ring-number', 'ring-type', 'spore-print-color', 'population',
                                                          'habitat'], header=None)
# INFO: An easy way to show which rows have nans in them
# print X[pd.isnull(X).any(axis=1)]
#
# TODO: Go ahead and drop any row with a nan
X.replace(to_replace='?', value=np.NaN, inplace=True)
X.dropna(axis=0, inplace=True)
print(X.shape)
#
# TODO: Copy the labels out of the dataset into variable 'y' then remove
# them from X. Encode the labels using the .map() trick shown in
# Module 5 -- here edible ('e') maps to 1 and poisonous ('p') to 0.
X['label'] = X['label'].map({'e': 1, 'p': 0})
y = X['label'].copy()
X.drop(labels=['label'], axis=1, inplace=True)
#
# TODO: Encode the entire dataset using dummies
X = pd.get_dummies(X)
#
# TODO: Split your data into test / train sets
# Your test size can be 30% with random_state 7
# Use variable names: X_train, X_test, y_train, y_test
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=7)
#
# TODO: Create an DT classifier. No need to set any parameters
model = tree.DecisionTreeClassifier()
#
# TODO: train the classifier on the training data / labels:
# TODO: score the classifier on the testing data / labels:
model.fit(X_train, y_train)
score = model.score(X_test, y_test)
print('High-Dimensionality Score: %f' % round((score * 100), 3))
#
# TODO: Use the code on the course's SciKit-Learn page to output a .DOT file,
# then render the .DOT to .PNGs. Ensure you have graphviz installed.
# If not, `brew install graphviz`. If you can't, use: http://webgraphviz.com/
tree.export_graphviz(model.tree_, out_file='tree.dot', feature_names=X.columns)
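A self-contained sketch of the same encode/train/score flow on a synthetic frame, so the dummy-encoding step is visible end to end (toy data, not the mushroom set):
import pandas as pd
from sklearn import tree

toy = pd.DataFrame({'label': ['e', 'p', 'e', 'p'], 'odor': ['a', 'f', 'a', 'f']})
toy['label'] = toy['label'].map({'e': 1, 'p': 0})
y_toy = toy['label'].copy()
X_toy = pd.get_dummies(toy.drop(labels=['label'], axis=1))
clf = tree.DecisionTreeClassifier().fit(X_toy, y_toy)
assert clf.score(X_toy, y_toy) == 1.0  # perfectly separable by construction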
|
yephper/django
|
tests/m2m_through_regress/tests.py
|
Python
|
bsd-3-clause
| 10,746
| 0.00214
|
from __future__ import unicode_literals
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from django.utils.six import StringIO
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.bob = Person.objects.create(name="Bob")
cls.jim = Person.objects.create(name="Jim")
cls.rock = Group.objects.create(name="Rock")
cls.roll = Group.objects.create(name="Roll")
cls.frank = User.objects.create_user("frank", "frank@example.com", "password")
cls.jane = User.objects.create_user("jane", "jane@example.com", "password")
# normal intermediate model
cls.bob_rock = Membership.objects.create(person=cls.bob, group=cls.rock)
cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll, price=50)
cls.jim_rock = Membership.objects.create(person=cls.jim, group=cls.rock, price=50)
# intermediate model with custom id column
cls.frank_rock = UserMembership.objects.create(user=cls.frank, group=cls.rock)
cls.frank_roll = UserMembership.objects.create(user=cls.frank, group=cls.roll)
cls.jane_rock = UserMembership.objects.create(user=cls.jane, group=cls.rock)
def test_retrieve_reverse_m2m_items(self):
self.assertQuerysetEqual(
self.bob.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
def test_retrieve_forward_m2m_items(self):
        self.assertQuerysetEqual(
self.roll.members.all(), [
"<Person: Bob>",
]
)
def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
msg = (
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use m2m_through_regress.Membership's Manager "
"instead."
)
with self.assertRaisesMessage(AttributeError, msg):
self.bob.group_set.set([])
def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self):
msg = (
"Cannot set values on a ManyToManyField which specifies an "
"intermediary model. Use m2m_through_regress.Membership's Manager "
"instead."
)
with self.assertRaisesMessage(AttributeError, msg):
self.roll.members.set([])
def test_cannot_use_create_on_m2m_with_intermediary_model(self):
with self.assertRaises(AttributeError):
self.rock.members.create(name="Anne")
def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
with self.assertRaises(AttributeError):
self.bob.group_set.create(name="Funk")
def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self):
self.assertQuerysetEqual(
self.frank.group_set.all(), [
"<Group: Rock>",
"<Group: Roll>",
],
ordered=False
)
def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self):
self.assertQuerysetEqual(
self.roll.user_members.all(), [
"<User: frank>",
]
)
def test_join_trimming_forwards(self):
"Check that we don't involve too many copies of the intermediate table when doing a join. Refs #8046, #8254"
self.assertQuerysetEqual(
self.rock.members.filter(membership__price=50), [
"<Person: Jim>",
]
)
def test_join_trimming_reverse(self):
self.assertQuerysetEqual(
self.bob.group_set.filter(membership__price=50), [
"<Group: Roll>",
]
)
class M2MThroughSerializationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
cls.bob = Person.objects.create(name="Bob")
cls.roll = Group.objects.create(name="Roll")
cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll)
def test_serialization(self):
"m2m-through models aren't serialized as m2m fields. Refs #8134"
pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk}
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
self.assertJSONEqual(
out.getvalue().strip(),
'[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": '
'100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": '
'"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]'
% pks
)
out = StringIO()
management.call_command("dumpdata", "m2m_through_regress", format="xml",
indent=2, stdout=out)
self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
class ToFieldThroughTests(TestCase):
def setUp(self):
self.car = Car.objects.create(make="Toyota")
self.driver = Driver.objects.create(name="Ryan Briscoe")
CarDriver.objects.create(car=self.car, driver=self.driver)
# We are testing if wrong objects get deleted due to using wrong
# field value in m2m queries. So, it is essential that the pk
# numberings do not match.
# Create one intentionally unused driver to mix up the autonumbering
self.unused_driver = Driver.objects.create(name="Barney Gumble")
# And two intentionally unused cars.
self.unused_car1 = Car.objects.create(make="Trabant")
self.unused_car2 = Car.objects.create(make="Wartburg")
def test_to_field(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
def test_to_field_reverse(self):
self.assertQuerysetEqual(
self.driver.car_set.all(),
["<Car: Toyota>"]
)
def test_to_field_clear_reverse(self):
self.driver.car_set.clear()
self.assertQuerysetEqual(
self.driver.car_set.all(), [])
def test_to_field_clear(self):
self.car.drivers.clear()
self.assertQuerysetEqual(
self.car.drivers.all(), [])
# Low level tests for _add_items and _remove_items. We test these methods
# because .add/.remove aren't available for m2m fields with through, but
# through is the only way to set to_field currently. We do want to make
# sure these methods are ready if the ability to use .add or .remove with
# to_field relations is added some day.
def test_add(self):
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Ryan Briscoe>"]
)
# Yikes - barney is going to drive...
self.car.drivers._add_items('car', 'driver', self.unused_driver)
self.assertQuerysetEqual(
self.car.drivers.all(),
["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
)
def test_add_null(self):
nullcar = Car.objects.create(make=None)
with se
|
one-love/api
|
devel.py
|
Python
|
gpl-3.0
| 116
| 0
|
from config import configs
from onelove import create_app
config = configs['development']
app = create_app(config)
|
|
corradomonti/fbvoting
|
fbvoting/__main__.py
|
Python
|
mit
| 5,860
| 0.008191
|
# -*- coding: utf-8 -*-
import logging
from flask import Flask, redirect, request, jsonify, render_template, session
import requests
import fbvoting.pagebuilders.buildhome
import fbvoting.pagebuilders.buildoverview
import fbvoting.pagebuilders.buildprofile
import fbvoting.pagebuilders.buildfriends
import fbvoting.pagebuilders.buildrank
import fbvoting.pagebuilders.buildvote
import fbvoting.pagebuilders.builderrors
import fbvoting.pagebuilders.ajaxfeedback
import fbvoting.pagebuilders.jsonchart
import fbvoting.pagebuilders.commons
from fbvoting.mylogging import report, report_view
from fbvoting.apis.youtube import youtube_search
from fbvoting.lib import add_GET_params, url_for, as_full_url
from fbvoting.serverutils import refresh_token
from fbvoting.serverutils import configure_app
from fbvoting.admininterface import activate_admin_interface
from fbvoting.apis.fb import store_oauth_token
logger = logging.getLogger(__name__)
requests = requests.session()
app, debugged_app = configure_app(Flask(__name__))
route = lambda url: app.route(url, methods=['GET', 'POST'])
activate_admin_interface(route)
#### HANDLING LOGIN ####
@route('/auth')
def store_auth_token():
return store_oauth_token(request.form.get('token'))
#### PAGES ####
@route('/')
@report_view
def index():
return fbvoting.pagebuilders.buildhome.build_home()
@route('/ajax/intro')
@report_view
def home_intro():
return fbvoting.pagebuilders.buildhome.build_intro()
@route('/profile')
@refresh_token
@report_view
def egoboost():
return fbvoting.pagebuilders.buildprofile.build_profile()
@route('/friends')
@refresh_token
@report_view
def friends():
return fbvoting.pagebuilders.buildfriends.build_friends()
@route('/votes')
@report_view
@refresh_token
def overview():
return fbvoting.pagebuilders.buildoverview.build_overview()
@route('/votes/<category>')
@refresh_token
@report_view
def vote(category):
return fbvoting.pagebuilders.buildvote.build_vote(category)
@app.route('/savevote', methods=["POST"])
def savevote():
parameters = fbvoting.pagebuilders.buildvote.savevote(request.form)
url = request.args.get('next', url_for('overview'))
if not url.startswith('https'):
url = as_full_url(url)
url = add_GET_params(url, parameters)
if 'token' in request.args:
url = add_GET_params(url, {'token': request.args['token']} )
report.mark('saved-vote')
return redirect(url)
@route('/recommendation')
@refresh_token
@report_view
def recommendation():
return fbvoting.pagebuilders.buildrank.build_recommendation_overview()
@route('/chart')
@refresh_token
@report_view
def chart():
return fbvoting.pagebuilders.buildrank.build_chart_overview()
@route('/chart/<category>')
@refresh_token
@report_view
def category_chart(category):
if request.args.get('query'):
return fbvoting.pagebuilders.buildrank.build_category_chart_from_query(category, request.args.get('query'))
else:
return fbvoting.pagebuilders.buildrank.build_category_chart(category,
int(request.args.get('page', 0)),
playfirst=bool(request.args.get('playfirst'))
)
@route('/recommendation/<category>')
@refresh_token
@report_view
def category_recommendation(category):
page = int(request.args.get('page', 0))
return fbvoting.pagebuilders.buildrank.build_category_recommendation(category, page,
playfirst=bool(request.args.get('playfirst')))
@route('/about')
@report_view
def about():
data = fbvoting.pagebuilders.commons.get_base_data()
data['active_section'] = 'about'
data['activate_fb'] = False
return render_template('about.html', **data)
#### ajax ####
@route('/ajax/check-token/')
def check_token_in_cookies():
return jsonify({'results': 'oauth_token' in session})
@route('/ajax/musicbrainz/log/update')
def musicbrainz_logger():
fbvoting.apis.musicbrainz.log_update()
return "OK\n"
@route('/ajax/musicbrainz/check/')
def musicbrainz_check_if_exist():
return jsonify({'results': fbvoting.pagebuilders.buildvote.check_with_suggestion(request.form)})
@route('/ajax/musicbrainz/search/artist')
def musicbrainz_search_artist():
query = request.args.get('q','')
return jsonify( {'query': query, 'suggestions':
fbvoting.apis.musicbrainz.search_artists(query)
} )
@route('/ajax/musicbrainz/search/song')
def musicbrainz_search_song():
query = request.args.get('q','')
return jsonify( {'query': query, 'suggestions':
fbvoting.apis.musicbrainz.search_songs(
query,
request.args.get('artist'),
category=request.args.get('category','')
)
} )
@route('/ajax/youtube/search')
def ajax_youtube_search():
return jsonify({ 'results': youtube_search(
request.args['q'],
max_results=request.args.get('max-results', 4)
) } )
@route('/ajax/rank/<rank_type>/<category>')
def ajax_ranks(rank_type, category):
return jsonify({
'results': fbvoting.pagebuilders.jsonchart.get_ranks(rank_type, category,
page=int(request.args.get('page', 0)))
})
@route('/feedback/put/<category>/<song>')
def feedback(category, song):
return fbvoting.pagebuilders.ajaxfeedback.put_feedback(category, song, request.args)
@route('/feedback/get/<category>/<song>')
def get_rating(category, song):
return fbvoting.pagebuilders.ajaxfeedback.get_rating(category, song)
## ERROR PAGES ##
for error_code in (404, 500):
    # Bind error_code eagerly; a bare closure would late-bind and leave every
    # handler reporting the last code in the loop.
    app.error_handler_spec[None][error_code] = (
        lambda _, code=error_code: fbvoting.pagebuilders.builderrors.build_error(code))
###############
logger.info("FBVoting server is now ready.")
if fbvoting.conf.DEBUG:
logger.warn("We are in DEBUG mode.")
if __name__ == '__main__':
app.run(host='0.0.0.0', port=fbvoting.conf.PORT)
|
lucian1900/Webified
|
messenger.py
|
Python
|
gpl-2.0
| 5,665
| 0.005296
|
#
# Copyright (C) 2007, One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
import logging
import dbus
from dbus.gobject_service import ExportedGObject
import base64
SERVICE = "org.laptop.WebActivity"
IFACE = SERVICE
PATH = "/org/laptop/WebActivity"
_logger = logging.getLogger('messenger')
class Messenger(ExportedGObject):
def __init__(self, tube, is_initiator, model):
ExportedGObject.__init__(self, tube, PATH)
self.tube = tube
self.is_initiator = is_initiator
self.members = []
self.entered = False
self.model = model
self.bus_name = None
self.tube.watch_participants(self.participant_change_cb)
def participant_change_cb(self, added, removed):
_logger.debug('Participants change add=%s rem=%s'
%(added, removed))
for handle, bus_name in added:
_logger.debug('Add member handle=%s bus_name=%s'
%(str(handle), str(bus_name)))
self.members.append(bus_name)
for handle in removed:
_logger.debug('Remove member %r', handle)
try:
self.members.remove(self.tube.participants[handle])
except ValueError:
_logger.debug('Remove member %r - already absent', handle)
if not self.entered:
self.tube.add_signal_receiver(self._add_link_receiver, '_add_link',
IFACE, path=PATH,
sender_keyword='sender',
byte_arrays=True)
self.bus_name = self.tube.get_unique_name()
if self.is_initiator:
_logger.debug('Initialising a new shared browser, I am %s .'
%self.tube.get_unique_name())
else:
# sync with other members
_logger.debug('Joined I am %s .'%self.bus_name)
for member in self.members:
if member != self.bus_name:
_logger.debug('Get info from %s' %member)
                    self.tube.get_object(member, PATH).sync_with_members(
self.model.get_links_ids(), dbus_interface=IFACE,
reply_handler=self.reply_sync, error_handler=lambda
e:self.error_sync(e, 'transfering file'))
            self.entered = True
def reply_sync(self, a_ids, sender):
a_ids.pop()
for link in self.model.data['shared_links']:
if link['hash'] not in a_ids:
self.tube.get_object(sender, PATH).send_link(
link['hash'], link['url'], link['title'], link['color'],
link['owner'], link['thumb'], link['timestamp'])
def error_sync(self, e, when):
_logger.error('Error %s: %s'%(when, e))
@dbus.service.method(dbus_interface=IFACE, in_signature='as',
out_signature='ass', sender_keyword='sender')
def sync_with_members(self, b_ids, sender=None):
'''Sync with members '''
b_ids.pop()
# links the caller wants from me
for link in self.model.data['shared_links']:
if link['hash'] not in b_ids:
self.tube.get_object(sender, PATH).send_link(
link['hash'], link['url'], link['title'], link['color'],
link['owner'], link['thumb'], link['timestamp'])
a_ids = self.model.get_links_ids()
a_ids.append('')
# links I want from the caller
return (a_ids, self.bus_name)
@dbus.service.method(dbus_interface=IFACE, in_signature='ssssssd',
out_signature='')
def send_link(self, identifier, url, title, color, owner, buf, timestamp):
'''Send link'''
a_ids = self.model.get_links_ids()
if identifier not in a_ids:
thumb = base64.b64decode(buf)
self.model.add_link(url, title, thumb, owner, color, timestamp)
@dbus.service.signal(IFACE, signature='sssssd')
def _add_link(self, url, title, color, owner, thumb, timestamp):
'''Signal to send the link information (add)'''
_logger.debug('Add Link: %s '%url)
def _add_link_receiver(self, url, title, color, owner, buf, timestamp,
sender=None):
'''Member sent a link'''
handle = self.tube.bus_name_to_handle[sender]
if self.tube.self_handle != handle:
thumb = base64.b64decode(buf)
self.model.add_link(url, title, thumb, owner, color, timestamp)
_logger.debug('Added link: %s to linkbar.'%(url))
|
darkstar007/GroundStation
|
src/ReceiverEvent.py
|
Python
|
gpl-3.0
| 1,854
| 0.002157
|
#
# Copyright 2013/2015 Matthew Nottingham
#
# This file is part of GroundStation
#
# GroundStation is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GroundStation is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GroundStation; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
import ephem
import GnuRadio2
from PyQt4 import QtCore  # assumed binding: QtCore.QTimer is used below but no Qt import appears in this excerpt
class ReceiverEvent():
def __init__(self, time, duration, frequency, bandwidth):
self.freq = frequency
        self.duration = 60.0 * duration  # Let's keep it in seconds.
self.bandwidth = bandwidth
self.channels = []
self.timerInterval = (time - ephem.now()) * 24.0 * 60.0 * 60.0 * 1000
self.timer = QtCore.QTimer(self)
self.timer.timeout.connect(self.startReceiving)
self.timer.setSingleShot(True)
self.timer.start(self.timerInterval)
def addChannel(self, channel):
self.channels.append(channel)
def startReceiving(self):
self.timer.stop()
self.timer.timeout.connect(self.stopReceiving)
self.timer.start(self.duration * 1000)
for c in self.channels:
c.startCountdown()
        self.rx = GnuRadio2.Receiver(self.freq, self.bandwidth)  # was Receiver(frequency, bandwidth): a NameError outside __init__
self.rx.start()
def stopReceiving(self):
# Loop through all the channels and make sure they're dead?...
self.rx.stop()
|
duke605/RunePy
|
commands/vorago.py
|
Python
|
mit
| 3,188
| 0.001882
|
from discord.ext import commands
from math import floor
from datetime import datetime
from util.arguments import Arguments
from shlex import split
class Vorago:
def __init__(self, bot):
self.bot = bot
@commands.command(aliases=['rago'],
description='Shows the current rotation of Vorago and what the next one will be.')
async def vorago(self, *, msg=''):
parser = Arguments(allow_abbrev=False, prog='vorago')
parser.add_argument('-H', '--hard-mode', action='store_true', help='Shows hardmode rotations.')
try:
args = parser.parse_args(split(msg))
except SystemExit:
await self.bot.say('```%s```' % parser.format_help())
return
except Exception as e:
await self.bot.say('```%s```' % str(e))
return
await self.execute(args)
async def execute(self, args):
rotations = (
{
'type': 'Ceiling Collapse',
'unlock': 'Torso of Omens',
'p10': ('Team Split', 'Green Bomb'),
'p11': ('Team Split', 'Vitalis')
},
{
'type': 'Scopulus',
'unlock': 'Helm of Omens',
'p10': ('Purple Bomb', 'Team Split'),
'p11': ('Purple Bomb', 'Vitalis')
},
{
'type': 'Vitalis',
'unlock': 'Legs of Omens',
'p10': ('Vitalis', 'Purple Bomb'),
'p11': ('Vitalis', 'bleeds')
},
{
'type': 'Green Bomb',
'unlock': 'Boots of Omens',
'p10': ('Green Bomb', 'Vitalis'),
'p11': ('Green Bomb', 'Team Split')
},
{
'type': 'Team Split',
'unlock': 'Maul of Omens',
'p10': ('Team Split', 'Team Split'),
                'p11': ('Team Split', 'Purple Bomb')
},
{
'type': 'The End',
'unlock': 'Gloves of Omens',
'p10': ('Purple Bomb', 'Bleeds'),
'p11': ('Purple Bomb', 'Vitalis')
}
)
ms = round(datetime.utcnow().timestamp() * 1000)
current = floor((((floor(floor(ms / 1000) / (24 * 60 * 60))) - 6) % (7 * len(rotations))) / 7)
        days_until = 7 - ((floor((ms / 1000) / (24 * 60 * 60))) - 6) % (7 * len(rotations)) % 7
next = current + 1 if current + 1 < len(rotations) else 0
        m = '**Current Rotation**: %s.\n' % rotations[current]['type']
        m += '**Next Rotation**: %s in `%s` day%s.' % (rotations[next]['type'], days_until,
                                                       's' if days_until != 1 else '')
# Adding hard mode information
if args.hard_mode:
m += '\n\n**__Hard mode__**:\n'
m += '**Phase 10**: %s + %s.\n' % rotations[current]['p10']
m += '**Phase 11**: %s + %s.\n' % rotations[current]['p11']
m += '**Unlock**: %s' % rotations[current]['unlock']
await self.bot.say(m)
def setup(bot):
bot.add_cog(Vorago(bot))
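A hedged sketch of the rotation arithmetic used above: rotations are assumed to switch every 7 days over a 6-entry cycle anchored at a day offset of 6, exactly as in the `current` expression.
from datetime import datetime
from math import floor

def rotation_index(now_ms, n_rotations=6, anchor_days=6, period_days=7):
    # Whole days since the epoch, then position inside the repeating cycle.
    days = floor(floor(now_ms / 1000) / (24 * 60 * 60))
    return floor(((days - anchor_days) % (period_days * n_rotations)) / period_days)

assert 0 <= rotation_index(datetime.utcnow().timestamp() * 1000) < 6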
|
isislovecruft/switzerland
|
switzerland/lib/shrunk_scapy/data.py
|
Python
|
gpl-3.0
| 5,617
| 0.016023
|
## This file is part of Scapy
## See http://www.secdev.org/projects/scapy for more informations
## Copyright (C) Philippe Biondi <phil@secdev.org>
## This program is published under a GPLv2 license
import re
from dadict import DADict
from error import log_loading
############
## Consts ##
############
ETHER_ANY = "\x00"*6
ETHER_BROADCAST = "\xff"*6
ETH_P_ALL = 3
ETH_P_IP = 0x800
ETH_P_ARP = 0x806
ETH_P_IPV6 = 0x86dd
# From net/if_arp.h
ARPHDR_ETHER = 1
ARPHDR_METRICOM = 23
ARPHDR_PPP = 512
ARPHDR_LOOPBACK = 772
ARPHDR_TUN = 65534
# From net/ipv6.h on Linux (+ Additions)
IPV6_ADDR_UNICAST = 0x01
IPV6_ADDR_MULTICAST = 0x02
IPV6_ADDR_CAST_MASK = 0x0F
IPV6_ADDR_LOOPBACK = 0x10
IPV6_ADDR_GLOBAL = 0x00
IPV6_ADDR_LINKLOCAL = 0x20
IPV6_ADDR_SITELOCAL = 0x40 # deprecated since Sept. 2004 by RFC 3879
IPV6_ADDR_SCOPE_MASK = 0xF0
#IPV6_ADDR_COMPATv4 = 0x80 # deprecated; i.e. ::/96
#IPV6_ADDR_MAPPED = 0x1000 # i.e.; ::ffff:0.0.0.0/96
IPV6_ADDR_6TO4 = 0x0100 # Added to have more specific info (should be 0x0101 ?)
IPV6_ADDR_UNSPECIFIED = 0x10000
MTU = 1600
# file parsing to get some values :
def load_protocols(filename):
spaces = re.compile("[ \t]+|\n")
dct = DADict(_name=filename)
try:
for l in open(filename):
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
if len(lt) < 2 or not lt[0]:
continue
dct[lt[0]] = int(lt[1])
except Exception,e:
log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename,l,e))
except IOError:
log_loading.info("Can't open /etc/protocols file")
return dct
IP_PROTOS=load_protocols("/etc/protocols")
def load_ethertypes(filename):
spaces = re.compile("[ \t]+|\n")
dct = DADict(_name=filename)
try:
f=open(filename)
for l in f:
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
if len(lt) < 2 or not lt[0]:
continue
dct[lt[0]] = int(lt[1], 16)
except Exception,e:
log_loading.info("Couldn't parse file [%s]: line [%r] (%s)" % (filename,l,e))
f.close()
except IOError,msg:
pass
return dct
ETHER_TYPES=load_ethertypes("/etc/ethertypes")
def load_services(filename):
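    # /etc/services lines look like "http  80/tcp  www"; TCP entries go to
    # tdct (e.g. tdct["http"] = 80) and "/udp" entries to udct.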
spaces = re.compile("[ \t]+|\n")
tdct=DADict(_name="%s-tcp"%filename)
udct=DADict(_name="%s-udp"%filename)
try:
f=open(filename)
for l in f:
try:
shrp = l.find("#")
if shrp >= 0:
l = l[:shrp]
l = l.strip()
if not l:
continue
lt = tuple(re.split(spaces, l))
if len(lt) < 2 or not lt[0]:
continue
if lt[1].endswith("/tcp"):
tdct[lt[0]] = int(lt[1].split('/')[0])
elif lt[1].endswith("/udp"):
udct[lt[0]] = int(lt[1].split('/')[0])
except Exception,e:
log_loading.warning("Couldn't file [%s]: line [%r] (%s)" % (filename,l,e))
f.close()
except IOError:
log_loading.info("Can't open /etc/services file")
return tdct,udct
TCP_SERVICES,UDP_SERVICES=load_services("/etc/services")
class ManufDA(DADict):
def fixname(self, val):
return val
def _get_manuf_couple(self, mac):
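        # Entries are keyed by the 3-byte OUI prefix (e.g. "00:00:0C") and
        # map to a (short name, long name) tuple; unknown MACs fall back
        # to (mac, mac).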
oui = ":".join(mac.split(":")[:3]).upper()
return self.__dict__.get(oui,(mac,mac))
def _get_manuf(self, mac):
return self._get_manuf_couple(mac)[1]
def _get_short_manuf(self, mac):
return self._get_manuf_couple(mac)[0]
def _resolve_MAC(self, mac):
oui = ":".join(mac.split(":")[:3]).upper()
if oui in self:
return ":".join([self[oui][0]]+ mac.split(":")[3:])
return mac
def load_manuf(filename):
try:
manufdb=ManufDA(_name=filename)
for l in open(filename):
try:
l = l.strip()
if not l or l.startswith("#"):
continue
oui,shrt=l.split()[:2]
i = l.find("#")
if i < 0:
lng=shrt
else:
lng = l[i+2:]
manufdb[oui] = shrt,lng
except Exception,e:
log_loading.warning("Couldn't parse one line from [%s] [%r] (%s)" % (filename, l, e))
except IOError:
#log_loading.warning("Couldn't open [%s] file" % filename)
pass
return manufdb
#####################
## knowledge bases ##
#####################
class KnowledgeBase:
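    """Base class for lazily-initialized lookup data: subclasses override
    lazy_init() to populate self.base on first access via get_base()."""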
def __init__(self, filename):
self.filename = filename
self.base = None
def lazy_init(self):
self.base = ""
def reload(self, filename = None):
if filename is not None:
self.filename = filename
oldbase = self.base
self.base = None
self.lazy_init()
if self.base is None:
self.base = oldbase
def get_base(self):
if self.base is None:
self.lazy_init()
return self.base
|
jEschweiler/Urease
|
urease_software/cluster.py
|
Python
|
gpl-3.0
| 5,877
| 0.052408
|
import numpy as np
import scipy.spatial.distance as dist
from matplotlib.backends.backend_pdf import PdfPages
import matplotlib.pyplot as plt
import matplotlib.lines as mplines
import scipy.cluster.hierarchy as clust
import os
def kabsch(coord, ref,app):
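    # Kabsch algorithm: compute the rotation that optimally aligns `coord`
    # onto `ref` (both assumed pre-centered on the origin) from the SVD of
    # their covariance matrix, then apply that rotation to `app`.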
C = np.dot(np.transpose(coord), ref)
V, S, W = np.linalg.svd(C)
#print("VSW", V,S,W)
d = (np.linalg.det(V) * np.linalg.det(W)) < 0.0
if d:
S[-1] = -S[-1]
V[:,-1] = -V[:,-1]
# Create Rotation matrix U
U = np.dot(V, W)
# Rotate coord
kcoord = np.dot(app, U)
return kcoord
def rmsd(coord, ref):
sd = (coord - ref)**2
ssd = np.mean(sd)
rmsd = np.sqrt(ssd)
return rmsd
#colors = [(1,.4,.4),(.4,.4,1),(.4,1,.4),(1,.4,1),(.4,1,1),(1,.7,.4),(1,.4,.7)]
colors = [(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0),(0,.6,.6),(1,0,.5),(1,1,.2),(1,1,.2),(.8,.4,0),(.6,1,1),(.8,0,.8),(0,.9,0)]
def writepym(i,coords,radii):
pymfilename= i + ".pym"
pymfile=open(pymfilename, "w")
pymfile.write('from pymol.cgo import *'+ '\n')
pymfile.write('from pymol import cmd'+ '\n')
pymfile.write('from pymol.vfont import plain' + '\n' + 'data={}' + '\n' + "curdata=[]" + '\n')
#print(x for x in enumerate(coords))
for item in enumerate(coords):
#print(colors[item[0]][0],colors[item[0]][1], colors[item[0]][2])
#print(colors[item[0]][0])
#print(item)
pymfile.write("k='Protein" + str(item[0]) + " geometry'" +'\n'+ "if not k in data.keys():" +'\n'+" data[k]=[]"+'\n'+'curdata=['+'\n'+'COLOR,' + str(colors[item[0]%8][0])+","+str(colors[item[0]%8][1])+","+ str(colors[item[0]%8][2])+"," + '\n' + 'SPHERE,'+ str(item[1][0])+ ','+ str(item[1][1])+',' + str(item[1][2])+','+ str(radii[item[0]]) +'\n')
pymfile.write("]"+"\n"+"k='Protein" + str(item[0]) + " geometry'" + '\n' + "if k in data.keys():" + "\n" + " data[k]= data[k]+curdata"+'\n'+"else:" +'\n' +" data[k]= curdata"+"\n")
pymfile.write("for k in data.keys():" + "\n" + " cmd.load_cgo(data[k], k, 1)" +"\n"+ "data= {}")
pymfile.close()
files=os.listdir(".")
#refs=[x for x in files if x.endswith('1k90_refcoords.npy')]
np.set_printoptions(threshold=1000000)
pdf = PdfPages("corrected.pdf")
# Read the pairwise distance matrix (discard row and column labels).
#fname = "corrected-res.csv"
distmat = np.load("rmsdmat.npy")
# Calculate the mean of the pairwise similarities.
ii = np.tril_indices(distmat.shape[0], -1)
pwise = distmat[ii]
mdist = np.mean(pwise)
print(mdist)
#print(pwise)
# Generate a histogram of the pairwise similarities.
plt.clf()
plt.hist(pwise, 20, color='lightblue')
plt.xlabel("Similarity")#, size=17)
plt.ylabel("Frequency")#, size=17)
pdf.savefig()
# Do the clustering
h = clust.average(distmat)
# Plot the dendrogram
plt.figure(figsize=(16,10), linewidth=100.0)
plt.clf()
ax = plt.axes()
for pos in 'right','bottom','top':
ax.spines[pos].set_color('none')
ax.xaxis.set_ticks_position('none')
ax.yaxis.set_ticks_position('left')
ax.spines['left'].set_position(('outward', 10))
x=clust.dendrogram(h)
#plt.getp(x)
pdf.savefig()
pdf.close()
#ll = clust.leaves_list(h)
#print(len(ll))
tree = clust.to_tree(h)
#print(tree)
#ctree = clust.cut_tree(h, height = 150)
#print(np.shape(ctree))
ctree = clust.cut_tree(h, n_clusters = 2)
leaves = clust.leaves_list(h)
#print(np.shape(ctree))
ctree = np.reshape(ctree, len(leaves))
#print(np.shape(leaves))
#print(np.shape(ctree))
#print(np.vstack((leaves,ctree)))
files=os.listdir(".")
files=[x for x in files if x.startswith('tetramer_model_')]
print(len(files))
n_clusters = np.max(ctree) + 1
#print(n_clusters)
clusters = [[] for i in range(n_clusters)]
CCC = np.array([2,3,10,11,18,19])
AC3 = np.array([0,2,3,8,10,11,16,18,19])
#MDFG = np.array([4,5,6,7,12,13,14,15,20,21,22,23])
##actually MD
MDFG = np.array([4,5,12,13,20,21])
for i, leaf in enumerate(leaves):
cluster = ctree[i]
structure = np.load("goodmodels0.npy")[i]
# print(len(clusters))
# print(cluster)
clusters[cluster].append(structure)
rmsdlist = []
coordlist = []
# Iterate clusters without shadowing the scipy.cluster.hierarchy alias `clust`.
for members in clusters:
    l = len(members)
    av = round(l / 2, -1)
    av = int(av)
    crmsdlist = []
    alignedcoordlist = []
    for o,st in enumerate(members):
        strmsdlist = []
        stCst = st[CCC]
        stC = stCst - np.mean(stCst, axis = 0)
        st3 = st - np.mean(st, axis = 0)
        #ik = i[np.array([2,7,12])]
        #ikm = ik - np.mean(ik, axis = 0)
        #im = i - np.mean(i, axis = 0)
        #print(i)
        for st2 in members:
st2Cst = st2[CCC]
st2C = st2Cst - np.mean(st2Cst, axis = 0)
st23 = st2 - np.mean(st2Cst, axis = 0)
k = kabsch(st2C, stC, st23)
k = k - np.mean(k, axis =0)
#r2 = rmsd(k[np.array([3,4,8,9,13,14])], st3[np.array([3,4,8,9,13,14])])
r = rmsd(k, st3)
#print(r, r2)
#r = rmsd(st, k)
strmsdlist.append(r)
if o == av:
alignedcoordlist.append(k)
|
            #print(r)
            #jm = j - np.mean(j, axis = 0)
#jk = j[np.array([2,7,12])]
#jkm = jk - np.mean(jk, axis = 0)
#k = kabsch(jkm, ikm, jm)
#k = k - np.mean(k, axis =0)
#r = rmsd(k[np.array([3,4,8,9,13,14])], im[np.array([3,4,8,9,13,14])])
#r2 = rmsd(k[np.array([2,7,12])], im[np.array([2,7,12])])
#print(i)
#print(r, r2)
#rmsdlist1.append(r)
crmsdlist.append(strmsdlist)
#print(alignedcoordlist)
rmsdlist.append(crmsdlist)
coordlist.append(alignedcoordlist)
radii = np.load("radii.npy")
clustcoords = []
for i,item in enumerate(coordlist):
print(np.shape(item))
mean = np.mean(item, axis = 0)
med = round(len(item)/2)
writepym("cluster_mean_"+str(i), mean, radii)
#writepym("cluster_med_"+str(i), item[med],radii)
    #print(item)
np.save("cluster_"+str(i)+".npy", item)
#print("std ", np.std(item, axis = 0))
clustcoords.append(mean)
np.save("clust_av_coordsn.npy",clustcoords)
m = []
for cl in rmsdlist:
mean = np.mean(cl)
m.append(mean)
print(mean)
print(np.mean(m))
|
timxx/gitc
|
qgitc/gitutils.py
|
Python
|
apache-2.0
| 14,220
| 0
|
# -*- coding: utf-8 -*-
from collections import defaultdict
import subprocess
import os
import bisect
import re
from .common import log_fmt
class GitProcess():
GIT_BIN = None
def __init__(self, repoDir, args, text=None):
startupinfo = None
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
self._process = subprocess.Popen(
[GitProcess.GIT_BIN] + args,
cwd=repoDir,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
startupinfo=startupinfo,
universal_newlines=text)
@property
def process(self):
return self._process
@property
def returncode(self):
return self._process.returncode
def communicate(self):
return self._process.communicate()
class Ref():
INVALID = -1
TAG = 0
HEAD = 1
REMOTE = 2
def __init__(self, type, name):
self._type = type
self._name = name
def __str__(self):
string = "type: {0}\n".format(self._type)
string += "name: {0}".format(self._name)
return string
def __lt__(self, other):
return self._type < other._type
@property
def type(self):
return self._type
@type.setter
def type(self, type):
self._type = type
@property
def name(self):
return self._name
@name.setter
def name(self, name):
self._name = name
@classmethod
def fromRawString(cls, string):
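        # Parses one line of `git show-ref -d` output, e.g.
        # "<40-char sha1> refs/heads/master"; 46 = 40 (sha1) + 1 (space)
        # + 5 ("refs/") is the minimum plausible length.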
if not string or len(string) < 46:
return None
sha1 = string[0:40]
name = string[41:]
if not name.startswith("refs/"):
return None
name = name[5:]
_type = Ref.INVALID
_name = None
if name.startswith("heads/"):
_type = Ref.HEAD
_name = name[6:]
elif name.startswith("remotes") \
and not name.endswith("HEAD"):
_type = Ref.REMOTE
_name = name
elif name.startswith("tags/"):
_type = Ref.TAG
if name.endswith("^{}"):
_name = name[5:-3]
else:
_name = name[5:]
else:
return None
return cls(_type, _name)
class Git():
REPO_DIR = os.getcwd()
REPO_TOP_DIR = os.getcwd()
REF_MAP = {}
REV_HEAD = None
# local uncommitted changes
LUC_SHA1 = "0000000000000000000000000000000000000000"
# local changes checked
LCC_SHA1 = "0000000000000000000000000000000000000001"
@staticmethod
def available():
return GitProcess.GIT_BIN is not None
@staticmethod
def run(args, text=None):
return GitProcess(Git.REPO_DIR, args, text)
@staticmethod
def checkOutput(args, text=None):
process = Git.run(args, text)
data = process.communicate()[0]
if process.returncode != 0:
return None
return data
@staticmethod
def repoTopLevelDir(directory):
"""get top level repo directory
if @directory is not a repository, None returned"""
if not os.path.isdir(directory):
return None
if not os.path.exists(directory):
return None
args = ["rev-parse", "--show-toplevel"]
process = GitProcess(directory, args)
realDir = process.communicate()[0]
if process.returncode != 0:
return None
return realDir.decode("utf-8").replace("\n", "")
@staticmethod
def refs():
args = ["show-ref", "-d"]
data = Git.checkOutput(args)
if not data:
return None
lines = data.decode("utf-8").split('\n')
refMap = defaultdict(list)
for line in lines:
ref = Ref.fromRawString(line)
if not ref:
continue
sha1 = line[0:40]
bisect.insort(refMap[sha1], ref)
return refMap
@staticmethod
def revHead():
args = ["rev-parse", "HEAD"]
data = Git.checkOutput(args)
if not data:
return None
return data.decode("utf-8").rstrip('\n')
@staticmethod
def branches():
args = ["branch", "-a"]
data = Git.checkOutput(args)
if not data:
return None
return data.decode("utf-8").split('\n')
@staticmethod
def commitSummary(sha1):
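        # %x01 emits an SOH control byte as the field separator, which will
        # not normally appear in subjects or author names.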
fmt = "%h%x01%s%x01%ad%x01%an%x01%ae"
args = ["show", "-s",
"--pretty=format:{0}".format(fmt),
"--date=short", sha1]
data = Git.checkOutput(args)
if not data:
return None
parts = data.decode("utf-8").split("\x01")
return {"sha1": parts[0],
"subject": parts[1],
"date": parts[2],
"author": parts[3],
"email": parts[4]}
@staticmethod
def abbrevCommit(sha1):
args = ["show", "-s", "--pretty=format:%h", sha1]
data = Git.checkOutput(args)
if not data:
return sha1[:7]
return data.rstrip().decode("utf-8")
@staticmethod
def commitSubject(sha1):
args = ["show", "-s", "--pretty=format:%s", sha1]
data = Git.checkOutput(args)
return data
@staticmethod
def commitRawDiff(sha1, filePath=None, gitArgs=None):
if sha1 == Git.LCC_SHA1:
args = ["diff-index", "--cached", "HEAD"]
elif sha1 == Git.LUC_SHA1:
args = ["diff-files"]
else:
args = ["diff-tree", "-r", "--root", sha1]
args.extend(["-p", "--textconv", "--submodule",
"-C", "--no-commit-id", "-U3"])
if gitArgs:
args.extend(gitArgs)
if filePath:
args.append("--")
args.append(filePath)
data = Git.checkOutput(args)
if not data:
return None
return data
@staticmethod
def externalDiff(branchDir, commit, path=None, tool=None):
args = ["difftool", "--no-prompt"]
if commit.sha1 == Git.LUC_SHA1:
pass
elif commit.sha1 == Git.LCC_SHA1:
args.append("--cached")
else:
args.append("{0}^..{0}".format(commit.sha1))
if tool:
args.append("--tool={}".format(tool))
        if path:
args.append("--")
args.append(path)
cwd = branchDir if branchDir else Git.REPO_DIR
process = GitProcess(cwd, args)
@staticmethod
def conflictFiles():
args = ["diff", "--name-only",
"--diff-filter=U",
"-n
|
o-color"]
data = Git.checkOutput(args)
if not data:
return None
return data.rstrip(b'\n').decode("utf-8").split('\n')
@staticmethod
def gitDir():
args = ["rev-parse", "--git-dir"]
data = Git.checkOutput(args)
if not data:
return None
return data.rstrip(b'\n').decode("utf-8")
@staticmethod
def gitPath(name):
dir = Git.gitDir()
if not dir:
return None
if dir[-1] != '/' and dir[-1] != '\\':
dir += '/'
return dir + name
@staticmethod
def mergeBranchName():
"""return the current merge branch name"""
# TODO: is there a better way?
path = Git.gitPath("MERGE_MSG")
if not os.path.exists(path):
return None
name = None
with open(path, "r") as f:
line = f.readline()
m = re.match("Merge.* '(.*)'.*", line)
if m:
name = m.group(1)
# likely a sha1
if name and re.match("[a-f0-9]{7,40}", name):
data = Git.checkOutput(["branch", "--remotes",
"--contains", name])
if data:
data = data.rstrip(b'\n')
if data:
# might have more than one branch
name = data.decode("utf-8").split('\n')[0].strip()
return name
@staticmethod
|
google/pigweed
|
pw_protobuf/py/pw_protobuf/codegen_pwpb.py
|
Python
|
apache-2.0
| 25,350
| 0.000118
|
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""This module defines the generated code for pw_protobuf C++ classes."""
import abc
from datetime import datetime
import enum
import os
import sys
from typing import Dict, Iterable, List, Tuple
from typing import cast
from google.protobuf import descriptor_pb2
from pw_protobuf.output_file import OutputFile
from pw_protobuf.proto_tree import ProtoEnum, ProtoMessage, ProtoMessageField
from pw_protobuf.proto_tree import ProtoNode
from pw_protobuf.proto_tree import build_node_tree
PLUGIN_NAME = 'pw_protobuf'
PLUGIN_VERSION = '0.1.0'
PROTO_H_EXTENSION = '.pwpb.h'
PROTO_CC_EXTENSION = '.pwpb.cc'
PROTOBUF_NAMESPACE = '::pw::protobuf'
class EncoderType(enum.Enum):
MEMORY = 1
STREAMING = 2
def base_class_name(self) -> str:
"""Returns the base class used by this encoder
|
type."""
if self is self.STREAMING:
return 'StreamEncoder'
if self is self.MEMORY:
return 'MemoryEncoder'
raise ValueError('Unknown encoder type')
def codegen_class_name(self) -> str:
"""Returns the base class used by this encoder type."""
if self is self.STREAMING:
return 'StreamEncoder'
        if self is self.MEMORY:
            return 'MemoryEncoder'
raise ValueError('Unknown encoder type')
# protoc captures stdout, so we need to printf debug to stderr.
def debug_print(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
class ProtoMethod(abc.ABC):
"""Base class for a C++ method for a field in a protobuf message."""
def __init__(
self,
field: ProtoMessageField,
scope: ProtoNode,
root: ProtoNode,
):
"""Creates an instance of a method.
Args:
field: the ProtoMessageField to which the method belongs.
scope: the ProtoNode namespace in which the method is being defined.
"""
self._field: ProtoMessageField = field
self._scope: ProtoNode = scope
self._root: ProtoNode = root
@abc.abstractmethod
def name(self) -> str:
"""Returns the name of the method, e.g. DoSomething."""
@abc.abstractmethod
def params(self) -> List[Tuple[str, str]]:
"""Returns the parameters of the method as a list of (type, name) pairs.
e.g.
[('int', 'foo'), ('const char*', 'bar')]
"""
@abc.abstractmethod
def body(self) -> List[str]:
"""Returns the method body as a list of source code lines.
e.g.
[
'int baz = bar[foo];',
'return (baz ^ foo) >> 3;'
]
"""
@abc.abstractmethod
def return_type(self, from_root: bool = False) -> str:
"""Returns the return type of the method, e.g. int.
For non-primitive return types, the from_root argument determines
whether the namespace should be relative to the message's scope
(default) or the root scope.
"""
@abc.abstractmethod
def in_class_definition(self) -> bool:
"""Determines where the method should be defined.
Returns True if the method definition should be inlined in its class
definition, or False if it should be declared in the class and defined
later.
"""
def should_appear(self) -> bool: # pylint: disable=no-self-use
"""Whether the method should be generated."""
return True
def param_string(self) -> str:
return ', '.join([f'{type} {name}' for type, name in self.params()])
def field_cast(self) -> str:
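        # Produces e.g. 'static_cast<uint32_t>(Fields::FOO)'; the exact
        # constant style is whatever ProtoMessageField.enum_name() yields.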
return 'static_cast<uint32_t>(Fields::{})'.format(
self._field.enum_name())
def _relative_type_namespace(self, from_root: bool = False) -> str:
"""Returns relative namespace between method's scope and field type."""
scope = self._root if from_root else self._scope
type_node = self._field.type_node()
assert type_node is not None
# If a class method is referencing its class, the namespace provided
# must be from the root or it will be empty.
if type_node == scope:
scope = self._root
ancestor = scope.common_ancestor(type_node)
namespace = type_node.cpp_namespace(ancestor)
assert namespace
return namespace
class SubMessageEncoderMethod(ProtoMethod):
"""Method which returns a sub-message encoder."""
def name(self) -> str:
return 'Get{}Encoder'.format(self._field.name())
def return_type(self, from_root: bool = False) -> str:
return '{}::StreamEncoder'.format(
self._relative_type_namespace(from_root))
def params(self) -> List[Tuple[str, str]]:
return []
def body(self) -> List[str]:
line = 'return {}::StreamEncoder(GetNestedEncoder({}));'.format(
self._relative_type_namespace(), self.field_cast())
return [line]
# Submessage methods are not defined within the class itself because the
# submessage class may not yet have been defined.
def in_class_definition(self) -> bool:
return False
class WriteMethod(ProtoMethod):
"""Base class representing an encoder write method.
    Write methods have the following format (for the proto field foo):
Status WriteFoo({params...}) {
return encoder_->Write{type}(kFoo, {params...});
}
"""
def name(self) -> str:
return 'Write{}'.format(self._field.name())
def return_type(self, from_root: bool = False) -> str:
return '::pw::Status'
def body(self) -> List[str]:
params = ', '.join([pair[1] for pair in self.params()])
line = 'return {}({}, {});'.format(self._encoder_fn(),
self.field_cast(), params)
return [line]
def params(self) -> List[Tuple[str, str]]:
"""Method parameters, defined in subclasses."""
raise NotImplementedError()
def in_class_definition(self) -> bool:
return True
def _encoder_fn(self) -> str:
"""The encoder function to call.
Defined in subclasses.
e.g. 'WriteUint32', 'WriteBytes', etc.
"""
raise NotImplementedError()
class PackedWriteMethod(WriteMethod):
"""A method for a writing a packed repeated field.
Same as a WriteMethod, but is only generated for repeated fields.
"""
def should_appear(self) -> bool:
return self._field.is_repeated()
def _encoder_fn(self) -> str:
raise NotImplementedError()
#
# The following code defines write methods for each of the
# primitive protobuf types.
#
class DoubleWriteMethod(WriteMethod):
"""Method which writes a proto double value."""
def params(self) -> List[Tuple[str, str]]:
return [('double', 'value')]
def _encoder_fn(self) -> str:
return 'WriteDouble'
class PackedDoubleWriteMethod(PackedWriteMethod):
"""Method which writes a packed list of doubles."""
def params(self) -> List[Tuple[str, str]]:
return [('std::span<const double>', 'values')]
def _encoder_fn(self) -> str:
return 'WritePackedDouble'
class FloatWriteMethod(WriteMethod):
"""Method which writes a proto float value."""
def params(self) -> List[Tuple[str, str]]:
return [('float', 'value')]
def _encoder_fn(self) -> str:
return 'WriteFloat'
class PackedFloatWriteMethod(PackedWriteMethod):
"""Method which writes a packed list of floats."""
def params(self) -> List[Tuple[str, str]]:
return [('std::span<const float>', 'values')]
def _encoder_fn(self) -> str:
        return 'WritePackedFloat'
|
wubr2000/googleads-python-lib
|
examples/adwords/v201502/campaign_management/set_criterion_bid_modifier.py
|
Python
|
apache-2.0
| 2,408
| 0.007475
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example sets a bid modifier for the mobile platform on given campaign.
To get campaigns, run get_campaigns.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
from googleads import adwords
CAMPAIGN_ID = 'INSERT_CAMPAIGN_ID_HERE'
BID_MODIFIER = '1.5'
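# A bid modifier of 1.5 raises bids for the targeted platform by 50%.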
def main(client, campaign_id, bid_modifier):
# Initialize appropriate service.
campaign_criterion_service = client.GetService(
'CampaignCriterionService', version='v201502')
  # Create mobile platform. The ID can be found in the documentation.
# https://developers.google.com/adwords/api/docs/appendix/platforms
mobile = {
'xsi_type': 'Platform',
'id': '30001'
}
# Create campaign criterion with modified bid.
campaign_criterion = {
'campaignId': campaign_id,
'criterion': mobile,
'bidModifier': bid_modifier
}
# Create operations.
operations = [
{
'operator': 'SET',
'operand': campaign_criterion
}
]
# Make the mutate request.
result = campaign_criterion_service.mutate(operations)
# Display the resulting campaign criteria.
for campaign_criterion in result['value']:
print ('Campaign criterion with campaign id \'%s\' and criterion id \'%s\' '
'was updated with bid modifier \'%s\'.'
% (campaign_criterion['campaignId'],
campaign_criterion['criterion']['id'],
campaign_criterion['bidModifier']))
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, CAMPAIGN_ID, BID_MODIFIER)
|
Eficent/odoomrp-wip
|
mrp_project_link_mto/models/stock_move.py
|
Python
|
agpl-3.0
| 1,072
| 0
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields
class StockMove(models.Model):
_inherit = 'stock.move'
main_project_id = fields.Many2one('project.project',
string="Main Project")
|