repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
waseem18/oh-mainline | vendor/packages/scrapy/scrapy/telnet.py | Python | agpl-3.0 | 2,808 | 0.002137 | """
Scrapy Telnet Console extension
See documentation in docs/topics/telnetconsole.rst
"""
import pprint
from twisted.conch import manhole, telnet
from twisted.conch.insults import insults
from twisted.internet import protocol
from scrapy.xlib.pydispatch import dispatcher
from scrapy.exceptions import NotConfigured
from scrapy.stats import stats
from scrapy import log, signals
from scrapy.utils.signal import send_catch_log
from scrapy.utils.trackref import print_live_refs
from scrapy.utils.engine import print_engine_status
from scrapy.utils.reactor import listen_tcp
try:
import guppy
hpy = guppy.hpy()
except ImportError:
hpy = None
# signal to update telnet variables
# args: telnet_vars
update_telnet_vars = object()
class TelnetConsole(protocol.ServerFactory):
def __init__(self, crawler):
if not crawler.settings.getbool('TELNETCONSOLE_ENABLED'):
raise NotConfigured
self.crawler = crawler
self.noisy = False
self.portrange = map(int, crawler.settings.getlist('TELNETCONSOLE_PORT'))
self.host = crawler.settings['TELNETCONSOLE_HOST']
dispatcher.connect(self.start_listening, signals.engine_started)
dispatcher.connect(self.stop_listening, signals.engine_stopped)
@classmethod
def from_crawler(cls, crawler):
return cls(crawler)
def start_listening(self):
self.port = listen_tcp(self.portrange, self.host, self)
h = self.port.getHost()
log.msg("Telnet console listening on %s:%d" % (h.host, h.port), log.DEBUG)
def stop_listening(self):
self.port.stopListening()
def protocol(self):
telnet_vars = self._get_telnet_vars()
return telnet.TelnetTransport(telnet.TelnetBootstrapProtocol,
insults.ServerProtocol, manhole.Manhole, telnet_vars)
def _get_telnet_vars(self):
# Note: if you add entries here also update topics/telnetconsole.rst
slots = self.crawler.engine.slots
if len(slots) == 1:
spider, slot = slots.items()[0]
telnet_vars = {
'engine': self.crawler.engine,
'spider': spider,
'slot': slot,
'manager': self.crawler,
'extensions': self.crawler.extensions,
'stats': stats,
'spiders': self.crawler.spiders,
'settings': self.crawler.settings,
'est': lambda: prin | t_engine_status(self.crawler.engine),
'p': pprint.pprint,
'prefs': print_live_refs,
'hpy': hpy,
'help': "This is Scrapy telnet console. For more info see: " \
"http://doc.scrapy.org/topics/telnetconsole.html", # see #284
| }
send_catch_log(update_telnet_vars, telnet_vars=telnet_vars)
return telnet_vars
|
jokerYellow/TranslateFiles | test/conversion_test.py | Python | mit | 4,082 | 0.003791 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os
import unittest
class OpenCCTest(unittest.TestCase):
def setUp(self):
self.openCC = OpenCC()
def test_hk2s(self):
self.openCC.set_conversion('hk2s')
words = '香煙(英語:Cigarette),為煙草製品的一種。滑鼠是一種很常見及常用的電腦輸入設備。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入设备。')
def test_s2hk(self):
self.openCC.set_conversion('s2hk')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香煙(英語:Cigarette),為煙草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2t(self):
self.openCC.set_conversion('s2t')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),爲菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2tw(self):
| self.openCC.set_conversion('s2tw')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2twp(self):
self.openCC.set_conversion('s2twp')
words = '香烟(英语:Cigarette),为烟草制品的一种。內存是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。記憶體是一種很常見及 | 常用的電腦輸入裝置。')
def test_t2hk(self):
self.openCC.set_conversion('t2hk')
words = '香菸(英語:Cigarette),爲菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香煙(英語:Cigarette),為煙草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。')
def test_t2s(self):
self.openCC.set_conversion('t2s')
words = '香菸(英語:Cigarette),爲菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入装置。')
def test_t2tw(self):
self.openCC.set_conversion('t2tw')
words = '香菸(英語:Cigarette),爲菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_tw2s(self):
self.openCC.set_conversion('tw2s')
words = '香菸(英語:Cigarette),為菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入装置。')
def test_tw2sp(self):
self.openCC.set_conversion('tw2sp')
words = '香菸(英語:Cigarette),為菸草製品的一種。記憶體是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。内存是一种很常见及常用的电脑输入设备。')
if __name__ == '__main__':
sys.path.append(os.pardir)
from opencc import OpenCC
unittest.main()
|
cas4ey/behavior-studio | source/remote_debugger/debugger_mode.py | Python | gpl-3.0 | 2,171 | 0.003224 | # coding=utf-8
# -----------------
# file : debugger_mode.py
# date : 2014/11/18
# author : Victor | Zarubkin
# contact : victor.zarubkin@gmail.com
# copyright : Copyright (C) 2014 Victor Zarubkin
# license : This file is part of BehaviorStudio.
# :
# : BehaviorStudio is free software: you can redistribute it and/or modify
# : it under the terms of the GNU General Public License as published by
# : the Free Software Foundation, either version 3 of the License, or
# | : (at your option) any later version.
# :
# : BehaviorStudio is distributed in the hope that it will be useful,
# : but WITHOUT ANY WARRANTY; without even the implied warranty of
# : MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# : GNU General Public License for more details.
# :
# : You should have received a copy of the GNU General Public License
# : along with BehaviorStudio. If not, see <http://www.gnu.org/licenses/>.
# :
# : A copy of the GNU General Public License can be found in file COPYING.
############################################################################
"""
"""
__author__ = 'Victor Zarubkin'
__copyright__ = 'Copyright (C) 2014 Victor Zarubkin'
__credits__ = ['Victor Zarubkin']
__license__ = ['GPLv3']
__version__ = '1.2.1' # this is last application version when this script file was changed
__email__ = 'victor.zarubkin@gmail.com'
############################################################################
class DebuggerMode:
# Display state for currently running nodes
CurrentState = 0
# Display current state for nodes that are currently running
# and display last finish state for nodes that are not running
MixedState = 1
def __init__(self):
pass
#######################################################################################################################
#######################################################################################################################
|
lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/strategies/tree.py | Python | gpl-3.0 | 3,710 | 0.001348 | from functools import partial
from sympy.strategies import chain, minimize
import sympy.strategies.branch as branch
from sympy.strategies.branch import yieldify
identity = lambda x: x
def treeapply(tree, join, leaf=identity):
""" Apply functions onto recursive containers (tree)
join - a dictionary mapping container types to functions
e.g. ``{list: minimize, tuple: chain}``
Keys are containers/iterables. Values are functions [a] -> a.
Examples
--------
>>> from sympy.strategies.tree import treeapply
>>> tree = [(3, 2), (4, 1)]
>>> treeapply(tree, {list: max, tuple: min})
2
>>> add = lambda *args: sum(args)
>>> def mul(*args):
... total = 1
... for arg in args:
... total *= arg
... return total
>>> treeapply(tree, {list: mul, tuple: add})
25
"""
for typ in join:
if isinstance(tree, typ):
return join[typ](*map(partial(treeapply, join=join, leaf=leaf),
tree))
return leaf(tree)
def greedy(tree, objective=identity, **kwargs):
""" Execute a strategic tree. Select alternatives greedily
Trees
-----
Nodes in a tree can be either
function - a leaf
list - a selection among operations
tuple - a sequence of chained operations
Textual examples
----------------
Text: Run f, then run g, e.g. ``lambda x: g(f(x))``
Code: ``(f, g)``
Text: Run either f or g, whichever minimizes the objective
Code: ``[f, g]``
Textx: Run either f or g, whichever is better, then run h
Code: ``([f, g], h)``
Text: Either expand then simplify or try factor then foosimp. Finally print
Code: ``([(expand, simplify), (factor, foosimp)], print)``
Objective
---------
"Better" is determined by the objective keyword. This function makes
choices to minimize the objective. It defaults to the identity.
Example
-------
>>> from sympy.strategies.tree import greedy
>>> inc = lambda x: x + 1
>>> dec = lambda x: x - 1
>>> double = lambda x: 2*x
>>> tree = [inc, (dec, double)] # either inc or dec-then-double
>>> fn = greedy(tree)
>>> fn(4) # lowest value comes from the inc
5
>>> fn(1) # lowest value comes from dec then double
0
This funcion selects between options in a tuple. The result is chosen that
minimizes the objective function.
>>> fn = greedy(tree, objective=lambda x: -x) # maximize
>>> fn(4) # highest value comes from the dec then double
6
>>> fn(1) # highest value comes from the inc
2
Greediness
----------
This is a greedy algorithm. In the example:
([a, b], c) # do either a or b, then do c
the choice between running ``a`` or ``b`` is made without foresight to c
"""
optimize = partial(minimize, objective=objective)
return treeapply(tree, {list: optimize, tuple: chain}, **kwargs)
def allresults(tree, leaf=yieldify):
""" Execute a strategic tree. Return all possibilities.
Returns a lazy iterator of all possible results
Exhaustiveness
--------------
T | his is an exhaustive algorithm. In the example
([a, b], [c, d])
All of the results from
(a, c), (b, c), (a, d), (b, d)
are returned. This can lead to combinatorial blowup.
See sympy.strategies.greedy for details on input
"""
return treeapply(tree, {list: branch.multiplex, tuple: branch.chain},
leaf=leaf)
def brute(tree, objective=identi | ty, **kwargs):
return lambda expr: min(tuple(allresults(tree, **kwargs)(expr)),
key=objective)
|
venicegeo/eventkit-cloud | eventkit_cloud/utils/services/errors.py | Python | bsd-3-clause | 454 | 0.002203 | class ServiceError(Exception):
"""Base class for exceptions in this module."""
pass
class UnsupportedFormatError(ServiceError):
"""Used to raise exceptions when a response doesn't match expected semantics or for failed version checks."""
pass
clas | s MissingLayerError(ServiceError):
"""Used if expected layer could not be found in the service."""
def __init__(self, message):
| self.message = message
|
fedora-infra/tahrir-api | alembic/versions/3c7fd5b4e2c2_add_two_new_columns_.py | Python | gpl-3.0 | 1,034 | 0.003868 | """Add two new columns for Person.
Revision ID: 3c7fd5b4e2c2
Revises: | 24282792d72a
Create Date: 2013-06-26 14:46:28.361709
"""
# revision identifiers, used by Alembic.
revision = "3c7fd5b4e2c2"
down_revision = "16943d9088cf"
import tahrir_api
from alembic import op
import sqlalchemy as sa
import datetime
def upgrade():
op.add_column("persons", sa.Column("created_on", sa.DateTime()))
op.add_column("persons", sa.Column("opt_out", | sa.Boolean()))
# We have to do this manually because I can't figure out how to make
# alembic apply defaults to sqlite.
engine = op.get_bind().engine
session_maker = sa.orm.sessionmaker(bind=engine)
session = sa.orm.scoped_session(session_maker)
persons = session.query(tahrir_api.model.Person).all()
for person in persons:
# Set our defaults
person.created_on = datetime.datetime.now()
person.opt_out = False
session.commit()
def downgrade():
op.drop_column("persons", "opt_out")
op.drop_column("persons", "created_on")
|
booi/aracna | pypose-old/aracna-python/driver.py | Python | gpl-3.0 | 5,702 | 0.003858 | #!/usr/bin/env python
"""
PyPose: Serial driver for connection to arbotiX board or USBDynamixel.
Copyright (c) 2008,2009 Michael E. Ferguson. All right reserved.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
"""
import serial
import time
import sys
from binascii import b2a_hex
from ax12 import *
class Driver:
""" Class to open a serial port and control AX-12 servos
through an arbotiX board or USBDynamixel. """
def __init__(self, port="/dev/ttyUSB0",baud=38400, interpolation=False, direct=False):
""" This may throw errors up the line -- that's a good thing. """
self.ser = serial.Serial()
self.ser.baudrate = baud
self.ser.port = port
self.ser.timeout = 0.5
self.ser.open()
self.error = 0
self.hasInterpolation = interpolation
self.direct = direct
def execute(self, index, ins, params):
""" Send an instruction to a device. """
self.ser.flushInput()
length = 2 + len(params)
checksum = 255 - ((index + length + ins + sum(params))%256)
self.ser.write(chr(0xFF)+chr(0xFF)+chr(index)+chr(length)+chr(ins))
for val in params:
self.ser.write(chr(val))
self.ser.write(chr(checksum))
return self.getPacket(0)
def setReg(self, index, regstart, values):
""" Set the value of registers. Should be called as such:
ax12.setReg(1,1,(0x01,0x05)) """
self.execute(index, AX_WRITE_DATA, [regstart] + values)
return self.error
def getPacket(self, mode, id=-1, leng=-1, error=-1, params = None):
""" Read a return packet, iterative attempt """
# need a positive byte
d = self.ser.read()
if d == '':
print "Fail Read"
return None
# now process our byte
if mode == 0: # get our first 0xFF
if ord(d) == 0xff:
print "Oxff found"
return self.getPacket(1)
else:
print "Oxff NOT found, restart: " + str(ord(d))
return self.getPacket(0)
elif mode == 1: # get our second 0xFF
if ord(d) == 0xff:
print "Oxff found"
return self.getPacket(2)
else:
print "Oxff NOT found, restart: " + str(ord(d))
return self.getPacket(0)
elif mode == 2: # get id
if d != 0xff:
print "ID found: " + str(ord(d))
return self.getPacket(3, ord(d))
else:
print "0xff is not ID, restart"
return self.getPacket(0)
elif mode == 3: # get length
print "Length found: " + str(ord(d))
return self.getPacket(4, id, ord(d))
elif mode == 4: # read error
print "Error level found: " + str(ord(d))
self.error = ord(d)
if leng == 2:
return self.getPacket(6, id, leng, ord(d), list())
else:
return self.getPacket(5, id, leng, ord(d), list())
elif mode == 5: # read params
print "Parameter found: " + str(ord(d))
params.append(ord(d))
if len(params) + 2 == leng:
return self.getPacket(6, id, leng, error, params)
else:
return self.getPacket(5, id, leng, error, params)
elif mode == 6: # read checksum
print "Checksum found: " + str(ord(d))
checksum = id + leng + error + sum(params) + ord(d)
print "Checksum computed: " + str(checksum)
if checksum % 256 != 255:
print "Checksum ERROR"
return None
return params
# fail
return None
def getReg(self, index, regstart, rlength):
""" Get the value of registers, should be called as such:
ax12.getReg(1,1,1) """ |
vals = self.execute(index, AX_READ_DATA, [regstart, rlength])
if vals == None:
print "Read Failed: Servo ID = " + str(index)
return -1
if rlength == 1:
return vals[0]
return vals
def syncWrite(self, regstart, vals):
""" Set the value of registers. Should be called as such:
ax12.syncWrite(reg, ((i | d1, val1, val2), (id2, val1, val2))) """
self.ser.flushInput()
length = 4
valsum = 0
for i in vals:
length = length + len(i)
valsum = valsum + sum(i)
checksum = 255 - ((254 + length + AX_SYNC_WRITE + regstart + len(vals[0]) - 1 + valsum)%256)
# packet: FF FF ID LENGTH INS(0x03) PARAM .. CHECKSUM
self.ser.write(chr(0xFF)+chr(0xFF)+chr(0xFE)+chr(length)+chr(AX_SYNC_WRITE)+chr(regstart)+chr(len(vals[0])-1))
for servo in vals:
for value in servo:
self.ser.write(chr(value))
self.ser.write(chr(checksum))
# no return info...
|
CrowdSpot/shareabouts | src/sa_web/views.py | Python | gpl-3.0 | 15,185 | 0.001317 | import requests
import yaml
import ujson as json
import logging
import os
import time
import hashlib
import httpagentparser
import urllib2
from .config import get_shareabouts_config
from django.shortcuts import render
from django.conf import settings
from django.core.cache import cache
from django.core.mail import EmailMultiAlternatives
from django.http import HttpResponse, Http404
from django.template import TemplateDoesNotExist, RequestContext
from django.template.loader import render_to_string
from django.utils.timezone import now
from django.views.decorators.csrf import ensure_csrf_cookie
from django.core.urlresolvers import resolve, reverse
from proxy.views import proxy_view as remote_proxy_view
log = logging.getLogger(__name__)
def make_api_root(dataset_root):
components = dataset_root.split('/')
if dataset_root.endswith('/'):
return '/'.join(components[:-4]) + '/'
else:
return '/'.join(components[:-3]) + '/'
def make_auth_root(dataset_root):
return make_api_root(dataset_root) + 'users/'
def make_resource_uri(resource, root):
resource = resource.strip('/')
root = root.rstrip('/')
uri = '%s/%s' % (root, resource)
return uri
class ShareaboutsApi (object):
def __init__(self, dataset_root):
self.dataset_root = dataset_root
self.auth_root = make_auth_root(dataset_root)
self.root = make_api_root(dataset_root)
def get(self, resource, default=None, **kwargs):
uri = make_resource_uri(resource, root=self.dataset_root)
res = requests.get(uri, params=kwargs,
headers={'Accept': 'application/json'})
return (res.text if res.status_code == 200 else default)
def current_user(self, default=u'null', **kwargs):
uri = make_resource_uri('current', root=self.auth_root)
res = requests.get(uri, headers={'Accept': 'application/json'}, **kwargs)
return (res.text if res.status_code == 200 else default)
@ensure_csrf_cookie
def index(request, place_id=None):
# Load app config settings
config = get_shareabouts_config(settings.SHAREABOUTS.get('CONFIG'))
config.update(settings.SHAREABOUTS.get('CONTEXT', {}))
# Get initial data for bootstrapping into the page.
dataset_root = settings.SHAREABOUTS.get('DATASET_ROOT')
if (dataset_root.startswith('file:')):
dataset_root = request.build_absolute_uri(reverse('api_proxy', args=('',)))
api = ShareaboutsApi(dataset_root=dataset_root)
# Get the content of the static pages linked in the menu.
pages_config = config.get('pages', [])
pages_config_json = json.dumps(pages_config)
# The user token will be a pair, with the first element being the type
# of identification, and the second being an identifier. It could be
# 'username:mjumbewu' or 'ip:123.231.132.213', etc. If the user is
# unauthenticated, the token will be session-based.
if 'user_token' not in request.session:
t = int(time.time() * 1000)
ip = request.META['REMOTE_ADDR']
unique_string = str(t) + str(ip)
session_token = 'session:' + hashlib.md5(unique_string).hexdigest()
request.session['user_token'] = session_token
request.session.set_expiry(0)
user_token_json = u'"{0}"'.format(request.session['user_token'])
# Get the browser that the user is using.
user_agent_string = request.META.get('HTTP_USER_AGENT', '')
if user_agent_string:
user_agent = httpagentparser.detect(user_agent_string)
user_agent_json = json.dumps(user_agent)
else:
# If no user agent is specified, stub a generic one in.
user_agent_json = json.dumps({
"os": {"name": ""},
"browser": {"name": "", "version": None},
"platform": {"name": "", "version": None}
})
place = None
if place_id and place_id != 'new':
place = api.get('places/' + place_id)
if place:
place = json.loads(place)
context = {'config': config,
'user_token_json': user_token_json,
'pages_config': pages_config,
'pages_config_json': pages_config_json,
'user_agent_json': user_agent_json,
# Useful for customized meta tags
'place': place,
'API_ROOT': api.root,
'DATASET_ROOT': api.dataset_root,
}
return render(request, 'index.html', context)
def place_was_created(request, path, response):
path = path.strip('/')
return (
path.startswith('places') and
not path.startswith('places/') and
response.status_code == 201)
def send_place_created_notifications(request, response):
config = get_shareabouts_config(settings.SHAREABOUTS. | get('CONFIG'))
config.update(settings.SHAREABOUTS.get('CONTEXT', {}))
# Before we start, check whether we're | configured to send at all on new
# place.
should_send = config.get('notifications', {}).get('on_new_place', False)
if not should_send:
return
# First, check that we have all the settings and data we need. Do not bail
# after each error, so that we can report on all the validation problems
# at once.
errors = []
try:
# The reuest has any potentially private data fields.
requested_place = json.loads(request.body)
except ValueError:
errors.append('Received invalid place JSON from request: %r' % (request.body,))
try:
# The response has things like ID and cretated datetime
try: response.render()
except: pass
place = json.loads(response.content)
except ValueError:
errors.append('Received invalid place JSON from response: %r' % (response.content,))
try:
from_email = settings.EMAIL_ADDRESS
except AttributeError:
errors.append('EMAIL_ADDRESS setting must be configured in order to send notification emails.')
try:
email_field = config.get('notifications', {}).get('submitter_email_field', 'submitter_email')
recipient_email = requested_place['properties'][email_field]
except KeyError:
errors.append('No "%s" field found on the place. Be sure to configure the "notifications.submitter_email_field" property if necessary.' % (email_field,))
# Bail if any errors were found. Send all errors to the logs and otherwise
# fail silently.
if errors:
for error_msg in errors:
log.error(error_msg)
return
# If the user didn't provide an email address, then no need to go further.
if not recipient_email:
return
# Set optional values
bcc_list = getattr(settings, 'EMAIL_NOTIFICATIONS_BCC', [])
# If we didn't find any errors, then render the email and send.
context_data = RequestContext(request, {
'place': place,
'email': recipient_email,
'config': config,
'site_root': request.build_absolute_uri('/'),
})
subject = render_to_string('new_place_email_subject.txt', context_data)
body = render_to_string('new_place_email_body.txt', context_data)
try:
html_body = render_to_string('new_place_email_body.html', context_data)
except TemplateDoesNotExist:
html_body = None
# connection = smtp.EmailBackend(
# host=...,
# port=...,
# username=...,
# use_tls=...)
# NOTE: In Django 1.7+, send_mail can handle multi-part email with the
# html_message parameter, but pre 1.7 cannot and we must construct the
# multipart message manually.
msg = EmailMultiAlternatives(
subject,
body,
from_email,
to=[recipient_email],
bcc=bcc_list)#,
# connection=connection)
if html_body:
msg.attach_alternative(html_body, 'text/html')
msg.send()
return
def proxy_view(request, url, requests_args={}):
# For full URLs, use a real proxy.
if url.startswith('http:') or url.startswith('https:'):
return remote_proxy_view(request, url, requests_args=requests_args)
# For local paths, use a simpler proxy. If there are headers specifi |
ubuntu-touch-apps/filemanager-app | tests/autopilot/filemanager/fixture_setup.py | Python | gpl-3.0 | 2,725 | 0 | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
#
# Copyright (C) 2013, 2014 Canonical Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import logging
import os
import shutil
import tempfile
import autopilot.logging
import fixtures
logger = logging.getLogger(__name__)
class TemporaryFileInDirectory(fixtures.Fixture):
"""Create a temporafy file in a specified directory."""
def __init__(self, parent_directory):
super(TemporaryFileInDirectory, self).__init__()
self.parent_directory = parent_directory
def setUp(self):
super(TemporaryFileInDirectory, self).setUp()
_, self.path = tempfile.mkstemp(
prefix='tmpfm', dir=self.parent_directory)
logger.debug(
'Created temporary file {} in {}.'.format(
self.path, self.parent_directory))
self.addCleanup(self.delete_ | file, self.path)
@autopilot.logging.log_action(logger.info | )
def delete_file(self, path):
"""Delete a file, if it exists."""
if os.path.exists(path):
logger.debug('Deleting file.')
os.remove(path)
else:
logger.debug('File does not exist.')
class TemporaryDirectoryInDirectory(fixtures.Fixture):
"""Create a temporary directory in a specified directory."""
def __init__(self, parent_directory):
super(TemporaryDirectoryInDirectory, self).__init__()
self.parent_directory = parent_directory
def setUp(self):
super(TemporaryDirectoryInDirectory, self).setUp()
self.path = tempfile.mkdtemp(
prefix='tmpfm', dir=self.parent_directory)
logger.debug(
'Created temporary directory {} in parent directory {}'.format(
self.path, self.parent_directory))
self.addCleanup(self.delete_directory, self.path)
@autopilot.logging.log_action(logger.info)
def delete_directory(self, path):
"""Delete a directory, if it exists."""
if os.path.exists(path):
logger.debug('Deleting directory.')
shutil.rmtree(path)
else:
logger.debug('Directory does not exist.')
|
titasakgm/brc-stock | openerp/addons/bangkok_rubber/__openerp__.py | Python | agpl-3.0 | 1,546 | 0.003234 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Bangkok Rubber Module',
'version': '0.1',
'category': 'Tools',
'description': """
""",
'author': 'Mr.Tititab Srisookco',
'website': 'http://www.ineco.co.th',
'summary': '',
'depends': ['account','purchase','sale','stock','product'],
'data': [ ],
'up | date_xml': [
'security.xml',
'stock_view.xml',
'adempier_view.xml',
],
'images': [],
'installable': True,
'a | pplication': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
backmari/moose | python/peacock/Execute/TerminalTextEdit.py | Python | lgpl-2.1 | 1,846 | 0.00325 | #!/usr/bin/env python
from PyQt5.QtWidgets import QTextEdit, QMenu, QFileDialog, QSizePolicy
import mooseutils
class TerminalTextEdit(QTextEdit):
"""
A readonly text edit that replaces terminal codes with appropiate html codes.
Also uses fixed font.
"""
def __init__(self, **kwds):
super(TerminalTextEdit, self).__init__(**kwds)
self.setStyleSheet("TerminalTextEdit { background: black; color: white; }")
self.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setReadOnly(True)
def contextMenuEvent(self, event):
"""
User requested a context menu.
Input:
event: The QEvent()
"""
menu = QMenu()
save_action = menu.addAction("Save")
clear_action = menu.addAction("Clear")
action = menu.exec_(event.globalPos())
if action == save_action:
self.save()
elif action == clear_action:
self.clear()
def save(self):
"""
Save the contents into a file.
"""
fname, other = QFileDialog.getSaveFileName(self, "Choose output", "", "Output file (*.log *.txt)")
if fname:
try:
with open(fname, "w") as f:
f.write(self.toPlainText())
mooseutils.mooseMessage("Saved content to %s" % fname)
except Exception as e:
mooseutils.mooseError("Failed to save file: %s" % e, dialog=True)
def clear(self):
"""
Clear the output
"""
self.setHtml("")
if __name__ == "__main__":
from PyQt5.QtWidgets import QApplication
| import sys
qapp = QApplication(sys.argv)
w = TerminalTextEdit()
w.append('<span style="color:red;">foo</s | pan>')
w.show()
w.setEnabled(True)
sys.exit(qapp.exec_())
|
madoodia/codeLab | nuke_sdk/CompoTool/menu.py | Python | mit | 630 | 0.004762 | # madoodia@gmail.com
# This to | ol created for using in composite, for Hossein Bayat
import os
import nuke
# add our paths to the plugins path of nuke
current_dir = os.path.dirname(os.path.abspath(__file__))
icons_path = current_dir + r"\icons"
nuke.pluginAddPath(icons_path)
gizmos_path = current_dir + r"\gizmos"
nuke.pluginAddPath(g | izmos_path)
scripts_path = current_dir + r"\scripts"
nuke.pluginAddPath(scripts_path)
import app
# adding HB menu
nuke.menu('Nuke').addCommand('--HB--/CompoTool', 'app.show()', 'ctrl+alt+n', icon='target_d.png')
nuke.menu('Nuke').addCommand('--HB--/About', 'app.aboutUs()', 'ctrl+alt+m')
|
google-code/billreminder | src/gui/adddialog.py | Python | mit | 20,237 | 0.004101 | # -*- coding: utf-8 -*-
__all__ = ['AddDialog']
import pygtk
pygtk.require('2.0')
import gtk
import gconf
import datetime
import locale
import gobject
from lib import utils
from lib import common
from lib import scheduler
from lib.bill import Bill
from lib.actions import Actions
from lib.utils import create_pixbuf
from lib import i18n
from gui.widgets.datebutton import DateButton
from gui.categoriesdialog import CategoriesDialog
from lib.common import GCONF_PATH, GCONF_GUI_PATH, GCONF_ALARM_PATH
class AddDialog(gtk.Dialog):
"""
Class used to generate dialog to allow user to enter/edit records.
"""
def __init__(self, title=None, parent=None, record=None, selectedDate=None) | :
gtk.Dialog. | __init__(self, title=title, parent=parent,
flags=gtk.DIALOG_MODAL|gtk.DIALOG_NO_SEPARATOR,
buttons=(gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
gtk.STOCK_SAVE, gtk.RESPONSE_ACCEPT))
self.set_icon_from_file(common.APP_ICON)
self.set_border_width(6)
self.set_resizable(False)
if parent:
self.set_transient_for(parent)
self.set_position(gtk.WIN_POS_CENTER_ON_PARENT)
# If we have a selected date, then set calendar to use it
if not selectedDate:
selectedDate = datetime.datetime.today()
self.selectedDate = selectedDate
self.gconf_client = gconf.client_get_default()
# Private copy of any record passed
self.currentrecord = record
self.alarm = [None, None]
# TODO: This needs to be run BEFORE connecting the widgets
self._set_currency()
# Set up the UI
self._initialize_dialog_widgets()
self._connect_fields()
self.category_index_before = 0
self.connect("response", self._on_response)
# If a record was passed, we're in edit mode
if record:
self._populate_fields()
#in edit mode we must disable repetition
self.repeatSpinner.set_sensitive(False)
self.frequency.set_sensitive(False)
self.repeatlabel.set_sensitive(False)
else:
# Use alarm values from preferences
if self.gconf_client.get_bool(GCONF_ALARM_PATH + 'show_alarm') == 'true':
atime = self.gconf_client.get_string(GCONF_ALARM_PATH + 'show_alarm_at_time')
adays = self.gconf_client.get_int(GCONF_ALARM_PATH + 'show_alarm_before_days')
alarmDate = scheduler.get_alarm_timestamp(adays, atime, selectedDate)
self.alarmbutton.set_date(alarmDate)
def _set_currency(self):
self.decimal_sep = locale.localeconv()['mon_decimal_point']
self.thousands_sep = locale.localeconv()['mon_thousands_sep']
self.allowed_digts = [self.decimal_sep , self.thousands_sep]
self.allowed_digts += [str(i) for i in range(10)]
def _initialize_dialog_widgets(self):
self.vbox.set_spacing(12)
self.topcontainer = gtk.HBox(homogeneous=False, spacing=12)
self.topcontainer.set_border_width(6)
self.calbox = gtk.VBox(homogeneous=False, spacing=6)
self.fieldbox = gtk.VBox(homogeneous=False, spacing=6)
# Add calendar and label
self.callabel = gtk.Label()
self.callabel.set_markup_with_mnemonic(_("<b>_Due Date:</b>"))
self.callabel.set_alignment(0.00, 0.50)
self.calendar = gtk.Calendar()
self.callabel.set_mnemonic_widget(self.calendar)
## repeat times
self.repeatlabel = gtk.Label()
self.repeatlabel.set_markup_with_mnemonic(_("<b>_Repeat:</b>"))
self.repeatlabel.set_alignment(0.00, 0.50)
adj = gtk.Adjustment(00.0, 1.0, 23.0, 1.0)
self.repeatSpinner = gtk.SpinButton(adj, 0, 0)
self.repeatSpinner.set_tooltip_text(_("How many times to repeat this bill."))
self.repeatSpinner.set_wrap(True)
self.repeatSpinner.set_numeric(True)
self.repeatSpinner.set_update_policy(gtk.UPDATE_IF_VALID)
self.repeatSpinner.set_snap_to_ticks(True)
## Repeating bills
self.frequency = gtk.combo_box_new_text()
self.repeatlabel.set_mnemonic_widget(self.frequency)
self.frequency.connect('changed', self._on_frequency_changed)
#self.frequency.set_row_separator_func(self._determine_separator)
self._populate_frequency()
hbox = gtk.HBox(homogeneous=False, spacing=12)
hbox.pack_start(self.repeatlabel, expand=False, fill=True, padding=0)
hbox.pack_start(self.frequency, expand=True, fill=True, padding=0)
hbox.pack_start(self.repeatSpinner, expand=True, fill=True, padding=0)
## Pack it all up
self.calbox.pack_start(self.callabel,
expand=False, fill=True)
self.calbox.pack_start(self.calendar,
expand=True, fill=True)
self.calbox.pack_start(hbox,
expand=True, fill=True)
# Fields
## Table of 5 x 2
self.table = gtk.Table(rows=5, columns=2, homogeneous=False)
### Spacing to make things look better
self.table.set_col_spacings(12)
self.table.set_row_spacings(6)
## Labels
self.payeelabel = gtk.Label()
self.payeelabel.set_markup_with_mnemonic(_("<b>_Payee:</b>"))
self.payeelabel.set_alignment(0.00, 0.50)
self.amountlabel = gtk.Label()
self.amountlabel.set_markup_with_mnemonic(_("<b>_Amount:</b>"))
self.amountlabel.set_alignment(0.00, 0.50)
self.categorylabel = gtk.Label()
self.categorylabel.set_markup_with_mnemonic(_("<b>_Category:</b>"))
self.categorylabel.set_alignment(0.00, 0.50)
self.noteslabel = gtk.Label()
self.noteslabel.set_markup_with_mnemonic(_("<b>_Notes:</b>"))
self.noteslabel.set_alignment(0.00, 0.00)
self.alarmlabel = gtk.Label()
self.alarmlabel.set_markup_with_mnemonic(_("<b>A_larm:</b>"))
self.alarmlabel.set_alignment(0.00, 0.50)
## Fields
### Payee
self.payee = gtk.ComboBoxEntry()
self.payeelabel.set_mnemonic_widget(self.payee)
self.payeecompletion = gtk.EntryCompletion()
self.payee.child.set_completion(self.payeecompletion)
self._populate_payee() # Populate combobox with payee from db
### Amount
self.amount = gtk.Entry()
self.amountlabel.set_mnemonic_widget(self.amount)
self.amount.set_alignment(1.00)
### Category
self.categorydock = gtk.HBox(homogeneous=False, spacing=4)
self.category = gtk.ComboBox()
self.categorylabel.set_mnemonic_widget(self.category)
px = gtk.CellRendererPixbuf()
txt = gtk.CellRendererText()
self.category.pack_start(px, False)
self.category.pack_start(txt, False)
self.category.add_attribute(px, "pixbuf", 0)
self.category.add_attribute(txt, "text", 1)
self.category.set_row_separator_func(self._determine_separator)
self.categorybutton = gtk.Button()
self.categorybutton.set_tooltip_text(_("Manage Categories"))
self.categorybuttonimage = gtk.Image()
self.categorybuttonimage.set_from_stock(gtk.STOCK_EDIT,
gtk.ICON_SIZE_BUTTON)
self.categorybutton.set_image(self.categorybuttonimage)
self.categorydock.pack_start(self.category, expand=True,
fill=True, padding=0)
self.categorydock.pack_start(self.categorybutton, expand=False,
fill=True, padding=0)
self._populate_category() # Populate combobox with category from db
### Notes
self.notesdock = gtk.ScrolledWindow()
self.notesdock.set_shadow_type(gtk.SHADOW_OUT)
self.notesdock.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.notes = gtk.TextView()
self.noteslabel.set_mnemonic_widget(self.notes)
self.notes.set_wrap_mode(gtk.WRAP_WORD)
self.notesdock.add_with_viewport(self.notes)
### Buffer obje |
realraum/ari | test/overlay-test.py | Python | gpl-3.0 | 6,283 | 0.006844 | #!/usr/bin/env python
##
## ari
##
## the realraum audience response indicator
##
##
## Copyright (C) 2015 Christian Pointner <equinox@spreadspace.org>
##
## This file is part of ari.
##
## ari is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## any later version.
##
## ari is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with ari. If not, see <http://www.gnu.org/licenses/>.
##
import sys
import getopt
import gi
gi.require_version('Gst', '1.0')
from gi.repository import Gst, GObject
class R3Ari():
def __init__(self):
GObject.threads_init()
Gst.init(None)
self.mainloop_ = GObject.MainLoop()
self.pipeline_ = None
self.watch_id_ = None
self.video_width_ = 640
self.video_height_ = 480
self.meter_width_ = 560
self.meter_height_ = 23
self.meter_spacing_ = 7
self.l = 0.3
self.r = 0.7
def error(self, message, arg=None):
print "ERROR: %s (%s)" % (message, arg)
def on_message(self, bus, message):
s = message.get_structure()
if s.get_name() == 'level':
sys.stdout.write("\r")
for i in range(0, len(s['peak'])):
decay = clamp(s['decay'][i], -90.0, 0.0)
peak = clamp(s['peak'][i], -90.0, 0.0)
# if peak > decay:
# print "ERROR: peak bigger than decay!"
sys.stdout.write("channel %d: %3.2f / %3.2f, " % (i, decay, peak))
sys.stdout.flush()
return True
def run(self):
try:
s = 'videotestsrc is-live=true !xvimagesink' % ()
self.pipeline_ = Gst.Pipeline.new()
source = Gst.ElementFactory.make("videotestsrc")
source.set_property("is-live", True)
self.pipeline_.add(source)
filter = Gst.ElementFactory.make("capsfilter")
caps = Gst.Caps.from_string("video/x-raw,format=I420,width=%i,height=%i,framerate=50/1" % (self.video_width_, self.video_height_))
filter.set_property("caps", caps)
self.pipeline_.add(filter)
conv1 = Gst.ElementFactory.make("videoconvert")
self.pipeline_.add(conv1)
q1 = Gst.ElementFactory.make("queue")
self.pipeline_.add(q1)
overlay = Gst.ElementFactory.make("rsvgoverlay")
overlay.set_property("data", self.getVumeterSVG(0, 0, 0, 0))
self.pipeline_.add(overlay)
GObject.timeout_add(20, self.updateMeter, overlay)
conv2 = Gst.ElementFactory.make("videoconvert")
self.pipeline_.add(conv2)
sink = Gst.ElementFactory.make("xvimagesink")
self.pipeline_.add(sink)
source.link(filter)
filter.link(q1)
q1.link(conv1)
conv1.link(overlay)
overlay.link(conv2)
conv2.link(sink)
self.pipeline_.get_bus().add_signal_watch()
self.watch_id_ = self.pipeline_.get_bus().connect('message::element', self.on_message)
self.pipeline_.set_state(Gst.State.PLAYING)
self.mainloop_.run()
except GObject.GError, e:
self.error('Could not create pipeline', e.message)
except KeyboardInterrupt:
pass
finally:
if self.pipeline_ and self.watch_id_:
self.pipeline_.get_bus().disconnect(self.watch_id_)
self.pipeline_.get_bus().remove_signal_watch()
self.pipeline_.set_state(Gst.State.NULL)
def getVumeterSVG(self, l, lp, r, rp):
svg = "<svg>\n"
svg += " <defs>\n"
svg += " <linearGradient id='vumeter' x1='0%' y1='0%' x2='100%' y2='0%'>\n"
svg += " <stop offset='0%' style='stop-color:rgb(0,255,0);stop-opacity:1' />\n"
svg += " <stop offset='100%' style='stop-color:rgb(255,0,0);stop-opacity:1' />\n"
svg += " </linearGradient>\n"
svg += " </defs>\n"
box_w = self.meter_width_ + 2*self.meter_spacing_
box_h = 2*self.meter_height_ + 3*self.meter_spacing_
box_x = (self.video_width_ - box_w)/2
box_y = self.meter_spacing_
svg += " <rect x='%i' y='%i' rx='%i' ry='%i' width='%i' height='%i' style='fill:black;opacity:0.3' />\n" %(
box_x, box_y, self.meter_spacing_, self.meter_spacing_, box_w, box_h)
svg += " <rect x='%i' y='%i' width='%i' height='%i' style='fill:url(#vumeter);opacity:0.9' />\n" %(
box_x + self.meter | _spacing_, box_y + self.meter_spacing_, self.meter_width_*l, self.meter_height_)
svg += " <line x1='%i' y1='%i' x | 2='%i' y2='%i' style='stroke:rgb(255,0,0);stroke-width:3' />\n" %(
box_x + self.meter_width_*lp, box_y + self.meter_spacing_, box_x + self.meter_width_*lp, box_y + self.meter_spacing_ + self.meter_height_)
svg += " <rect x='%i' y='%i' width='%i' height='%i' style='fill:url(#vumeter);opacity:0.9' />\n" %(
box_x + self.meter_spacing_, box_y + self.meter_height_ + 2*self.meter_spacing_, self.meter_width_*r, self.meter_height_)
svg += " <line x1='%i' y1='%i' x2='%i' y2='%i' style='stroke:rgb(255,0,0);stroke-width:3' />\n" %(
box_x + self.meter_width_*rp, box_y + self.meter_height_ + 2*self.meter_spacing_,
box_x + self.meter_width_*rp, box_y + 2*self.meter_spacing_ + 2*self.meter_height_)
svg += "</svg>\n"
return svg
def updateMeter(self, overlay):
self.l += 0.01
if self.l > 0.9:
self.l = 0.0
lp = self.l + 0.1
self.r += 0.01
if self.r > 0.9:
self.r = 0.0
rp = self.r + 0.1
overlay.set_property("data", self.getVumeterSVG(self.l, lp, self.r, rp))
return True
if __name__ == '__main__':
a = R3Ari()
a.run()
|
jinzekid/codehub | python/day9/ssh.py | Python | gpl-3.0 | 310 | 0 | # Author: Jason Lu
import paramiko
ssh = p | aramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
ssh.connect(hostname='10.0.031', port=52113, username='root', password='123')
stdin, stdout, stderr = ssh.exec_command('df')
result = stdout.read()
print(result.deco | de())
ssh.close()
|
PandaWei/tp-libvirt | libvirt/tests/src/virsh_cmd/domain/virsh_setmaxmem.py | Python | gpl-2.0 | 7,485 | 0 | import logging
from autotest.client.shared import utils, error
from virttest import virsh, virt_vm
from virttest.libvirt_xml import vm_xml
def run(test, params, env):
"""
Test command: virsh setmaxmem.
1) Prepare vm environment.
2) Handle params
3) Run test command and get vm started then get maxmem.
4) Recover environment.
5) Check result.
TODO: support more options:--live,--config,--current.
"""
def vmxml_max_mem(vm_name):
vmxml = vm_xml.VMXML.new_from_dumpxml(vm_name)
return int(vmxml.max_mem)
def make_domref(domarg, vm_ref, domid, vm_name, domuuid):
# Specify domain as argument or parameter
if domarg == "yes":
dom_darg_key = "domainarg"
else:
dom_darg_key = "domain"
# How to reference domain
if vm_ref == "domid":
dom_darg_value = domid
elif vm_ref == "domname":
dom_darg_value = vm_name
elif vm_ref == "domuuid":
dom_darg_value = domuuid
elif vm_ref == "none":
dom_darg_value = None
elif vm_ref == "emptystring":
dom_darg_value = '""'
else: # stick in value directly
dom_darg_value = vm_ref
return {dom_darg_key: dom_darg_value}
def make_sizeref(sizearg, mem_ref, original_mem):
if sizearg == "yes":
size_darg_key = "sizearg"
else:
size_darg_key = "size"
if mem_ref == "halfless":
size_darg_value = "%d" % (original_mem / 2)
elif mem_ref == "halfmore":
size_darg_value = "%d" % int(original_mem * 1.5)
elif mem_ref == "same":
size_darg_value = "%d" % original_mem
elif mem_ref == "emptystring":
size_darg_value = '""'
elif mem_ref == "zero":
size_darg_value = "0"
elif mem_ref == "toosmall":
size_darg_value = "1024"
elif mem_ref == "toobig":
size_darg_value = "1099511627776" # (KiB) One Petabyte
elif mem_ref == "none":
size_darg_value = None
else: # stick in value directly
size_darg_value = mem_ref
return {size_darg_key: size_darg_value}
def is_old_libvirt():
regex = r'\s+\[--size\]\s+'
return bool(not virsh.has_command_help_match('setmaxmem', regex))
def is_xen_host():
check_cmd = "ls /dev/kvm"
return utils.run(check_cmd, ignore_status=True).exit_status
def is_in_range(actual, expected, error_percent):
deviation = 100 - (100 * (float(actual) / float(expected)))
logging.debug("Deviation: %0.2f%%", float(deviation))
return float(deviation) <= float(error_percent)
def print_debug_stats(original_vmxml_mem, original_dominfo_mem,
expected_mem, test_vmxml_mem, test_dominfo_mem):
dbgmsg = ("Original vmxml mem : %d KiB\n"
"Original dominfo mem : %d KiB\n"
"Expected max mem : %d KiB\n"
"Actual vmxml mem : %d KiB\n"
"Actual dominfo mem : %d KiB\n" % (
original_vmxml_mem,
original_dominfo_mem,
expected_mem,
test_vmxml_mem,
test_dominfo_mem))
for dbgline in dbgmsg.splitlines():
logging.debug(dbgline)
# MAIN TEST CODE ###
# Process cartesian parameters
vm_ref = params.get("setmaxmem_vm_ref", "")
mem_ref = params.get("setmaxmem_mem_ref", "")
status_error = "yes" == params.get("status_error", "no")
flags = params.get("setmaxmem_flags", "")
domarg = params.get("setmaxmem_domarg", "no")
sizearg = params.get("setmaxmem_sizearg", "no")
delta_per = params.get("setmaxmem_delta_per", "10")
vm_name = params.get("main_vm")
# Gather environment parameters
vm = env.get_vm(vm_name)
# FIXME: KVM does not support --live currently.
if (flags.count('live') or (flags.count('current') and vm.is_alive())):
raise error.TestNAError("KVM does not support --live.")
# Backup original XML
original_vmxml = vm_xml.VMXML.new_from_inactive_dumpxml(vm_name)
original_vmxml_mem = vmxml_max_mem(vm_name)
original_dominfo_mem = vm.get_max_mem()
domid = vm.get_id()
domuuid = vm.get_uuid()
uri = vm.connect_uri
old_libvirt = is_old_libvirt()
if old_libvirt:
logging.info("Running test on older libvirt")
use_kilobytes = True
else:
logging.info("Running test on newer libvirt")
use_kilobytes = False
xen_host = is_xen_host()
if xen_host:
logging.info("Running on xen host, %s offset is allowed.", delta_per)
# Argument pattern is complex, build with dargs
dargs = {'flagstr': flags,
'use_kilobytes': use_kilobytes,
'uri': uri, 'ignore_status': True, "debug": True}
dargs.update(make_domref(domarg, vm_ref, domid, vm_name, domuuid))
dargs.update(make_sizeref(sizearg, mem_ref, original_dominfo_mem))
if status_error:
logging.info("Error Test: Expecting an error to occur!")
try:
result = virsh.setmaxmem(**dargs)
status = result.exit_status
# Gather status if not running error test
start_status = 0 # Check can guest be started after maxmem-modified.
if not status_error:
if flags.count("config"):
vm.destroy()
if vm.state() == "shut off":
try:
vm.start()
except virt_vm.VMStartError, detail:
start_status = 1
logging.error("Start after VM's maxmem modified failed:%s",
d | etail)
# Actual results
test_vmxml_mem = vmxml_max_mem(vm_name)
test_dominfo_mem = vm.get_max_mem()
# Expected results for both vmxml and dominfo
if sizearg == "yes":
expected_mem = int(dargs["sizearg"])
else:
| expected_mem = int(dargs["size"])
print_debug_stats(original_vmxml_mem, original_dominfo_mem,
expected_mem, test_vmxml_mem, test_dominfo_mem)
else:
if vm.state() == "paused":
vm.resume()
finally:
original_vmxml.sync()
# Don't care about memory comparison on error test
if status_error:
if status is 0:
raise error.TestFail("Error test did not result in an error.")
else:
vmxml_match = (test_vmxml_mem == expected_mem)
if xen_host:
dominfo_match = is_in_range(test_dominfo_mem, expected_mem,
delta_per)
else:
dominfo_match = (test_dominfo_mem == expected_mem)
if (status or start_status or not vmxml_match or not dominfo_match):
msg = "test conditions not met: "
if status:
msg += "Non-zero virsh setmaxmem exit code. "
if not vmxml_match:
msg += "Max memory in VM's xml is not matched. "
if not dominfo_match:
msg += "Max memory in dominfo's output is not matched. "
if start_status:
msg += "Start after VM's max mem is modified failed."
raise error.TestFail(msg)
logging.info("Test end normally.")
|
gbrmachado/treeherder | tests/etl/test_buildapi.py | Python | mpl-2.0 | 10,999 | 0.001 | import json
import os
import pytest
import responses
from django.conf import settings
from django.core.cache import cache
from treeherder.etl.buildapi import (CACHE_KEYS,
Builds4hJobsProcess,
PendingJobsProcess,
RunningJobsProcess)
@pytest.fixture
def mock_buildapi_pending_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-pending.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_PENDING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_running_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-running.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_RUNNING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_builds4h_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"buildbot_text.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_BUILDS4H_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_pending_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-pending-missing1.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_PENDING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_running_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"builds-running-missing1.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_RUNNING_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_builds4h_missing1_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"buildbot_text-missing1.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_BUILDS4H_URL',
"file://{0}".format(path))
@pytest.fixture
def mock_buildapi_builds4h_missing_branch_url(monkeypatch):
tests_folder = os.path.dirname(os.path.dirname(__file__))
path = os.path.join(
tests_folder,
"sample_data",
"buildbot_text-missing_branch.json"
)
monkeypatch.setattr(settings,
'BUILDAPI_BUILDS4H_URL',
"file://{0}".format(path))
def test_ingest_pending_jobs(jm, initial_data,
mock_buildapi_pending_url,
mock_post_json,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi pending job creates a new obj in the job table
"""
etl_process = PendingJobsProcess()
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is True
assert cache.get(CACHE_KEYS['pending']) == set([24575179])
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is False
stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
def test_ingest_running_jobs(jm, initial_data,
mock_buildapi_running_url,
mock_post_json,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job creates a new obj in the job table
"""
etl_process = RunningJobsProcess()
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is True
assert cache.get(CACHE_KEYS['running']) == set([24767134])
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is Fal | se
stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
def test_ingest_builds4h_jobs(jm, initial_data,
mock_buildapi_builds | 4h_url,
mock_post_json,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi completed job creates a new obj in the job table
"""
etl_process = Builds4hJobsProcess()
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is True
assert len(cache.get(CACHE_KEYS['complete'])) == 32
new_jobs_were_added = etl_process.run()
assert new_jobs_were_added is False
stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 32
def test_ingest_running_to_complete_job(jm, initial_data,
mock_buildapi_running_url,
mock_buildapi_builds4h_url,
mock_post_json,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job transitions to a new completed job
"""
etl_process = RunningJobsProcess()
etl_process.run()
stored_running = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
assert len(stored_running) == 1
# the first job in the sample data should overwrite the running job
# we just ingested. Leaving us with only 32 jobs, not 33.
etl_process = Builds4hJobsProcess()
etl_process.run()
stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 32
# all jobs should be completed, including the original one which
# transitioned from running.
for job in stored_obj:
assert job['state'] == 'completed'
def test_ingest_running_job_fields(jm, initial_data,
mock_buildapi_running_url,
mock_post_json,
mock_log_parser,
mock_get_resultset,
mock_get_remote_content):
"""
a new buildapi running job creates a new obj in the job table
"""
etl_process = RunningJobsProcess()
etl_process.run()
stored_obj = jm.get_dhub().execute(proc="jobs_test.selects.jobs")
jm.disconnect()
assert len(stored_obj) == 1
assert stored_obj[0]["start_timestamp"] is not 0
#####################
# MISSING RESULTSETS
#####################
def test_ingest_pending_jobs_1_missing_resultset(jm, initial_data,
sample_resultset, test_repository, mock_buildapi_pending_missing1_url,
mock_post_json, mock_get_resultset, mock_get_remote_content,
activate_responses):
"""
Ensure the pending job with the missing resultset is queued for refetching
"""
etl_process = PendingJobsProcess()
_do_missing_resultset_test(jm, etl_process)
def test_ingest_running_jobs_1_missing_resultset(jm, initial_data,
sample_resultset, test_repository, mock_buildapi_running_missing1_url,
mock_post_json, mock_get_resultset, mock_get_remote_content,
activate_responses):
"""
Ensure the running job with the mis |
NervanaSystems/coach | rl_coach/presets/CartPole_Dueling_DDQN.py | Python | apache-2.0 | 2,384 | 0.002936 | import math
from rl_coach.agents.ddqn_agent import DDQNAgentParameters
from rl_coach.architectures.head_parameters import DuelingQHeadParameters
from rl_coach.base_parameters import VisualizationParameters, PresetValidationParameters
from rl_coach.core_types import TrainingSteps, EnvironmentEpisodes, EnvironmentSteps
from rl_coach.environments.gym_environment import GymVectorEnvironment
from rl_coach.graph_managers.basic_rl_graph_manager import BasicRLGraphManager
from rl_coach.graph_managers.graph_manager import ScheduleParameters
from rl_coach.memories.memory import MemoryGranularity
from rl_coach.schedules import LinearSchedule
####################
# Graph Scheduling #
####################
schedule_params = ScheduleParameters()
schedule_params.improve_steps = TrainingSteps(10000000000)
schedule_params.steps_between_evaluation_periods = EnvironmentEpisodes(10)
schedule_params.evaluation_steps = EnvironmentEpisodes(1)
schedule_params.heatup_steps = EnvironmentSteps(1000)
#########
# Agent #
#########
agent_params = DDQNAgentParameters()
# DDQN params
agent_params.algorithm.num_steps_between_copying_online_weights_to_target = EnvironmentSteps(100)
agent_params.algorithm.discount = 0.99
agent_params.algorithm.num_consecutive_playing_steps = EnvironmentSteps(1)
# NN configuration
agent_params.network_wrappers['main'].learning_rate = 0.00025
agent_params.network_wrappers['main'].replace_mse_with_huber_loss = False
agent_params.network_wrappers['main'].heads_parameters = \
[DuelingQHeadParameters(rescale_gradient_from_head_by_factor=1/math.sqrt(2))]
# ER size
agent_params.memory.max_size = (MemoryGranularity.Transitions, 40000)
# E-Greedy schedule
agent_params.exploration.epsilon_schedule = LinearSchedule(1.0, 0.01, 10000)
################
# Environment #
################
env_params = GymVectorEnvironment(level='CartPole-v0')
########
# Test #
########
preset_validation_params = PresetValidationParameters()
preset_validation_params.test = True
preset_validation_params.min_reward_threshold = 150
preset_validation_params.max_episodes_to_achieve_reward = 300 |
graph_manager = BasicRLGraphManager(agent_params=agent_params, env_params=env_params,
schedule_params=schedule_params, vis_params=VisualizationParameters(),
| preset_validation_params=preset_validation_params)
|
ForensicTools/GRREAT-475_2141-Chaigon-Failey-Siebert | parsers/ie_history.py | Python | apache-2.0 | 5,573 | 0.00646 | #!/usr/bin/env python
# Copyright 2011 Google Inc. All Rights Reserved.
"""Parser for IE index.dat files.
Note that this is a very naive and incomplete implementation and should be
replaced with a more intelligent one. Do not implement anything based on this
code, it is a placeholder for something real.
For anyone who wants a useful reference, see this:
http://heanet.dl.sourceforge.net/project/libmsiecf/Documentation/MSIE%20Cache%20
File%20format/MSIE%20Cache%20File%20%28index.dat%29%20format.pdf
"""
import datetime
import glob
import operator
import os
import struct
import sys
import urlparse
import logging
from grr.lib import parsers
from grr.lib import rdfvalue
# Difference between 1 Jan 1601 and 1 Jan 1970.
WIN_UNIX_DIFF_MSECS = 11644473600 * 1e6
class IEHistoryParser(parsers.FileParser):
"""Parse IE index.dat files into BrowserHistoryItem objects."""
output_types = ["BrowserHistoryItem"]
supported_artifacts = ["InternetExplorerHistory"]
def Parse(self, stat, file_object, knowledge_base):
"""Parse the History file."""
_, _ = stat, knowledge_base
# TODO(user): Convert this to use the far more intelligent plaso parser.
ie = IEParser(file_object)
for dat in ie.Parse():
yield rdfvalue.BrowserHistoryItem(
url=dat["url"], domain=urlparse.urlparse(dat["url"]).netloc,
access_time=dat.get("mtime"),
program_name="Internet Explorer", source_urn=stat.aff4path)
class IEParser(object):
"""Parser object for index.dat files.
The file format for IE index.dat files is somewhat poorly documented.
The following implementation is based on information from:
http://www.forensicswiki.org/wiki/Internet_Explorer_History_File_Format
Returns results in chronological order based on mtime
"""
FILE_HEADER = "Client UrlCache MMF Ver 5.2"
BLOCK_SIZE = 0x80
def __init__(self, input_obj):
"""Initialize.
Args:
input_obj: A file like object to read the index.dat from.
"""
self._file = input_obj
self._entries = []
def Parse(self):
"""Parse the file."""
if not self._file:
logging.error("Couldn't open file")
return
# Limit read size to 5MB.
self.input_dat = self._file.read(1024 * 1024 * 5)
if not self.input_dat.startswith(self.FILE_HEADER):
logging.error("Invalid index.dat file %s", self._file)
return
# Events aren't time ordered in the history file, so we collect them all
# then sort.
events = []
for event in self._DoParse():
events.append(event)
for event in sorted(events, key=operator.itemgetter("mtime")):
yield event
def _GetRecord(self, offset, record_size):
"""Retrieve a single record from the file.
Args:
offset: offset from start of input_dat where header starts
record_size: length of the header according to file (untrusted)
Returns:
A dict containing a single browser histo | ry record.
"""
record_header = "<4sLQQL"
get4 = lambda x: struct.unpack("<L", self.input_dat[x:x+4])[0]
url_offset = struct.unpack("B", self.input_dat[offset+52:offset+53])[0]
if url_offset in [0xFF, 0xFE]:
return None
data_offset = get4( | offset + 68)
data_size = get4(offset + 72)
start_pos = offset + data_offset
data = struct.unpack("{0}s".format(data_size),
self.input_dat[start_pos:start_pos + data_size])[0]
fmt = record_header
unknown_size = url_offset - struct.calcsize(fmt)
fmt += "{0}s".format(unknown_size)
fmt += "{0}s".format(record_size - struct.calcsize(fmt))
dat = struct.unpack(fmt, self.input_dat[offset:offset+record_size])
header, blocks, mtime, ctime, ftime, _, url = dat
url = url.split(chr(0x00))[0]
if mtime: mtime = mtime/10 - WIN_UNIX_DIFF_MSECS
if ctime: ctime = ctime/10 - WIN_UNIX_DIFF_MSECS
return {"header": header, # the header
"blocks": blocks, # number of blocks
"urloffset": url_offset, # offset of URL in file
"data_offset": data_offset, # offset for start of data
"data_size": data_size, # size of data
"data": data, # actual data
"mtime": mtime, # modified time
"ctime": ctime, # created time
"ftime": ftime, # file time
"url": url # the url visited
}
def _DoParse(self):
"""Parse a file for history records yielding dicts.
Yields:
Dicts containing browser history
"""
get4 = lambda x: struct.unpack("<L", self.input_dat[x:x+4])[0]
filesize = get4(0x1c)
offset = get4(0x20)
coffset = offset
while coffset < filesize:
etype = struct.unpack("4s", self.input_dat[coffset:coffset + 4])[0]
if etype == "REDR":
pass
elif etype in ["URL "]:
# Found a valid record
reclen = get4(coffset + 4) * self.BLOCK_SIZE
yield self._GetRecord(coffset, reclen)
coffset += self.BLOCK_SIZE
def main(argv):
if len(argv) < 2:
print "Usage: {0} index.dat".format(os.path.basename(argv[0]))
else:
files_to_process = []
for input_glob in argv[1:]:
files_to_process += glob.glob(input_glob)
for input_file in files_to_process:
ie = IEParser(open(input_file))
for dat in ie.Parse():
dat["ctime"] = datetime.datetime.utcfromtimestamp(dat["ctime"]/1e6)
print "{ctime} {header} {url}".format(**dat)
if __name__ == "__main__":
main(sys.argv)
|
caseyching/incubator-airflow | airflow/contrib/operators/bigquery_operator.py | Python | apache-2.0 | 3,249 | 0.002155 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging

from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults


class BigQueryOperator(BaseOperator):
    """
    Executes BigQuery SQL queries in a specific BigQuery database.
    """
    # 'bql' may point at a .sql template file; both fields are templated.
    template_fields = ('bql', 'destination_dataset_table')
    template_ext = ('.sql',)
    ui_color = '#e4f0e8'

    @apply_defaults
    def __init__(self,
                 bql,
                 destination_dataset_table=False,
                 write_disposition='WRITE_EMPTY',
                 allow_large_results=False,
                 bigquery_conn_id='bigquery_default',
                 delegate_to=None,
                 udf_config=False,
                 *args,
                 **kwargs):
        """
        Create a new BigQueryOperator.

        :param bql: the sql code to be executed
        :type bql: Can receive a str representing a sql statement,
            a list of str (sql statements), or reference to a template file.
            Template reference are recognized by str ending in '.sql'
        :param destination_dataset_table: A dotted
            (<project>.|<project>:)<dataset>.<table> that, if set, will store
            the results of the query.
        :type destination_dataset_table: string
        :param write_disposition: what to do if the destination table already
            contains data; passed straight through to ``run_query``
            (e.g. 'WRITE_EMPTY', 'WRITE_TRUNCATE', 'WRITE_APPEND').
        :type write_disposition: string
        :param allow_large_results: whether to allow large query results;
            passed straight through to ``run_query``.
        :type allow_large_results: boolean
        :param bigquery_conn_id: reference to a specific BigQuery hook.
        :type bigquery_conn_id: string
        :param delegate_to: The account to impersonate, if any.
            For this to work, the service account making the request must have
            domain-wide delegation enabled.
        :type delegate_to: string
        :param udf_config: The User Defined Function configuration for the query.
            See https://cloud.google.com/bigquery/user-defined-functions for details.
        :type udf_config: list
        """
        super(BigQueryOperator, self).__init__(*args, **kwargs)
        self.bql = bql
        self.destination_dataset_table = destination_dataset_table
        self.write_disposition = write_disposition
        self.allow_large_results = allow_large_results
        self.bigquery_conn_id = bigquery_conn_id
        self.delegate_to = delegate_to
        self.udf_config = udf_config

    def execute(self, context):
        """Run the configured BQL through a BigQuery cursor."""
        logging.info('Executing: %s', str(self.bql))
        hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
                            delegate_to=self.delegate_to)
        conn = hook.get_conn()
        cursor = conn.cursor()
        cursor.run_query(self.bql, self.destination_dataset_table, self.write_disposition,
                         self.allow_large_results, self.udf_config)
|
vladiibine/trust-network | src/python/trust_network_backend/tnb/apps/core/views.py | Python | mit | 954 | 0.002096 | import os
from tornado import gen
import tornado.web
class DocsHandler(tornado.web.RequestHandler):
    """Serve the Swagger UI page, pointing it at the v1 API spec."""

    @gen.coroutine
    def get(self):
        # Build an absolute URL to the spec using the incoming request's
        # scheme and host, so docs work behind any hostname/proxy scheme.
        version = "{}://{}/docs/version/v1.yml".format(
            self.request.protocol, self.request.host)
        self.render("swagger/index.html", version=version)
class HomeHandler(tornado.web.StaticFileHandler):
    """Static handler that always serves the SPA entry page."""

    @gen.coroutine
    def get(self, path, include_body=True):
        # Ignore the requested path; every hit gets index.html.
        yield super().get(path='index.html', include_body=include_body)
class CachingFrontendHandler(tornado.web.StaticFileHandler):
    """If a file exists in the root folder, serve it, otherwise serve index.html
    """
    @gen.coroutine
    def get(self, path, include_body=True):
        absolute_path = self.get_absolute_path(self.root, path)
        is_file = os.path.isfile(absolute_path)
        if is_file:
            # Real static asset: serve it as-is (StaticFileHandler handles
            # caching headers).
            yield super().get(path, include_body)
        else:
            # Anything else is a client-side route: fall back to the SPA shell.
            yield super().get('index.html', include_body)
egbertbouman/tribler-g | Tribler/UPnP/common/objectconsole.py | Python | lgpl-2.1 | 2,734 | 0.006584 | # Written by Ingar Arntzen
# see LICENSE.txt for license information
"""
This module implements a generic console interface that can
be attached to any runnable python object.
"""
import code
import __builtin__
import threading
import exceptions
##############################################
# OBJECT CONSOLE
##############################################
class ConsoleError(exceptions.Exception):
    """Error associated with the console.

    Raised by ObjectConsole.run() when the worker thread fails to
    shut down within the timeout.
    """
    pass
class ObjectConsole:
"""
This class runs a python console in the main thread, and starts
a given Object in a second thread.
The Object is assumed to implement at least two methods, run() and stop().
- The run() method is the entry point for the thread.
- The stop() method is used by the main thread to request that the
object thread does a controlled shutdown and returns from the run method.
If the worker thread does not return from run() within 2 seconds after stop()
has been invoked, the console terminates the object thread more aggressively.
AttributeNames of Object listed in the provided namespace will be
included in the console namespace.
"""
TIMEOUT = 2
def __init__(self, object_, name_space=None, run='run',
stop='stop', name=""):
self._object = object_
self._object | _run = getattr(object_, run)
self._object_stop = getattr(object_, stop)
self._thread = threading.Thread(group=None,
target=self._object_run,
name="ObjectT | hread")
# Configure Console Namespace
self._name_space = {}
self._name_space['__builtiname_space__'] = __builtin__
self._name_space['__name__'] = __name__
self._name_space['__doc__'] = __doc__
self._name_space['help'] = self._usage
if name_space and isinstance(name_space, type({})):
self._name_space.update(name_space)
self._app_name_space = name_space
self._app_name = name
self._usage()
def _usage(self):
"""Print usage information."""
print "\nConsole:", self._app_name
for key in self._app_name_space.keys():
print "- ", key
print "- help"
def run(self):
"""Starts the given runnable object in a thread and
then starts the console."""
self._thread.start()
try:
code.interact("", None, self._name_space)
except KeyboardInterrupt:
pass
self._object_stop()
self._thread.join(ObjectConsole.TIMEOUT)
if self._thread.isAlive():
raise ConsoleError, "Worker Thread still alive"
|
bbtfr/MarkdownLight | tests/test_markdown_light.py | Python | mit | 22,066 | 0.005121 | import syntax_test
class TestMarkdownLight(syntax_test.SyntaxTestCase):
    def setUp(self):
        """Load the MarkdownLight syntax definition before each test."""
        super().setUp()
        self.set_syntax_file("Packages/MarkdownLight/MarkdownLight.tmLanguage")
    def check_default(self, patterns):
        # "Default" text is anything scoped only as plain 'text'
        # (i.e. no markdown markup scope applied).
        self.check_in_single_scope(patterns, 'text')
def test_simple_text(self):
self.set_text('A B C')
self.check_default('A B C')
def test_italic(self):
self.set_text('''
A *B* _C_ D
*E*
''')
self.check_eq_scope([ r'\*B\*', '_C_', r'\*E\*' ], 'markup.italic')
self.check_eq_scope(r'[\*_]', 'punctuation.definition')
self.check_default(list('AD '))
def test_bold(self):
self.set_text('''
A **B** __C__ D
**E**
''')
self.check_eq_scope([ r'\*\*B\*\*', r'__C__', r'\*\*E\*\*' ], 'markup.bold')
self.check_eq_scope(r'[\*_]+', 'punctuation.definition')
self.check_default(list('AD '))
def test_inline_markup_inside_inline_markup(self):
self.set_text('''
A *B **C** D* E
F **G *H* I** J
''')
self.check_eq_scope(r'\*B \*\*C\*\* D\*', 'markup.italic')
self.check_eq_scope(r'\*H\*', 'markup.italic')
self.check_eq_scope(r'\*\*C\*\*', 'markup.bold')
self.check_eq_scope(r'\*\*G \*H\* I\*\*', 'markup.bold')
self.check_eq_scope(r'\*+', 'punctuation.definition')
self.check_default(list('AEFJ'))
def test_bold_italic(self):
self.set_text('''
AA *__AB__* AC
BA _**BB**_ BC
CA **_CB_** CC
DA __*DB*__ DC
EA ***EB*** EC
FA ___FB___ FC
''')
self.check_eq_scope(r'\*__AB__\*', 'markup.italic')
self.check_eq_scope(r'_\*\*BB\*\*_', 'markup.italic')
self.check_eq_scope([ '_CB_', r'\*DB\*' ], 'markup.italic')
self.check_eq_scope([ '__AB__', r'\*\*BB\*\*' ], 'markup.bold')
self.check_eq_scope(r'\*\*_CB_\*\*', 'markup.bold')
self.check_eq_scope(r'__\*DB\*__', 'markup.bold')
self.check_eq_scope(r'\*+|_+', 'punctuation.definition')
self.check_eq_scope(r'\*\*\*EB\*\*\*', 'markup.bold')
self.check_eq_scope(r'\*\*\*EB\*\*\*', 'markup.italic')
self.check_eq_scope(r'___FB___', 'markup.bold')
self.check_eq_scope(r'___FB___', 'markup.italic')
self.check_default([ r'[A-Z]A ', r' [A-Z]C\n' ])
def test_multiline_markup_not_supported(self):
# Multiline inline markup is not supported due to
# limitations in syntax definition language.
self.set_text('''
A **B
C** D
E _F
G_ H
''')
self.check_default('.+')
def test_inline_markup_before_punctuation(self):
self.set_text('''
A *B*: *C*; *D*, *E*. *F*? G
K **L**: **M**; **N**, **O**. **P**? Q
''')
self.check_eq_scope([
r'\*B\*', r'\*C\*', r'\*D\*', r'\*E\*', r'\*F\*'
], 'markup.italic')
self.check_eq_scope([
r'\*\*L\*\*', r'\*\*M\*\*', r'\*\*N\*\*',
r'\*\*O\*\*', r'\*\*P\*\*'
], 'markup.bold')
self.check_eq_scope(r'\*+', 'punctuation.definition')
self.check_default(r'[AGKQ:;,\.?]')
def test_inline_markup_inside_quotes_and_brackets(self):
self.set_text('''
A "*B*" (*C*) '*D*' E
K "**L**" (**M**) '**N**' O
''')
self.check_eq_scope([ r'\*B\*', r'\*C\*', r'\*D\*' ], 'markup.italic')
self.check_eq_scope([ r'\*\*L\*\*', r'\*\*M\*\*', r'\*\*N\*\*' ], 'markup.bold')
self.check_eq_scope(r'\*+', 'punctuation.definition')
self.check_default(r'''[AEKQ"\(\)'\.]''')
def test_inline_markup_outside_quotes_and_brackets(self):
self.set_text('''
*"A"* *(B)* *'C'*
**"D"** **(E)** **'F'**
*"A";* *(B).* *'C':*
**"D"!** **(E)?** **'F',**
Z
''')
self.check_eq_scope([ r'\*"A"\*', r'\*\(B\)\*', r"\*'C'\*" ], 'markup.italic')
self.check_eq_scope([ r'\*\*"D"\*\*', r'\*\*\(E\)\*\*', r"\*\*'F'\*\*" ], 'markup.bold')
self.check_eq_scope([ r'\*"A";\*', r'\*\(B\)\.\*', r"\*'C':\*" ], 'markup.italic')
self.check_eq_scope([ r'\*\*"D"!\*\*', r'\*\*\(E\)\?\*\*', r"\*\*'F',\*\*" ], 'markup.bold')
self.check_default('Z')
def test_brackets_inside_inline_markup(self):
self.set_text('''
*A (B C)*: D
*(K)* **(L)**
''')
self.check_eq_scope([ r'\*A \(B C\)\*', r'\*\(K\)\*' ] , 'markup.italic')
self.check_eq_scope( r'\*\*\(L\)\*\*', 'markup.bold')
self.check_eq_scope(r'\*+', 'punctuation.definition')
self.check_default(r': D')
def test_inline_markup_combinations(self):
self.set_text('_A _ B_C D_E _ F_ *G* **H** <a>_I_</a>')
self.check_eq_scope([ '_A _ B_C D_E _ F_',
r'\*G\*', '_I_' ], 'markup.italic')
self.check_eq_scope(r'\*\*H\*\*', 'markup.bold')
def test_escaping_of_inline_punctuation(self):
self.set_text(r'A *\*B\** C **D\*** E')
self.check_eq_scope(r'\*\\\*B\\\*\*', 'markup.italic')
self.check_eq_scope(r'\*\*D\\\*\*\*', 'markup.bold')
self.check_default(list('ACE '))
def test_inline_markup_does_not_work_inside_words(self):
self.set_text('A_B C_D_E')
self.check_default('.+')
def test_inline_markup_does_not_work_without_text(self):
self.set_text('''
A ____ B
''')
self.check_default('^.+$')
def test_valid_ampersands(self):
self.set_text('''
&
&&
A & B
A && B
& A &B && C &&D E& F&&
&G;
''')
self.check_no_scope('^.+$', 'invalid')
def test_valid_brackets(self):
self.set_text('''
<
<<
A < B
A << B
A<
A<<
''')
self.check_no_scope('^.+$', 'invalid')
def test_headings(self):
self.set_text('''
# A
## B
### C
#### D
##### E
###### F
G
#K
##L#
### M ##
#### N ###########
O
''')
self.check_eq_scope(list('ABCDEFKLMN'), 'entity.name.section')
self.check_in_scope(list('ABCDEFKLMN# '), 'markup.heading')
self.check_eq_scope(r'#+', 'punctuation.definition')
self.check_default(list('GO'))
def test_setext_headings(self):
self.set_text('''
A
===
B
---
C
D
=======
E
F
-------
Z
''')
self.check_eq_scope('=+', 'markup.heading.1')
self.check_eq_scope('-+', 'markup.heading.2')
self.check_default(r'\w+')
def test_not_setext_headings(self):
self.set_text('''
- A
===
> B
---
C
=======
D
--
E
- - -
-------
-------
========
Z
''')
self.check_no_scope('.+', 'markup.heading')
def test_inline_markup_inside_headings(self):
self.set_text('''
#_A_
## B _C_
### D _E_ F
#### K _L M_ N #
Z
''')
self.check_eq_scope([
'_A_', 'B _C_', 'D _E_ F', 'K _L M_ N'
], 'entity.name.section')
self.check_in_scope(list('ABCDEFKLMN#_ '), 'markup.heading')
self.check_eq_scope([ '_A_', '_C_', '_E_', '_L M_' ], 'markup.italic')
self.check_eq_scope(r'#+', 'punctuation.definition')
self.check_default(r'Z')
def test_fenced_paragraph(self):
self.set_text('''
K
```
A
```
L
''')
self.check_eq_scope(r'```\nA\n```\n', 'markup.raw.block.fenced')
self.check_eq_scope('`+', 'punctuation.definition')
self.check_default([ r'K\n\n', r'\nL\n' ])
def test_fenced_block_inside_paragraph(self):
self.set_text('''
K
```
A
```
L
''')
self.check_eq_scope(r'```\nA\n```\n', 'markup.raw.block.fenced')
self.check_eq_scope('`+', 'punctuation.definition')
self.check_default([ r'\nK\n', r'L\n\n' ])
def test_syntax_highlighting_inside_fenced_blocks(self):
self.set_text('''
``` c++
int x = 123;
```
```python
def g():
return 567 |
```
''')
self.check_eq_scope([ 'int', 'def' ], 'storage.type')
self.check_eq_scope([ | '123', '567' ], 'constant.numeric')
self.check_eq_scope('g', 'entity.name')
self.check_eq_scope('return', 'keyword.control')
def test_indented_raw_blocks(self):
self.set_text('''
A
B
C
''')
self.check_eq_scope(r' B\n', 'markup.raw.block')
self.check_default([ r'\nA\n\n', r'\nC\n' ])
def test_multiline_indented_raw_blocks(self):
self.set_text('''
A
B
''')
self.check_eq_scope(r' A\n B\n', 'markup.raw.block')
def test_indented_raw_b |
TianpeiLuke/GPy | GPy/util/linalg_gpu.py | Python | bsd-3-clause | 3,189 | 0.011602 | # Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
#
# The utility functions for GPU computation
#
import numpy as np
from ..util import gpu_init
# Compile the CUDA reduction/element-wise kernels once at import time.
# NOTE(review): the bare 'except: pass' blocks deliberately swallow any
# failure (missing pycuda/scikits.cuda, no GPU, compile errors) so this
# module can still be imported on CPU-only machines; callers must check
# that the kernel names exist before using them.
try:
    from pycuda.reduction import ReductionKernel
    from pycuda.elementwise import ElementwiseKernel

    # log|A| for A is a low triangle matrix
    # logDiagSum(A, A.shape[0]+1)  -- step = n+1 walks the diagonal
    logDiagSum = ReductionKernel(np.float64, neutral="0", reduce_expr="a+b", map_expr="i%step==0?log(x[i]):0", arguments="double *x, int step")

    # sum of every 'step'-th element (e.g. a matrix diagonal without the log)
    strideSum = ReductionKernel(np.float64, neutral="0", reduce_expr="a+b", map_expr="i%step==0?x[i]:0", arguments="double *x, int step")

    # np.trace(np.dot(A,B)) (also equivalent to (A*B.T).sum() ) A - a1 x a2, B - a2 x a1
    traceDot = ReductionKernel(np.float64, neutral="0", reduce_expr="a+b", map_expr="A[i]*B[(i%a1)*a2+i/a1]", arguments="double *A, double *B, int a1, int a2")

    #=======================================================================================
    # Element-wise functions
    #=======================================================================================

    # log(X)
    log = ElementwiseKernel("double *in, double *out", "out[i] = log(in[i])", "log_element")

    # log(1.0-X)
    logOne = ElementwiseKernel("double *in, double *out", "out[i] = log(1.-in[i])", "logOne_element")

    # multiplication with broadcast on the last dimension (out = shorter[:,None]*longer)
    mul_bcast = ElementwiseKernel("double *out, double *shorter, double *longer, int shorter_size", "out[i] = longer[i]*shorter[i%shorter_size]", "mul_bcast")

    # multiplication with broadcast on the first dimension (out = shorter[None,:]*longer)
    mul_bcast_first = ElementwiseKernel("double *out, double *shorter, double *longer, int first_dim", "out[i] = longer[i]*shorter[i/first_dim]", "mul_bcast")

    # sum through the middle dimension (size_2) of a 3D matrix (size_1, size_2, size_3)
    # NOTE: accumulates with '+=', so 'out' must be zeroed by the caller first.
    sum_axis = ElementwiseKernel("double *out, double *in, int size_1, int size_2", "out[i] += sum_axis_element(in, size_1, size_2, i)", "sum_axis",preamble="""
    __device__ double sum_axis_element(double *in, int size_1, int size_2, int idx)
    {
        int k = idx/size_1;
        int i = idx%size_1;
        double sum=0;
        for(int j=0;j<size_2;j++) {
            sum += in[(k*size_2+j)*size_1+i];
        }
        return sum;
    }
    """)

    # the outer product between two vectors (out = np.dot(v1,v2.T))
    outer_prod = ElementwiseKernel("double *out, double *v1, double *v2, int v1_size", "out[i] = v1[i%v1_size]*v2[i/v1_size]", "outer_prod")

    # the joint product of two matrices (out = np.einsum('na,nb->nab',m1,m2) a=dim1, b=dim2 )
    join_prod = ElementwiseKernel("double *out, double *m1, double *m2, int dim1, int dim2", "out[i] = m1[(i%dim1)*dim1+(i%(dim1*dim2))/dim1]*m2[(i%dim1)*dim1+i/(dim1*dim2)]", "join_prod")
except:
    pass

# Optional CUDA linear-algebra backends; absence is tolerated (see note above).
try:
    import scikits.cuda.linalg as culinalg
    from scikits.cuda import cublas
    from scikits.cuda.cula import culaExceptions
except:
    pass
| |
stevegt/isconf4 | lib/python/isconf/Cache.py | Python | gpl-2.0 | 23,037 | 0.007727 | from __future__ import generators
import ConfigParser
import copy
import email.Message
import email.Parser
import email.Utils
import errno
import hmac
import inspect
import md5
import os
import popen2
import random
import re
import select
import sha
import shutil
import socket
import sys
import tempfile
import time
import urllib2
import isconf
from isconf.Errno import iserrno
from isconf.Globals import *
from isconf.fbp822 import fbp822
from isconf.Kernel import kernel
(START,IHAVE,SENDME) = range(3)
# XXX the following were migrated from 4.1.7 for now -- really need to
# be FBP components, at least in terms of logging
class Cache:
"""a combined cache manager and UDP mesh -- XXX needs to be split
>>> pid = os.fork()
>>> if not pid:
... time.sleep(999)
... sys.exit(0)
>>> os.environ["HOSTNAME"] = "testhost"
>>> os.environ["IS_HOME"] = "/tmp/var/is"
>>> cache = Cache(54321,54322)
>>> assert cache
>>> os.kill(pid,9)
"""
    def __init__(self,udpport,httpport,timeout=2):
        # XXX kludge -- what we really need is a dict which
        # shows the "mirror list" of all known locations for
        # files, rather than self.req

        # outstanding 'whohas' requests, keyed by cache-relative path;
        # each value holds the request msg, expiry time, and state
        # (START/IHAVE/SENDME)
        self.req = {}

        self.udpport = udpport          # UDP mesh port
        self.httpport = httpport        # port we advertise for HTTP fetches
        self.timeout = float(timeout)   # seconds before a request expires
        self.lastSend = 0               # last resend() timestamp (rate limit)
        self.sock = None                # UDP socket, set up by the server task
        self.fetched = {}
        self.nets = self.readnets()     # static mesh addresses from $IS_NETS
        self.sendq = []                 # queued (msg, addr, port) datagrams

        # temporary uid -- uniquely identifies host in non-persistent
        # packets.  If we want something permanent we should store it
        # somewhere under private.
        self.tuid = "%s@%s" % (random.random(),
                os.environ['HOSTNAME'])

        # namespace object holding the cache filesystem layout
        class Path: pass
        self.p = Path()
        home = os.environ['IS_HOME']
        # XXX redundant with definitions in ISFS.py -- use a common lib?
        self.p.cache = os.path.join(home,"fs/cache")
        self.p.private = os.path.join(home,"fs/private")
        self.p.announce = "%s/.announce" % (self.p.private)
        self.p.pull = "%s/.pull" % (self.p.private)
        for d in (self.p.cache,self.p.private):
            if not os.path.isdir(d):
                os.makedirs(d,0700)
def readnets(self):
# read network list
nets = {'udp': [], 'tcp': []}
netsfn = os.environ.get('IS_NETS',None)
debug("netsfn", netsfn)
if netsfn and os.path.exists(netsfn):
netsfd = open(netsfn,'r')
for line in netsfd:
(scheme,addr) = line.strip().split()
nets[scheme].append(addr)
debug("nets", str(nets))
return nets
    def ihaveTx(self,path):
        # Broadcast an 'ihave' announcement for a cached file, advertising
        # its mtime and our HTTP port so peers can decide whether to fetch.
        path = path.lstrip('/')
        fullpath = os.path.join(self.p.cache,path)
        mtime = 0
        if not os.path.exists(fullpath):
            # file disappeared between queueing and announcing -- skip it
            warn("file gone: %s" % fullpath)
            return
        mtime = getmtime_int(fullpath)
        reply = FBP.msg('ihave',tuid=self.tuid,
            file=path,mtime=mtime,port=self.httpport,scheme='http')
        # sign the message so peers can authenticate it
        HMAC.msgset(reply)
        self.bcast(str(reply))
def bcast(self,msg):
# XXX only udp supported so far
debug("bcast")
addrs = self.nets['udp']
if not os.environ.get('IS_NOBROADCAST',None):
addrs.append('<broadcast>')
for addr in addrs:
if len(self.sendq) > 20:
| debug("sendq overflow")
return
self.sendq.append((msg,addr,self.udpport))
def sender(self):
while True:
yield None
yield kernel.sigsleep, 1
while len( | self.sendq):
msg,addr,udpport = self.sendq.pop(0)
try:
debug("sendto", addr, msg)
self.sock.sendto(msg,0,(addr,udpport))
except:
info("sendto failed: %s" % addr)
self.sendq.append((msg,addr,udpport))
yield kernel.sigsleep, 1
yield kernel.sigsleep, self.timeout/5.0
    def ihaveRx(self,msg,ip):
        # Kernel task: handle a peer's 'ihave' announcement -- fetch the
        # file if theirs is newer, re-announce ours if it is older.
        yield None
        scheme = msg['scheme']
        port = msg['port']
        path = msg['file']
        mtime = msg.head.mtime
        # XXX is python's pseudo-random good enough here?
        #
        # probably, but for other cases, use 'gpg --gen-random 2 16'
        # to generate 128 bits of random data from entropy
        #
        challenge = str(random.random())
        url = "%s://%s:%s/%s?challenge=%s" % (scheme,ip,port,path,challenge)
        path = path.lstrip('/')
        # simple check to ignore foreign domains
        # XXX probably want to make this a list of domains
        domain = os.environ['IS_DOMAIN']
        if not path.startswith(domain + '/'):
            debug("foreign domain, ignoring: %s" % path)
            return
        fullpath = os.path.join(self.p.cache,path)
        mymtime = 0
        debug("checking",url)
        if os.path.exists(fullpath):
            mymtime = getmtime_int(fullpath)
        if mtime > mymtime:
            # peer's copy is newer: mark any pending request as being
            # served and fetch the file over HTTP
            debug("remote is newer:",url)
            if self.req.has_key(path):
                self.req[path]['state'] = SENDME
            yield kernel.wait(self.wget(path,url,challenge))
        elif mtime < mymtime:
            # our copy is newer: tell the mesh about it
            debug("remote is older:",url)
            self.ihaveTx(path)
        else:
            debug("remote and local times are the same:",path,mtime,mymtime)
    def puller(self):
        # Kernel task: watch the .pull file that ISFS writes file paths
        # into, broadcast 'whohas' requests for each, and keep resending
        # until every request is filled or expires.
        tmp = "%s.tmp" % self.p.pull
        while True:
            timeout= self.timeout
            yield None
            # get list of files
            if not os.path.exists(self.p.pull):
                # hmm.  we must have died while pulling
                if os.path.exists(tmp):
                    old = open(tmp,'r').read()
                    open(self.p.pull,'a').write(old)
            # ensure the pull file exists, then claim it atomically via rename
            open(self.p.pull,'a')
            os.rename(self.p.pull,tmp)
            # files = open(tmp,'r').read().strip().split("\n")
            data = open(tmp,'r').read()
            if not len(data):
                # nothing to pull; recreate the (empty) pull file and wait
                open(self.p.pull,'a')
                yield kernel.sigsleep, 1
                continue
            files = data.strip().split("\n")
            # create requests
            for path in files:
                path = path.lstrip('/')
                fullpath = os.path.join(self.p.cache,path)
                mtime = 0
                if os.path.exists(fullpath):
                    mtime = getmtime_int(fullpath)
                # only peers with a copy newer than ours should answer
                req = FBP.msg('whohas',file=path,newer=mtime,tuid=self.tuid)
                HMAC.msgset(req)
                self.req.setdefault(path,{})
                self.req[path]['msg'] = req
                self.req[path]['expires'] = time.time() + timeout
                self.req[path]['state'] = START
            while True:
                # send requests
                yield None
                debug("calling resend")
                self.resend()
                yield kernel.sigsleep, 1
                # see if they've all been filled or timed out
                # debug(str(self.req))
                if not self.req:
                    # okay, all done -- touch the file so ISFS knows
                    open(self.p.pull,'a')
                    break
    def resend(self):
        """(re)send outstanding requests"""
        # rate-limit: at most one resend burst every half second
        if time.time() < self.lastSend + .5:
            return
        self.lastSend = time.time()
        paths = self.req.keys()
        for path in paths:
            debug("resend", self.req[path]['expires'], path, self.req[path])
            if self.req[path]['state'] > START:
                # file is being fetched
                debug("resend fetching")
                pass
            elif time.time() > self.req[path]['expires']:
                # fetch never started
                debug("timeout",path)
                del self.req[path]
                continue
            # re-broadcast the original signed 'whohas' message
            req = self.req[path]['msg']
            debug("calling bcast")
            self.bcast(str(req))
def flush(self):
if not os.path.exists(self.p.announce):
return
tmp = "%s.tmp |
BorisJeremic/Real-ESSI-Examples | analytic_solution/test_cases/Contact/Stress_Based_Contact_Verification/SoftContact_NonLinHardSoftShear/Area/Normalized_Shear_Stress_Plot.py | Python | cc0-1.0 | 6,112 | 0.017507 | #!/usr/bin/python
import h5py
import matplotlib.pylab as plt
import matplotlib as mpl
import sys
import numpy as np;
plt.rcParams.update({'font.size': 28})

# set tick width
mpl.rcParams['xtick.major.size'] = 10
mpl.rcParams['xtick.major.width'] = 5
mpl.rcParams['xtick.minor.size'] = 10
mpl.rcParams['xtick.minor.width'] = 5
plt.rcParams['xtick.labelsize'] = 24

mpl.rcParams['ytick.major.size'] = 10
mpl.rcParams['ytick.major.width'] = 5
mpl.rcParams['ytick.minor.size'] = 10
mpl.rcParams['ytick.minor.width'] = 5
plt.rcParams['ytick.labelsize'] = 24

plt.style.use('grayscale')


def plot_case(thefile, label):
    """Read one feioutput file and add its normalized-shear-stress curve
    to the current figure.

    :param thefile: path to the Analytical_Solution_Shear.feioutput file
    :param label: legend label for this contact area case
    """
    finput = h5py.File(thefile)
    # Element output rows: 4 = shear strain x, 7 = shear stress x,
    # 9 = normal stress (sign flipped so compression is positive).
    # NOTE(review): the original also computed sqrt magnitudes from the
    # x/y components and then overwrote them with the x components, so
    # only the x components are plotted; the dead code is dropped here.
    shear_strain = finput["/Model/Elements/Element_Outputs"][4, :]
    shear_stress = finput["/Model/Elements/Element_Outputs"][7, :]
    normal_stress = -finput["/Model/Elements/Element_Outputs"][9, :]
    # x-axis: strain * 5 converts to shear displacement in mm
    # (original passed 'Linewidth=', which is not a valid matplotlib kwarg)
    plt.plot(shear_strain * 5, shear_stress / normal_stress,
             label=label, linewidth=4)
    plt.xlabel(r"Shear Displacement $\Delta_t [mm]$")
    plt.ylabel(r"Normalized Shear Stress $\tau/\sigma_n$")


# One curve per contact area case (the original repeated this block 4x).
plt.figure(figsize=(12, 10))
plot_case("A_1/Analytical_Solution_Shear.feioutput", r'Area = $1 m^2$')
plot_case("A_1e2/Analytical_Solution_Shear.feioutput", r'Area = $1e^2 m^2$')
plot_case("A_1e-2/Analytical_Solution_Shear.feioutput", r'Area = $1e^{-2} m^2$')
plot_case("A_1e-4/Analytical_Solution_Shear.feioutput", r'Area = $1e^{-4} m^2$')

########################################################
# # axes = plt.gca()
# # axes.set_xlim([-7,7])
# # axes.set_ylim([-1,1])
outfigname = "Normalized_Shear_Stress.pdf"
legend = plt.legend()
legend.get_frame().set_linewidth(0.0)
legend.get_frame().set_facecolor('none')
plt.savefig(outfigname, bbox_inches='tight')
# plt.show()
|
ceeblet/OST_PythonCertificationTrack | Python2/MoreGuiLayout_homework/src/moreframesandbuttons2.py | Python | mit | 3,128 | 0.003836 | import os
from tkinter import *
ALL = N+S+E+W
class Application(Frame):
    """Three colored frames plus a simple file viewer with color buttons."""

    def __init__(self, master=None):
        Frame.__init__(self, master)
        # Let the application frame stretch with the toplevel window.
        self.master.rowconfigure(0, weight=1)
        self.master.columnconfigure(0, weight=1)
        self.grid(sticky=ALL)
        # NOTE(review): binds to the module-level 'root', not 'master' --
        # pressing Return anywhere opens the file.
        root.bind("<Return>", self.file_open)

        self.f1 = Frame(self, bg="red", width=100, height=75)
        self.f1.grid(row=0, column=0, rowspan=2, columnspan=2, sticky=ALL)
        self.f2 = Frame(self, bg="green", width=100, height=75)
        self.f2.grid(row=1, column=0, rowspan=2, columnspan=2, sticky=ALL)
        self.f3 = Frame(self, bg="blue", width=150, height=150)
        self.f3.grid(row=0, column=2, rowspan=3, columnspan=3, sticky=ALL)

        # Filename entry on top, file contents below it.
        self.file_name_field = Entry(self.f3, bg="#fff")
        self.file_name_field.pack(anchor=N, side=TOP, fill=X, expand=False)
        self.file_contents = Text(self.f3, width=10, height=10)
        self.file_contents.pack(anchor=N, side=TOP, fill=BOTH, expand=True)

        self.rowconfigure(0, weight=1)
        self.rowconfigure(1, weight=1)
        self.label1 = Label(self.f1, bg="red", text="Frame 1")
        self.label1.grid(row=0, sticky=ALL)
        self.label2 = Label(self.f2, bg="green", text="Frame 2")
        self.label2.grid(row=1, sticky=ALL)

        # Bottom row of buttons: four text colors plus "open".
        button_names = ["red", "blue", "green", "black", "open"]
        for c in range(5):
            self.columnconfigure(c, weight=1)
            button = Button(self, text="{0}".format(button_names[c]))
            button.grid(row=3, column=c, sticky=ALL)
            if button_names[c] == "open":
                button.bind("<Button-1>", self.file_open)
            else:
                # Bind the color as a default arg so each button keeps its own.
                button.bind("<Button-1>", lambda event, color=button_names[c]: self.change_color(event, color))
        self.f1.bind("<Button-1>", self.click_report)
        self.f2.bind("<Button-1>", self.click_report)

    def click_report(self, event):
        """Print which colored frame was clicked and the click coordinates."""
        frm = ""
        location = event.widget.grid_info()
        frm_coords = (location["row"], location["column"])
        if frm_coords == ("0", "0"):
            frm = "Frame 1"
        elif frm_coords == ("1", "0"):
            frm = "Frame 2"
        print("user button 1 click in {0} at ({1}, {2})".format(frm, event.x, event.y))

    def file_open(self, event):
        """Load the file named in the entry field into the text widget."""
        self.file_contents.delete(1.0, END)
        fname = self.file_name_field.get()
        if os.path.exists(fname):
            print("Opening file: {0}".format(fname))
            # 'with' closes the handle deterministically (original leaked it).
            with open(fname, 'r') as f:
                for line in f:
                    self.file_contents.insert(INSERT, line)
        else:
            print("The file does not exist, please try again.")

    def change_color(self, event, color):
        """Set the foreground color of the file-contents text widget."""
        self.file_contents.configure(fg=color)
# Build the toplevel window, attach the application, and enter the
# Tk event loop (blocks until the window is closed).
root = Tk()
app = Application(master=root)
app.mainloop()
from pymongo import MongoClient

from helpers import db_url, db_database, parse_csv, to_int, to_float

mongo = MongoClient(db_url)
db = mongo[db_database]

print("Updating school information from 2015 data...")

# try/finally ensures the Mongo connection is released even if a CSV
# row fails to parse or an update raises.
try:
    for school in parse_csv("../data/2015/ks5_attainment.csv"):

        # All schools are RECTYPE=1. Other RECTYPEs are used for local averages.
        # Closed schools are ICLOSE=1. We skip them too.
        if (school["RECTYPE"] != "1") or (school["ICLOSE"] == "1"):
            continue

        # Upsert by URN so re-running the import is idempotent.
        db.schools.update(
            { "_id": school["URN"] },
            { "$set": {
                "lea": to_int(school["LEA"]),
                "name": school["SCHNAME"],
                "address": [school["ADDRESS1"], school["ADDRESS2"], school["ADDRESS3"]],
                "town": school["TOWN"],
                "postcode": school["PCODE"],
                "phone": school["TELNUM"],
                "type": school["NFTYPE"],
                "religious": school["RELDENOM"],
                "admissions": school["ADMPOL"],
                "gender": school["GENDER1618"].capitalize(),
                "ages": school["AGERANGE"],
                "performance.2015.students": {
                    "16-18": to_int(school["TPUP1618"], True),
                    "ks5": to_int(school["TALLPUPA"]),
                    "academic": to_int(school["TALLPUP_ACADA"]),
                    "vocational": to_int(school["TALLPUP_VQA"]),
                    "a-level": to_int(school["TALLPUP_ALEVA"])
                },
                "performance.2015.aps.a-level": {
                    "student": to_float(school["APSFTE_ALEVA"], True),
                    "entry": to_float(school["TALLPPE_ALEVA"], True)
                }
            }
            },
            upsert=True
        )

        # "NEW" in the pupil-count column marks schools new this year.
        if (school["TPUP1618"] == "NEW"):
            db.schools.update({"_id": school["URN"]}, {"$set": {"new": True}})
finally:
    mongo.close()

print("\nDone.")
Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_08_01/operations/_express_route_service_providers_operations.py | Python | mit | 5,203 | 0.004421 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.mgmt.core.exceptions import ARMErrorFormat
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteServiceProvidersOperations(object):
"""ExpressRouteServiceProvidersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_08_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
| self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExpressRouteServiceProviderListResult"]
"""Gets all the available express route service providers.
:keyword callable cls: | A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteServiceProviderListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_08_01.models.ExpressRouteServiceProviderListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteServiceProviderListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteServiceProviderListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteServiceProviders'} # type: ignore
|
liamneath1/SUMOWaferBackend | PythonBackend/EmailObject.py | Python | mit | 1,333 | 0.009002 | import hashlib
import sys
import datetime
class EmailObject:
def __init__(self, sender, date, subject, body):
self.sender = sender
self.date = datetime.datetime.strptime(date[5:25], '%d %b %Y %H:%M:%S') #TODO: FIX THIS CODE TO BE MORE ROBUST
self.body = body
self.subject = subject
self.emailId = self.createEmailId()
def createEmailId(self):
| hasher = hashlib.md5()
hasher.update(self.sender.encode() + self.date.strftime('%Y-%m-%d %H:%M:%S').encode() + self.subject.encode())
hasher.upda | te(self.body.encode())
result = int(hasher.hexdigest(), 16) % sys.maxsize
return result
def fetchEmailMain(self):
sqlLine = "('" + self.sender + "','" + self.date.strftime('%Y-%m-%d %H:%M:%S') + "','" + self.subject + "'," + str(self.emailId) + ")"
return sqlLine
def fetchEmailBodySql(self):
sqlLine = "(" + str(self.emailId) + ",'" + self.body + "')"
return sqlLine
def __str__(self):
descriptivestr = "Sender: " + "'" + self.sender + "'" + "\n"
descriptivestr += "Date: " + "'" + self.date.strftime('%Y-%m-%d %H:%M:%S') +"'" +"\n"
descriptivestr += "Subject: " + "'" + self.subject + "'" + "\n"
descriptivestr += "Body: " + self.body + "\n"
return descriptivestr
|
anhstudios/swganh | data/scripts/templates/object/static/structure/tatooine/shared_planter_hanging_style_01.py | Python | mit | 463 | 0.047516 | #### NOTICE: THIS FILE IS A | UTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Static()
result.template = "object/static/structure/tatooine/shared_planter_hanging_style_01.iff"
result.attribute_template_id = -1
result.stfName("obj_n","unknown_objec | t")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result |
JulienLeonard/PVG | tests/test_geoquad.py | Python | gpl-2.0 | 502 | 0.031873 | from utils import *
from geoutils import *
from polygon import *
from circle import *
from geoquad import *
import unittest
class GeoQuad | Test(unittest.TestCase):
def test_init(self):
geoquad = GeoQuad.square(Point(1.0,1.0),2.0)
self.assertEqual(geoquad.xpoint(Point(0.5,0.5)).coords(),(1.0,1.0))
def test_split(self):
geoquad = GeoQuad.square(Point(1.0,1.0),2.0)
sel | f.assertEqual(len(geoquad.xsplit(0.5)),2)
|
dmacvicar/spacewalk | client/solaris/rhnclient/compile.py | Python | gpl-2.0 | 3,533 | 0.001698 | #!/usr/bin/python
#
# Copyright (c) 2008 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# python compiler. "Borrowed" from Python's py_compile module. As
# opposed to the Python one, this script returns error codes when a
# compile fails, so it can be used from Makefiles
#
import os
import sys
import marshal
import traceback
import string
import __builtin__
import imp
MAGIC = imp.get_magic()
if len(sys.argv) != 3:
print "Usage:", sys.argv[0], "infile outfile"
sys.exit(-1)
infile = sys.argv[1]
outfile = sys.argv[2]
def wr_long(f, x):
"Internal; write a 32-bit int to a file in little-endian order."
f.write(chr( x & 0xff))
f.write(chr((x >> 8) & 0xff))
f.write(chr((x >> 16) & 0xff))
f.write(chr((x >> 24) & 0xff))
def compile(file, cfile=None, dfile=None):
"""Byte-compile one Python source file to Python bytecode.
Arguments:
file: source filename
cfile: target filename; defaults to source with 'c' or 'o' appended
('c' normally, 'o' in optimizing mode, giving .pyc or .pyo)
dfile: purported filename; defaults to source (this is the filename
that will show up in error messages)
Note that it isn't necessary to byte-compile Python modules for
execution efficiency -- Python itself byte-compiles a module when
it is loaded, and if it can, writes out the bytecode to the
corresponding .pyc (or .pyo) file.
However, if a Python installation is shared between users, it is a
good idea to byte-compile all modules upon installation, since
other users may not be able to write in the source directories,
and thus they won't be able to write the .pyc/.pyo file, and then
they would be byte-compiling every module each time it is loaded.
This can slow down program start-up considerably.
See compileall.py for a script/module that uses this module to
byte-compile all installed files (or all files in selected
directories).
"""
f = open(file)
try:
timestamp = long(os.fstat(f.fileno())[8])
except AttributeError:
timestamp = | long(os.stat(file | )[8])
codestring = f.read()
f.close()
if codestring and codestring[-1] != '\n':
codestring = codestring + '\n'
try:
codeobject = __builtin__.compile(codestring, dfile or file, 'exec')
except SyntaxError, detail:
lines = traceback.format_exception_only(SyntaxError, detail)
sys.stderr.write("%s: Error compiling\n" % file)
for line in lines:
sys.stderr.write(string.replace(line, 'File "<string>"',
'File "%s"' % (dfile or file)))
return -1
if not cfile:
cfile = file + (__debug__ and 'c' or 'o')
fc = open(cfile, 'wb')
fc.write('\0\0\0\0')
wr_long(fc, timestamp)
marshal.dump(codeobject, fc)
fc.flush()
fc.seek(0, 0)
fc.write(MAGIC)
fc.close()
return 0
if compile(infile, outfile) != 0:
sys.exit(-1)
|
xlhtc007/blaze | blaze/compute/csv.py | Python | bsd-3-clause | 2,667 | 0.00075 | from __future__ import absolute_import, division, print_function
import pandas
import os
from toolz import curry, concat
import pandas as pd
import numpy as np
from collections import Iterator, Iterable
from odo import into
from odo.chunks import chunks
from odo.backends.csv import CSV
from multipledispatch import MDNotImplementedError
from ..dispatch import dispatch
from ..expr import Expr, Head, ElemWise, Distinct, Symbol, Projection, Field
from ..expr.core import path
from ..utils import available_memory
from ..expr.split import split
from .core import compute
from ..expr.optimize import lean_projection
from .pmap import get_default_pmap
__all__ = ['optimize', 'pre_compute', 'compute_chunk', 'compute_down']
@dispatch(Expr, CSV)
def optimize(expr, _):
return lean_projection(expr) # This is handled in pre_compute
@dispatch(Expr, CSV)
def pre_compute(expr, data, comfortable_memory=No | ne, chunksize=2**18, **kwargs):
comfortable_memory = comfortable_memory or min(1e9, available_memory() / 4)
kwargs = dict()
# Chunk if the file is large
if os.path.getsize(data.path) > comfortable_memory:
kwargs['chunksize'] | = chunksize
else:
chunksize = None
# Insert projection into read_csv
oexpr = optimize(expr, data)
leaf = oexpr._leaves()[0]
pth = list(path(oexpr, leaf))
if len(pth) >= 2 and isinstance(pth[-2], (Projection, Field)):
kwargs['usecols'] = pth[-2].fields
if chunksize:
return into(chunks(pd.DataFrame), data, dshape=leaf.dshape, **kwargs)
else:
return into(pd.DataFrame, data, dshape=leaf.dshape, **kwargs)
Cheap = (Head, ElemWise, Distinct, Symbol)
@dispatch(Head, CSV)
def pre_compute(expr, data, **kwargs):
leaf = expr._leaves()[0]
if all(isinstance(e, Cheap) for e in path(expr, leaf)):
return into(Iterator, data, chunksize=10000, dshape=leaf.dshape)
else:
raise MDNotImplementedError()
def compute_chunk(chunk, chunk_expr, part):
return compute(chunk_expr, {chunk: part})
@dispatch(Expr, pandas.io.parsers.TextFileReader)
def compute_down(expr, data, map=None, **kwargs):
if map is None:
map = get_default_pmap()
leaf = expr._leaves()[0]
(chunk, chunk_expr), (agg, agg_expr) = split(leaf, expr)
parts = list(map(curry(compute_chunk, chunk, chunk_expr), data))
if isinstance(parts[0], np.ndarray):
intermediate = np.concatenate(parts)
elif isinstance(parts[0], pd.DataFrame):
intermediate = pd.concat(parts)
elif isinstance(parts[0], (Iterable, Iterator)):
intermediate = concat(parts)
return compute(agg_expr, {agg: intermediate})
|
mishbahr/django-fwdform | setup.py | Python | bsd-3-clause | 1,751 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import fwdform
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
version = fwdform.__version__
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload | ')
sys.exit()
if sys.argv[- | 1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a %s -m 'version %s'" % (version, version))
os.system("git push --tags")
sys.exit()
long_description = open('README.rst').read()
setup(
name='django-fwdform',
version=version,
description="""Simple and painless form processing for static sites.""",
long_description=long_description,
author='Mishbah Razzaque',
author_email='mishbahx@gmail.com',
url='https://github.com/mishbahr/django-fwdform',
packages=[
'fwdform',
],
include_package_data=True,
install_requires=[
'django-appconf',
'requests>=1.0',
'hashids>=1.0',
'django-cors-headers>=1.0',
],
license='BSD',
zip_safe=False,
keywords='django-fwdform',
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Framework :: Django :: 1.9',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Natural Language :: English',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
)
|
letter113/android-build-system | android_build_system/pre_checks/env_check.py | Python | apache-2.0 | 851 | 0.001175 | import os
import shutil
from android_build_system.pre_checks.base import BaseCheck
from android_build_system.config import AAPT, ZIPALIGN
class EnvCheck(BaseCheck):
def __init__(self):
super().__init__ | ("Env check")
def _check(self):
return os.environ.get("ANDROID_HOME", None) is not None
class AAPTCheck(BaseCheck):
def __init__(self):
super().__init__("Binary 'aapt' found")
def _check(self):
return AAPT is not None
class ZIPALIGNCheck(BaseCheck):
def __init__(self):
super().__init__("Binary 'zipalgn' found")
def _check(self):
return ZIPALIGN is not None
class CmdCheck(BaseCheck):
| def __init__(self, cmd):
self.cmd = cmd
self.message = "Command '{}' found".format(cmd)
def _check(self):
return shutil.which(self.cmd) is not None |
srimalik/pynfs | nfs4.1/block.py | Python | gpl-2.0 | 10,274 | 0.002531 | from __future__ import with_statement
from xdrdef.pnfs_block_pack import PNFS_BLOCKPacker as Packer
from xdrdef.pnfs_block_pack import PNFS_BLOCKUnpacker as Unpacker
from xdrdef.pnfs_block_type import *
from xdrdef.pnfs_block_const import *
import fs_base
from threading import Lock
import struct
# draft 8
# All sizes are in bytes unless otherwise indicated
"""
Need to be able to set topology in server_exports
From topology, need to create device
"""
id = 0
id_lock = Lock()
def getid(d):
"""Get a new unique id. These are used only internally for printing"""
global id
id_lock.acquire()
out = id
id += 1
id_lock.release()
return out
class BlockVolume(fs_base.LayoutFile):
"""Deals with disk topology information"""
class FakeFs(object):
def _find_extent(self, pos, inode):
# inode here is the topology root block.Volume
vol, v_pos, limit = inode.extent(pos, 1 << 64)
return fs_base.Extent(fs_base.VALID, v_pos, pos, limit, vol._fd)
def __init__(self, volume):
self._component_list = [vol for vol in volume._dump()
if type(vol) == Simple]
self._openlist = []
self.address_body = volume.get_addr()
super(BlockVolume, self).__init__(volume, self.FakeFs(), volume._size)
def open(self, mode="rb+"):
# STUB - need care with mode, for example--append would not work as is
for vol in self._component_list:
# STUB - rewrite in terms of context managers
if vol.backing_dev is None:
raise IOError("No backing device for Simple Volume %i" % vol.id)
vol._fd = open(vol.backing_dev, mode)
self._openlist.append(vol._fd)
return self
def close(self):
# XXX Careful here - what if errors on a close, or previously on open?
for fd in reversed(self._openlist):
fd.close()
__enter__ = open
def __exit__(self, t, v, tb):
self.close()
class Volume(object):
"""Superclass used to represent topology components."""
def get_addr(self):
"""Generate the opaque part of device_addr4 used by NFS4.1.
Note this corresponds to device.address_body property used by
op_getdeviceinfo.
"""
# Create list of all volumes referenced, in order of reference
list = self._dump()
# Create mapping from device to its index in list
mapping = dict(zip(list, range(len(list))))
# Create (unpacked) pnfs_block_volume4 structure for each volume
addr = pnfs_block_deviceaddr4([d.get_xdr(mapping) for d in list])
# Create packed xdr string
p = Packer()
p.pack_pnfs_block_deviceaddr4(addr)
return p.get_buffer()
def _dump(self):
"""Recursively scan for all devices in tree.
They are returned in order of reference, to build volume array.
"""
out = []
for v in self.volumes:
out.extend(v._dump())
out = remove_dups(out)
| out.append(self)
return out
def get_xdr(self, mapping):
"""Returns filled (and unpacked) pnfs_block_volume4 structure.
Nee | d mapping from device:to top-level array index to do the conversion.
"""
raise NotImplementedError
def resolve(self, i):
"""Map a byte offset to the corresponding Simple volume and byte offset.
"""
return NotImplementedError
def extent(self, i, limit):
"""Same as resolve, with addition of how far mapping extends."""
return NotImplementedError
class Simple(Volume):
"""Represents an actual disk. Always a leaf node in the topology tree."""
def __init__(self, signature, size=None, backing_dev=None):
self.type = PNFS_BLOCK_VOLUME_SIMPLE
self.id = getid(self)
if type(signature[0]) == int:
# Make it easy to send a single component
signature = [signature]
self.sig = [pnfs_block_sig_component4(i, s) for i, s in signature]
self._size = size # in bytes
self.backing_dev = backing_dev
if backing_dev is None:
if size is None:
raise ValueError("Must set either size or backing_dev")
return
self._fd = None
with open(backing_dev, "rb+") as fd:
# Determine device's actual size
fd.seek(0, 2)
true_size = fd.tell()
if size is None:
self._size = true_size
elif true_size < size:
raise ValueError("backing dev size %r < %r" % (true_size, size))
self._write_sig(fd)
def _write_sig(self, fd):
"""Write out disk signature to open fd."""
for comp in self.sig:
offset = comp.bsc_sig_offset
if offset < 0:
offset += self._size
fd.seek(offset)
fd.write(comp.bsc_contents)
def __repr__(self):
return "Simple %i" % self.id
def _dump(self):
"""Since this is always a leaf node of tree, end recursive scan."""
return (self, )
def get_xdr(self, mapping):
info = pnfs_block_simple_volume_info4(self.sig)
return pnfs_block_volume4(PNFS_BLOCK_VOLUME_SIMPLE, bv_simple_info=info)
def resolve(self, i):
# print "resolve(%i) %r" % (i, self)
if i < 0 or i >= self._size:
raise ValueError("Asked for %i of %i" % (i, self._size))
return (self, i)
def extent(self, i, limit):
return (self, i, min(limit, self._size - i))
class Slice(Volume):
"""A contiguous slice from a single volume."""
def __init__(self, volume, start, length):
self.type = PNFS_BLOCK_VOLUME_SLICE
self.id = getid(self)
self.start = start # block offset
self.length = length # length in blocks
self.volumes = [volume] # volume which is sliced
self._size = length
def __repr__(self):
return "Slice %i (from vol %i)" % (self.id, self.volumes[0].id)
def get_xdr(self, mapping):
info = pnfs_block_slice_volume_info4(self.start, self.length,
mapping[self.volumes[0]])
return pnfs_block_volume4(PNFS_BLOCK_VOLUME_SLICE, bv_slice_info=info)
def resolve(self, i):
# print "resolve(%i) %r" % (i, self)
# print self.start, self._size, self.length
if i < 0 or i >= self._size:
raise ValueError("Asked for %i of %i" % (i, self._size))
return self.volumes[0].resolve(self.start + i)
def extent(self, i, limit):
return self.volumes[0].extent(self.start + i,
min(limit, self._size - i))
class Concat(Volume):
"""A simple concatanation of several volumes."""
def __init__(self, volumes):
self.type = PNFS_BLOCK_VOLUME_CONCAT
self.id = getid(self)
self.volumes = volumes
self._size = sum([v._size for v in volumes])
def get_xdr(self, mapping):
info = pnfs_block_concat_volume_info4([mapping[v] for v in self.volumes])
return pnfs_block_volume4(PNFS_BLOCK_VOLUME_CONCAT, bv_concat_info=info)
def __repr__(self):
return "Concat %i of %r" % (self.id, [v.id for v in self.volumes])
def resolve(self, i):
# print "resolve(%i) %r" % (i, self)
if i < 0 or i >= self._size:
raise ValueError("Asked for %i of %i" % (i, self._size))
sum = 0
for v in self.volumes:
next = sum + v._size
if i < next:
return v.resolve(i - sum)
sum = next
# Shouldn't get here
raise RuntimeError
def extent(self, i, limit):
sum = 0
for v in self.volumes:
next = sum + v._size
if i < next:
return v.extent(i - sum, min(limit, next - i))
sum = next
# Shouldn't get here
raise RuntimeError
class Stripe(Volume):
"""Stripe of several volumes, all of the same size."""
def __init__(self, size, volumes):
self.type = PNFS_BLOCK |
liushuaikobe/GitArchiveUtils | daily-task/util.py | Python | gpl-2.0 | 3,053 | 0.004751 | # -*- coding: utf-8 -*-
import glob
import os
import csv
import smtplib
from email.mime.text import MIMEText
from whoosh import index
from whoosh.fields import Schema, TEXT
import config
from database import get_redis_pipeline
mail_config = config.mail_config
def sendmail(sbj, content,
fromwhom=mail_config['from'], towhom=mail_config['to'],
server=mail_config['server'], username=mail_config['username'], pwd=mail_config['pwd']):
try:
msg = msg.encode('utf-8')
except Exception, e:
pass
msg = MIMEText(content)
msg['Subject'] = sbj
msg['From'] = fromwhom
msg['To'] = towhom
s = smtplib.SMTP(server)
s.ehlo()
s.starttls()
s.login(username, pwd)
s.sendmail(fromwhom, towhom, msg.as_string())
def detect(base, year, month, day):
"""检测指定base目录下的某天的文件是否齐全"""
kwargs = {'year': year, 'month': month, 'day': day}
f_name = '{year}-{month:02d}-{day:02d}-*.json.gz'.format(**kwargs)
r1 = os.path.join(base, f_name)
r2 = '*.json.gz'
return len(glob.glob(r1)) == 24 and len(glob.glob(r2)) == 0
class WhooshUtil(object):
"""Whoosh搜索相关工具"""
def __init__(self, ix_path=config.ix_path):
super | (WhooshUtil, self).__init__()
self.ix_path = ix_path
self.schema = | Schema(location=TEXT(stored=True), rlocation=TEXT(stored=True))
def build_whoosh_index(self):
"""建立location的Whoosh搜索索引"""
if not os.path.exists(self.ix_path):
os.mkdir(self.ix_path)
ix = index.create_in(self.ix_path, self.schema)
else:
ix = index.open_dir(self.ix_path)
self.writer = ix.writer()
def add_search_doc(self, location, rlocation, execute_right_now=True):
"""添加搜索内容"""
self.writer.add_document(location=unicode(location, 'utf-8'), rlocation=unicode(rlocation, 'utf-8'))
if execute_right_now:
self.writer.commit()
def commit(self):
self.writer.commit()
def grcount2csv(output_path=config.csv_path):
"""把grcount的信息转化成CSV文件,以便在前端展示"""
with open(os.path.join(output_path, 'grcount.csv'), 'wb') as csvfile:
csv_writer = csv.writer(csvfile)
csv_writer.writerow(['title', 'country', 'count', 'latitude', 'longitude'])
pipe = get_redis_pipeline()
keys_pattern = ':'.join((config.redis_count_prefix, '*', 'lng'))
pipe.keys(pattern=keys_pattern)
keys = pipe.execute()
for key in keys[0]:
prefix, location_and_counttry, item = key.split(':')
title, country = location_and_counttry.split('@')
pipe.get(':'.join((prefix, location_and_counttry, 'lat')))
pipe.get(':'.join((prefix, location_and_counttry, 'lng')))
pipe.get(':'.join((prefix, location_and_counttry, 'count')))
lat, lng, count = pipe.execute()
csv_writer.writerow([title, country, count, lat, lng])
|
Dinoshauer/pryvate | pryvate/blueprints/pypi/pypi.py | Python | mit | 1,874 | 0 | """PyPi blueprint."""
import os
from flask import Blueprint, current_app, g, request
blueprint = Blueprint('pypi', __name__, url_prefix='/pypi')
def register_package(localproxy):
"""Register a new package.
Creates a folder on the filesystem so a new package can be uploaded.
Arguments:
localproxy (``werkzeug.local.LocalProxy``): The localproxy object is
needed to read the ``form`` properties from the request
Returns:
``'ok'``
"""
package_dir = os.path.join(current_app.config['BASEDIR'],
localproxy.form['name'].lower())
if not os.path.isdir(package_dir):
os.mkdir(package_dir)
return 'ok'
def upload_package(localproxy):
"""Save a new package and it's md5 sum in a previously registered path.
Arguments:
localproxy (``werkzeug.local.LocalProxy``):The localproxy object is
needed to read the ``form`` properties from the request
Returns:
``'ok'``
"""
contents = localproxy.files['content']
digest = localproxy.form['md5_dig | est']
file_path = os.path.join(current_app.config['BASEDIR'],
localproxy.form['name'].lower(),
contents.filename.lower())
contents.save(file_path)
with open('{}.md5'.format(file_path), 'w') as md5_digest:
md5_digest.write(digest)
return 'ok'
@blueprint. | route('', methods=['POST'])
def post_pypi():
"""Find a package and return the contents of it.
Upon calling this endpoint the ``PRIVATE_EGGS`` set will be updated,
and proper action will be taken based on the request.
"""
actions = {
'submit': register_package,
'file_upload': upload_package,
}
if g.database.new_egg(request.form['name'].lower()):
return actions[request.form[':action']](request)
|
PPCDroid/external-lirc | tools/pronto2lirc.py | Python | gpl-2.0 | 4,612 | 0.019514 | #
# A tool for converting Pronto format hex codes to lircd.conf format
#
# Copyright by Olavi Akerman <olavi.akerman@gmail.com>
#
# pronto2lirc is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
class CodeSequence: # Handles codesequences parsing and conversion
def ProcessPreamble(self,sPreamble):
if sPreamble[0]<>"0000":
raise "Formats other than starting with 0000 are not supported!"
self.dIRFrequency=1000000/(long(sPreamble[1],16) * 0.241246) # Frequency of the IR carrier in Khz
self.lOnceSequenceLength=long(sPreamble[2],16) # No of pulses that is sent once when button is pressed
self.lRepeatableSequenceLength=long(sPreamble[3],16) # No of pulses that are repeatable while button pressed
def CreatePulses(self,sItems):
self.dPulseWidths=[] # Table of Pulse widths. Length is repsented in microseconds
for i in sItems:
self.dPulseWidths.append(1000000*long(i,16)/self.dIRFrequency) # Convert pulse widths to uS
if len(self.dPulseWidths)<>2*(self.lOnceSequenceLength+self.lRepeatableSequenceLength):
raise "Number of actual codes does not match the header information!"
def AnalyzeCode(self,sCodeName,sHexCodes):
sHexTable=sHexCodes.split()
self.sCodeName=sCodeName.rstrip() # Name of the Code associated with code sequence
self.ProcessPreamble(sHexTable[:4]) # First four sequences make up Preamble
self.CreatePulses(sHexTable[4:]) # The rest are OnceSequence + RepeatableSequence
return self.dPulseWidths[-1] # Final gap=last off signal length
def WriteCodeSection(self,fOut):
fOut.write('\n\t\t\tname '+self.sCodeName+'\n')
for i in range(len(self.dPulseWidths)-1): # Do not write the last signal as lircd.conf
# does not contain last off signal length
if (i%6) ==0:
fOut.write('\t\t\t\t')
fOut.write('%d ' % round(self.dPulseWidths[i]))
if (i+1)%6 ==0: # Group codes as six per line
fOut.write('\n')
fOut.write('\n') # Final EOL
class HexParser:
def __init__(self,sFileName):
f=open(sFileName,'r')
self.sRemoteName=sFileName.split('.')[:1][0] # Name of the remote
self.sCodes=[] # Codes contained in file
self.lGap=0 # Final Gap
while True:
sLine=f.readline()
if sLine=='' or sLine.strip()=='': | # EOF?
break
[sCodeName,sHexCodes]=sLine.split(':')
seq=CodeSequence()
finalgap=seq.AnalyzeCode(sCodeName,sHexCodes)
if finalgap>self.lGap:
self.lGap=finalgap
self.sCodes.append(seq)
f.close()
def WriteLIRC | Conf(self,sOutFileName):
f=open(sOutFileName,'w')
f.write('begin remote\n')
f.write('\tname\t'+self.sRemoteName+'\n')
f.write('\tflags\tRAW_CODES\n')
f.write('\teps\t30\n')
f.write('\taeps\t100\n')
f.write('\tgap\t%d\n' % self.lGap )
f.write('\t\tbegin raw_codes\n')
for i in self.sCodes:
i.WriteCodeSection(f)
f.write('\t\tend raw_codes\n')
f.write('end remote\n')
f.close()
# Main
import sys
if len(sys.argv)<>2:
print "Pronto codes converter to lircd.conf format (version 1.00)"
print
print "Usage: pronto2lirc.py inputfile.hex "
print
print "Input file must be in format where each line contains all codes"
print " associated with a button like:"
print " Button1:0000 00ac 000b 00de ..."
print
print "Result: lircd.conf file is written to the current directory"
print " containing all the Pronto codes extracted from"
print " the input file"
print
else:
p=HexParser(sys.argv[1])
p.WriteLIRCConf('lircd.conf')
|
gsathya/dsalgo | run_tests.py | Python | mit | 834 | 0.004796 | import unittest
import test.bst
import test.sorted_array_to_bst
import test.edit_distance
import test.binary_search
import test.print_level_order_test
import test.binary_add_test
impor | t test.linked_list_test
suite = unittest.TestLoader()
suite | = suite.loadTestsFromModule(test.bst)
suite.addTest(unittest.TestLoader().loadTestsFromModule(test.sorted_array_to_bst))
suite.addTest(unittest.TestLoader().loadTestsFromModule(test.edit_distance))
suite.addTest(unittest.TestLoader().loadTestsFromModule(test.binary_search))
#suite.addTest(unittest.TestLoader().loadTestsFromModule(test.print_level_order_test))
suite.addTest(unittest.TestLoader().loadTestsFromModule(test.binary_add_test))
suite.addTest(unittest.TestLoader().loadTestsFromModule(test.linked_list_test))
if __name__ == "__main__":
unittest.TextTestRunner().run(suite)
|
rgayon/plaso | tests/parsers/setupapi.py | Python | apache-2.0 | 5,214 | 0.000959 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Tests for the Windows Setupapi log parser."""
from __future__ import unicode_literals
import unittest
from plaso.parsers import setupapi
from tests.parsers import test_lib
class SetupapiLogUnitTest(test_lib.ParserTestCase):
"""Tests for the Windows Setupapi log parser.
Since Setupapi logs record in local time, these tests assume that the local
timezone is set to UTC.
"""
def testParseDevLog(self):
"""Tests the Parse function on setupapi.dev.log."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(['setupapi.dev.log'], parser)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 388)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:59:28.110000')
event = events[2]
self.CheckTimestamp(event.timestamp, '2016-10-05 11:16:03.747000')
event = events[4]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2016-10-05 11:16:16.471000')
expected_message = (
'Device Install (Hardware initiated) - SWD\\IP_TUNNEL_VBUS'
'\\Teredo_Tunnel_Device')
expected_short_message = (
'Device Install (Hardware initiated) - SWD\\IP_TUNNEL_VBUS'
'\\Teredo_Tunnel_Device')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[57]
event_data = self._GetEventDataOfEvent(storage_writer, event)
expected_message = (
'Device Install (DiInstallDriver) - C:\\Windows\\System32'
'\\DriverStore\\FileRepository\\prnms003.inf_x86_8f17aac186c70ea6'
'\\prnms003.inf - SUCCESS')
expected_short_message = (
'SUCCESS - Device Install (DiInstallDriver) - C:\\Windows\\System32'
'\\DriverStore\\...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[386]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2016-11-22 23:50:30.938000')
expected_message = (
'Device Install (Hardware initiated) - SWD\\WPDBUSENUM'
'\\_??_USBSTOR#Disk&Ven_Generic&Prod_Flash_Disk&Rev_8.07#99E2116A&0'
'#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}')
expected_short_message = (
'Device Install (Hardware initiated) - SWD\\WPDBUSENUM'
'\\_??_USBSTOR#Disk&Ven_Gen...')
self._TestGetMessageStrings(
event_data, expected_message, expect | ed_short_message)
def testParseSetupLog(self):
"""Tests the Parse function on setupapi.setup.log."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(['setupapi.setup.log'], parser)
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 32)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:16.599000')
| event = events[2]
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:28.973000')
event = events[4]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2015-11-22 17:53:29.305000')
expected_message = 'Setup Plug and Play Device Install'
expected_short_message = 'Setup Plug and Play Device Install'
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[14]
event_data = self._GetEventDataOfEvent(storage_writer, event)
expected_message = (
'Setup online Device Install (Hardware initiated) - SW'
'\\{97ebaacc-95bd-11d0-a3ea-00a0c9223196}'
'\\{53172480-4791-11D0-A5D6-28DB04C10000}')
expected_short_message = (
'Setup online Device Install (Hardware initiated) - SW'
'\\{97ebaacc-95bd-11d0-a3e...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
event = events[30]
event_data = self._GetEventDataOfEvent(storage_writer, event)
self.CheckTimestamp(event.timestamp, '2015-11-22 17:57:17.502000')
expected_message = (
'Setup Import Driver Package - C:\\Windows\\system32'
'\\spool\\tools\\Microsoft XPS Document Writer\\prnms001.Inf')
expected_short_message = (
'Setup Import Driver Package - C:\\Windows\\system32\\spool'
'\\tools\\Microsoft XPS D...')
self._TestGetMessageStrings(
event_data, expected_message, expected_short_message)
def testParseSetupLogWithTimeZone(self):
"""Tests the Parse function on setupapi.setup.log with a time zone."""
parser = setupapi.SetupapiLogParser()
storage_writer = self._ParseFile(
['setupapi.setup.log'], parser, timezone='CET')
self.assertEqual(storage_writer.number_of_warnings, 0)
self.assertEqual(storage_writer.number_of_events, 32)
events = list(storage_writer.GetEvents())
event = events[0]
self.CheckTimestamp(event.timestamp, '2015-11-22 16:53:16.599000')
# Allow the test module to be run directly.
if __name__ == '__main__':
  unittest.main()
|
o2gy84/libproperty | src/clang_compilation_database.py | Python | gpl-3.0 | 3,027 | 0.002973 | #!/usr/bin/env python
# encoding: utf-8
# Christoph Koke, 2013
"""
Writes the c and cpp compile commands into build/compile_commands.json
see http://clang.llvm.org/docs/JSONCompilationDatabase.html
Usage:
def configure(conf):
conf.load('compiler_cxx')
...
conf.load('clang_compilation_database')
"""
import sys, os, json, shlex, pipes
from waflib import Logs, TaskGen, Task
# Ask waf to retain each task's last executed command line; it is read back
# as task.last_cmd when the compilation database is written.
Task.Task.keep_last_cmd = True
# shlex.quote exists from Python 3.3 (hex 0x3030000); older interpreters use
# the equivalent pipes.quote.
if sys.hexversion >= 0x3030000:
	quote = shlex.quote
else:
	quote = pipes.quote
@TaskGen.feature('c', 'cxx')
@TaskGen.after_method('process_use')
def collect_compilation_db_tasks(self):
	"Add a compilation database entry for compiled tasks"
	bld = self.bld
	if not hasattr(bld, 'clang_compilation_database_tasks'):
		# First compiled task generator seen: create the registry and make
		# sure the database is dumped once the build finishes.
		bld.clang_compilation_database_tasks = []
		bld.add_post_fun(write_compilation_database)
	clang_db = bld.clang_compilation_database_tasks
	compile_classes = tuple(
		cls for cls in (Task.classes.get(name) for name in ('c', 'cxx')) if cls)
	clang_db.extend(
		task for task in getattr(self, 'compiled_tasks', [])
		if isinstance(task, compile_classes))
def write_compilation_database(ctx):
	"Write the clang compilation database as JSON"
	database_file = ctx.bldnode.make_node('compile_commands.json')
	Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
	try:
		# NOTE(review): json.load works on a waf Node because Node exposes a
		# read() method - confirm against the waflib version in use.
		root = json.load(database_file)
	except IOError:
		# No existing database: start from an empty one.
		root = []
	# Index existing entries by file so re-runs update rather than duplicate.
	clang_db = dict((x['file'], x) for x in root)
	for task in getattr(ctx, 'clang_compilation_database_tasks', []):
		try:
			cmd = task.last_cmd
		except AttributeError:
			# Task was not executed this run; keep whatever entry it had.
			continue
		directory = getattr(task, 'cwd', ctx.variant_dir)
		f_node = task.inputs[0]
		filename = os.path.relpath(f_node.abspath(), directory)
		# Shell-quote each argument and store the command as a single string.
		cmd = " ".join(map(quote, cmd))
		entry = {
			"directory": directory,
			"command": cmd,
			"file": filename,
		}
		clang_db[filename] = entry
	root = list(clang_db.values())
	database_file.write(json.dumps(root, indent=2))
# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
# This will make sure compile_commands.json is always fully up to date.
# Previously you could end up with a partial compile_commands.json if the build failed.
# Patch the 'c' and 'cxx' task classes so that up-to-date files are "run"
# with a no-op exec_command; this refreshes task.last_cmd for every file and
# keeps compile_commands.json complete. (The original text was corrupted by
# extraction artifacts inside the 'def' tokens; repaired here.)
for x in ('c', 'cxx'):
	if x not in Task.classes:
		continue
	t = Task.classes[x]
	def runnable_status(self):
		# Dry-run stand-in that swallows the compile command instead of
		# executing it.
		def exec_command(cmd, **kw):
			pass
		run_status = self.old_runnable_status()
		if run_status == Task.SKIP_ME:
			# Task is up to date: run it with the no-op exec_command so the
			# command line is still recorded, then restore the original.
			setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
			setattr(self, 'exec_command', exec_command)
			self.run()
			setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
		return run_status
	setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
	setattr(t, 'runnable_status', runnable_status)
|
stevelittlefish/easyforms | easyforms/form.py | Python | apache-2.0 | 35,700 | 0.004566 |
"""
Base classes for forms and fields
"""
import logging
from collections import OrderedDict
from flask import Markup, request
from . import validate
from . import exceptions
from . import formtype
from . import styles
from .env import env
__author__ = 'Stephen Brown (Little Fish Solutions LTD)'
log = logging.getLogger(__name__)
# Application-wide CSRF token factory; installed via init_csrf(). When set,
# forms add a hidden "_csrf_token" input using the token it returns.
_csrf_generation_function = None
# Layout used when a form does not specify one; changed via set_default_form_type().
_default_form_type = formtype.HORIZONTAL
def init_csrf(csrf_generation_function):
    """
    Initialise CSRF behaviour in all forms in the application.

    Pass in a function that returns a string containing a valid CSRF token;
    it will be called in each form and a hidden input named "_csrf_token"
    will be added.  This does not validate the token.
    """
    global _csrf_generation_function
    _csrf_generation_function = csrf_generation_function
def convert_name_to_label(name):
    """Convert a hyphen separated field name to title-cased label text.

    Example: 'first-name' -> 'First Name'. (The original return line was
    corrupted by an extraction artifact inside the 'name' token; repaired.)
    """
    return name.replace('-', ' ').title()
def set_default_form_type(form_type):
    """Set the module-wide default form layout.

    :param form_type: one of the layouts listed in formtype.ALL_FORM_TYPES
    :raises ValueError: if form_type is not a known form type

    (The original raise line was corrupted by an extraction artifact before
    the closing parenthesis; repaired.)
    """
    global _default_form_type
    if form_type not in formtype.ALL_FORM_TYPES:
        raise ValueError('Invalid form type: {}'.format(form_type))
    _default_form_type = form_type
class Field(object):
def __init__(self, name, label=None, value=None, id=None, optional=False, css_class='',
readonly=False, help_text=None, strip_value=True, convert_empty_to_none=True,
validators=[], required=False, render_after_sections=False, allow_missing=False,
width=9, help_text_width=9, label_width=None, units=None, pre_units=None,
form_group_css_class=None, noclear=False, requires_multipart=False,
column_breakpoint=None, max_width=None, multiple_inputs=False,
base_input_css_class='form-control', allow_duplicates=False):
"""
:param name: The name of the field (the name field in the generated input)
:param label: The label text. If None, is automatically generated from the name
:param value: The value of the field
:param id: The id for the input. If None, the name is used
:param optional: If True, '(optional)' is rendered next to the label. Defaults to False
:param css_class: If set, adds extra css classes to the input (space separate inside string)
:param readonly: If True, makes this field readonly. If the parent form.readonly is True,
then that will take precendence over this and this field will also be
readonly. Defaults to False
:param help_text: Text rendered beside the input
:param strip_value: If True (by default) strips whitespace off of the submitted value
:param convert_empty_to_none: If True (by default) converts empty strings to None in submitted values
:param validators: List of validation functions
:param required: If True, automatically adds the 'required' validator. Defaults to False
:param render_after_sections: If True, will render this after all sections in form.render_all().
Defaults to False
:param allow_missing: If True, we won't throw an error if this field is totally missing from the submitted
values. Used to implement checkboxes, defaults to False
:param width: The width of this component in the bootstrap grid system
:param help_text_width: The width of the help text for this component in the bootstrap grid system
:param label_width: The width of the label for this component in the bootstrap grid system
:param units: Units to append after the input
:param pre_units: Units to prepend before the input
:param form_group_css_class: The css class to append to the form group
:param noclear: If set to True, this field will not be cleared by Form.clear()
:param requires_multipart: Does this field require multipart form data? Used for file uploads
:param column_breakpoint: Bootstrap column breakpoint where horizontal form degrades into
vertical form. Values: sm, md, lg. If None (default) inherit
from form
:param max_width: Maximum width, either an integer value representing the number of pixels
or a string containing a units i.e. '50%' or '240px'
:param multiple_inputs: Set to true if this field consists of multiple input tags with the same
name. This will cause value to be an list of strings after
processing form data, with each element containing one of the
submitted values
:param base_input_css_class: The default css class to put on the input. Defaults to
form-control
:param allow_duplicates: If set to True, allow this field to be added to the form even if
there is already a field with the same name. Note that if
duplicate fields are present, only one of them will be retreivable
by name
"""
self.name = name
if label is None:
self.label = convert_name_to_label(name)
else:
self.label = label
if id is None:
self.id = name
else:
self.id = id
self.value = value
self.optional = optional
self.css_class = css_class
self._readonly = readonly
self.help_text = help_text
self.strip_value = strip_value
self.convert_empty_to_none = convert_empty_to_none
self.validators = validators[:]
self.error = None
self.render_after_sections = render_after_sections
self.allow_missing = allow_missing
self.width = width
self.help_text_width = help_text_width
self._label_width = label_width
self.units = units
self.pre_units = pre_units
self.form_group_css_class = form_group_css_class
self.noclear = noclear
self.requires_multipart = requires_multipart
self._column_breakpoint = column_breakpoint
self.max_width = max_width
if isinstance(self.max_width, int):
self.max_width = '{}px'.format(self.max_width)
self.multiple_inputs = multiple_inputs
self.base_input_css_class = base_input_css_class
self.allow_duplicates = allow_duplicates
# This should get set by the form when we add it
self.form = None
# Handle common validation options
self.required = required
if required:
self.validators.append(validate.required)
@property
def label_html(self):
show_asterisks = False
if self.form:
show_asterisks = self.form.show_asterisks
if show_asterisks and self.required:
return Markup('<span class="required">*</span> ') + self.label
return self.label
@property
def label_width(self):
if self._label_width is not None:
return self._label_width
if self.form:
return self.form.label_width
return 3
@label_width.setter
def label_width(self, val):
self._label_width = val
@property
def column_breakpoint(self):
if self._column_breakpoint is not None:
return self._column_breakpoint
if self.form:
return self.form.column_breakpoint
return 'sm'
@column_breakpoint.setter
def column_breakpoint(self, val):
self._column_breakpoint = val
@property
def readonly(self):
if self.form and self.form.readonly is True:
return True
return self._readonly
@readonly.setter
def readonly(self, val):
self._readonly = val
def render(self):
return '<div class="alert alert-warning">Render not implemented for {}!</div>'.format(self.__class__.__name__)
d |
braghiere/JULESv4.6_clump | examples/us-me2/output/plot_limiting_vertical_bl.py | Python | gpl-2.0 | 10,209 | 0.029484 | # June 2015
# read and plot the co2 runs on jules
import os
import matplotlib.pyplot as plt
import numpy as np
import sys
from matplotlib.font_manager import FontProperties
from matplotlib.ticker import MultipleLocator
import matplotlib.patches as mpatches # for | mask legend
from matplotlib.font_manager import FontProperties
from matplotlib import cm
import pandas as pd
from matplotlib import dates as d
import datetime as dt
import scipy.stats as st
import matplotlib.pyplot as plt; plt.rcdefaults()
import numpy | as np
from scipy.ndimage.filters import gaussian_filter
import scipy.ndimage
#from colormap_viridis import test_cm as viridis # viridis colormap from
#from colormaps_reference.py import *
# https://github.com/BIDS/colormap/blob/master/option_d.py
fontP = FontProperties()
#fontP.set_size('small')
fontP.set_size('large')
plt.style.use('ggplot')
SIZE = 32
plt.rc('font', size=SIZE) # controls default text sizes
plt.rc('axes', titlesize=SIZE) # fontsize of the axes title
plt.rc('axes', labelsize=SIZE) # fontsize of the x any y labels
plt.rc('xtick', labelsize=SIZE) # fontsize of the tick labels
plt.rc('ytick', labelsize=SIZE) # fontsize of the tick labels
plt.rc('legend', fontsize=SIZE) # legend fontsize
plt.rc('figure', titlesize=SIZE) # # size of the figure title
#color_palette = ['#3366FF','#33CCFF', '#33FFCC']
# when using a palette (discrete number of colors) set karg 'colors'
# when using a colormpap use karg 'cmap'
# yellow to indigo
#rsndom
#color_palette_test = ['#000F89','#005489','#00897a','#008936','#538900','#897b00','#893600']
#green_palette =['rgb(237,248,233)','rgb(199,233,192)','rgb(161,217,155)','rgb(116,196,118)','rgb(65,171,93)','rgb(35,139,69)','rgb(0,90,50)']
#green_palette_hex=['#edf8e9', '#c7e9c0', '#a1d99b', '#74c476', '#41ab5d', '#238b45', '#005a32']
#green_palette_hex8=['#f7fcf5', '#e5f5e0', '#c7e9c0', '#a1d99b', '#74c476', '#41ab5d', '#238b45', '#005a32']
#inputdir = "/home/mn811042/src/JULES_photo/JULES_photo_Renato/output/c4"
#filename = 'output_layer_200_%i_12.csv'
inputdir = "/glusterfs/phd/users/mn811042/jules_struct/JULES_trunk_lim_gpp/examples/boreas/output/files/"
#filename = 'output_layer_250_%i_12_a_042.csv'
filename = 'output_layer_500_%i_11_CRM_5_diff_struc.txt'
#filename2 = 'output_layer_400_%i_12_a_05_idown.csv'
#output_layer_500_%d_11_CRM_5_diff.txt
PPFDs = np.arange(50)
#temps = np.arange(0,52,2)
#cas = np.arange(200,850,50)
layers = np.arange(1,11,1)
szas = np.arange(30,91,1)
data = pd.read_csv("file_lai_500_CRM_5_diff_struc.csv", index_col=0, parse_dates=True).tz_localize("UTC").tz_convert("Etc/GMT+7")
data.head()
#data['Time'] = data.index.map(lambda x: x.strftime("%H:%M"))
#print data['Time']
#data.head()
#data = data.groupby('Time').describe().unstack()
data = data.groupby(pd.cut(data["cosz"], np.arange(0.0,1.0,0.075))).describe().unstack()
#print data
#data.index = pd.to_datetime(data.index.astype(str))
data.index = np.degrees(np.arccos(data["cosz"]['mean']))
szas = np.int16(data.index)
print szas
#print data
#for opt T
#filename = 'output_%i_%i_pft%i_opt_t'
#PPFDs = np.arange(0)
#temps = np.arange(0,90,10)
#cas = np.arange(400)
# create a matrix dim0: PPFDs,
# dim1: Tl
# dim2: cas
# dim3: variables # An, Wc, Wl, We, mask (1 = Wcarb limiting; 2= Wexp limiting)
ft = 3
#W = np.zeros((len(PPFDs),len(temps),len(cas),12))
W = np.zeros((len(szas),len(layers),10))
#for i,p in enumerate(PPFDs):
for j,sza in enumerate(szas):
ofile = filename %(sza)
print ofile
with open(os.path.join(inputdir,ofile),'r') as f:
#header = f.readline()
lines = f.readlines()
for k,line in enumerate(lines): #podria hacer otro loop
#por todas las variables
W[j,k,0] = line.split(' ')[1] # GPP
W[j,k,1] = line.split(' ')[3] # Wcarb
W[j,k,2] = line.split(' ')[2] # Wlite
W[j,k,3] = line.split(' ')[4] # Wexp
#W[j,k,4] = line.split(',')[14] # Rd
#W[j,k,5] = line.split(',')[11] # ci
#W[j,k,6] = line.split(',')[12] # gl
#W[j,k,7] = line.split(',')[18] # dqc
#W[j,k,9] = W[j,k,0] + W[j,k,4] # NEE
W[j,k,9] = W[j,k,0] #GPP
#print j,k,W[j,k,0]
if (W[j,k,1]==0) & (W[j,k,2]==0) & (W[j,k,3]==0):
Wmin = 0
ma = 0 # all are zero
elif (W[j,k,1] <= W[j,k,2]): # compare Wcarb and Wlite
Wmin = W[j,k,1]
ma= 1 # Wcarb limiting
else:
Wmin = W[j,k,2]
ma= 2 # Wlite limiting
# compare the minimun of Wcarb and Wlite with Wexp, but only if they are not all 0
if (ma!=0) & (W[j,k,3] <= Wmin):
Wmin = W[j,k,3]
ma= 3 # Wexp limiting
W[j,k,8] = ma
#print W[j,k,8]
#create masks # ILL TRy without, just using vataible l=8
#if (f==0): W[i,j,9] = 1
#if (f==1): W[i,j,10] = 1
#if (f==2): W[i,j,11] = 1
###########################################################################################
#sys.exit()
###############################################################################################
# MASKS for limiting regimes
wc_BL = np.ma.masked_where(W[:,:,8] !=1, W[:,:,0]) #mask what is not wc
wl_BL = np.ma.masked_where(W[:,:,8] !=2, W[:,:,0]) #mask what is no wl
we_BL = np.ma.masked_where(W[:,:,8] !=3, W[:,:,0]) #mask what is not we
print "wc_BL=",wc_BL
print "wl_BL=",wl_BL
print "we_BL=",we_BL
#sys.exit()
#####################################################################################
#plots the An,would be nice to add shapes for limits
#plt.scatter(temps_repeat,PPFDs_tile,c=W[:,:,4,0].T,s=100) # scatter with colored balls
#
print "HERE"
szas_repeat= np.repeat(layers,len(szas))
layers_tile = np.tile(szas,len(layers))
#W=W5
#plt.clf()
fig,ax = plt.subplots(figsize=(10,10))
#levels = np.arange(0,16,0.5)
#levels = np.arange(0,16,2)
fig.subplots_adjust(right=0.75)
#ca=200ppm
#plt.subplot(311)
#plt.title('ca = 200 ppm')
# Mask where not 1 (carbon), carbon=1, rest=true(masked)
W_carb = np.ma.masked_where(W[:,:,8].T !=1, W[:,:,8].T)
W_lite = np.ma.masked_where(W[:,:,8].T !=2, W[:,:,8].T)
W_exp = np.ma.masked_where(W[:,:,8].T !=3, W[:,:,8].T)
print "W_carb =",W_carb
print "W_lite =",W_lite
print "W_exp =",W_exp
#print "W[:,:,0,0]*1e6=",W[:,:,0]*1e6
#print "GPP[:,:,0,0]*1e6=",W[:,:,9]*1e6
plt.gca().invert_yaxis()
plt.ylim(ymin=10.5,ymax=0.5)
plt.xlim(xmin=29.2,xmax=90.5)
plt.yticks(np.arange(1.0, 10.1, 1.0))
#c1=plt.contourf(temps,PPFDs,W[:,:,0,0]*1e6,levels,extend='both',cmap= viridis,alpha=0.8 ) # so th enegative values dont appear
#c1=plt.contourf(layers,szas,W[:,:,0]*1e6,levels,extend='both',alpha=0.8 ) ,np.arange(0, 0.901, .001)
# RAW
#c1=plt.contourf(szas,layers,W[:,:,9].T,np.arange(0, 3.01, 0.05),extend='both',cmap=cm.viridis,alpha=0.6 )
# SMOOTHED gaussian_filter(data, sigma)
#data = gaussian_filter(W[:,:,9].T,0.7)
c1=plt.contourf(szas,layers,W[:,:,9].T,np.arange(0, 3.01, 0.05),extend='both',cmap=cm.viridis,alpha=1.0,interpolation='bicubic')
#c1 = plt.imshow(W[:,:,9].T,extent=(max(szas), min(szas), max(layers), min(layers)),interpolation='bicubic')
# in white
# To avoid rainbow scale could use , cmap=plt.cm.RdYlBu, but i think is less clear
# Gives error if the mask is all true (that limit is not present)
# If the mask is all tru, scatter gives a value error,in that case
# scatter with dummy argument, so the label still appears in the legend
#try: plt.scatter(szas_repeat,layers_tile,c=W_carb,marker='^',color='k',s=30,label='c |
kervi/kervi | ukervi/ukervi/platforms/upython/i2c_driver.py | Python | mit | 1,355 | 0.00369 | #Copyright 2018 Tim Wentlau.
#Distributed under the MIT License. See LICENSE in root of project.
from kervi.hal.i2c import II2CDeviceDriver
class I2CDeviceDriver(II2CDeviceDriver):
    """
    Class for communicating with an I2C device.

    Placeholder implementation: every operation raises NotImplementedError
    until a concrete driver is provided for this platform. (Two method
    definitions in the original were corrupted by extraction artifacts -
    'read_raw8' and 'little_endian' - repaired here.)
    """
    def __init__(self, address, busnum):
        raise NotImplementedError

    def write_raw8(self, value):
        raise NotImplementedError

    def write8(self, register, value):
        raise NotImplementedError

    def write16(self, register, value):
        raise NotImplementedError

    def write_list(self, register, data):
        raise NotImplementedError

    def read_list(self, register, length):
        raise NotImplementedError

    def read_raw8(self):
        raise NotImplementedError

    def read_U8(self, register):
        raise NotImplementedError

    def read_S8(self, register):
        raise NotImplementedError

    def read_U16(self, register, little_endian=True):
        raise NotImplementedError

    def read_S16(self, register, little_endian=True):
        raise NotImplementedError

    def read_U16LE(self, register):
        raise NotImplementedError

    def read_U16BE(self, register):
        raise NotImplementedError

    def read_S16LE(self, register):
        raise NotImplementedError

    def read_S16BE(self, register):
        raise NotImplementedError
|
zwegner/pythonc | tests/import/submodule.py | Python | gpl-3.0 | 30 | 0 | print('In sub | module | .')
x = 3
|
rven/odoo | addons/hr/wizard/hr_plan_wizard.py | Python | agpl-3.0 | 1,529 | 0.001962 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo import api, fields, mod | els
class HrPlanWizard(models.TransientModel):
    _name = 'hr.plan.wizard'
    _description = 'Plan Wizard'

    # Defaults to the first plan found; the employee defaults to the record
    # the wizard was opened from (context active_id).
    plan_id = fields.Many2one('hr.plan', default=lambda self: self.env['hr.plan'].search([], limit=1))
    employee_id = fields.Many2one(
        'hr.employee', string='Employee', required=True,
        default=lambda self: self.env.context.get('active_id', None),
    )

    def action_launch(self):
        """Schedule every activity of the selected plan on the employee and
        open the employee's form view.

        Activities whose responsible user cannot read employees are skipped.
        (The method name was corrupted by an extraction artifact in the
        original - 'action_laun | ch' - repaired here.)
        """
        for activity_type in self.plan_id.plan_activity_type_ids:
            responsible = activity_type.get_responsible_id(self.employee_id)
            if self.env['hr.employee'].with_user(responsible).check_access_rights('read', raise_exception=False):
                date_deadline = self.env['mail.activity']._calculate_date_deadline(activity_type.activity_type_id)
                self.employee_id.activity_schedule(
                    activity_type_id=activity_type.activity_type_id.id,
                    summary=activity_type.summary,
                    note=activity_type.note,
                    user_id=responsible.id,
                    date_deadline=date_deadline
                )
        return {
            'type': 'ir.actions.act_window',
            'res_model': 'hr.employee',
            'res_id': self.employee_id.id,
            'name': self.employee_id.display_name,
            'view_mode': 'form',
            'views': [(False, "form")],
        }
|
ExCiteS/geokey-wegovnow | geokey_wegovnow/tests/test_templatetags.py | Python | mit | 2,388 | 0 | """Test all template tags."""
from django.test import TestCase
from allauth.socialaccount.models import SocialApp, SocialAccount
from geokey.users.tests.model_factories import UserFactory
from geokey.users.templatetags.social import get_social_apps
from geokey_wegovnow.templatetags import wegovnow
class TemplateTagsTest(TestCase):
    """Tests for template tags.

    (Two membership assertions in the original were corrupted by extraction
    artifacts - 'i | n' - repaired here.)
    """

    def test_exclude_uwum_app(self):
        """Test excluding UWUM app."""
        socialapp_1 = SocialApp.objects.create(
            provider='facebook',
            name='Facebook',
            client_id='xxxxxxxxxxxxxxxxxx',
            secret='xxxxxxxxxxxxxxxxxx',
            key=''
        )
        socialapp_2 = SocialApp.objects.create(
            provider='twitter',
            name='Twitter',
            client_id='xxxxxxxxxxxxxxxxxx',
            secret='xxxxxxxxxxxxxxxxxx',
            key=''
        )
        socialapp_3 = SocialApp.objects.create(
            provider='uwum',
            name='UWUM',
            client_id='xxxxxxxxxxxxxxxxxx',
            secret='',
            key=''
        )
        socialapps = wegovnow.exclude_uwum_app(get_social_apps())
        # Regular providers remain; the UWUM app is filtered out.
        self.assertTrue(socialapp_1 in socialapps)
        self.assertTrue(socialapp_2 in socialapps)
        self.assertFalse(socialapp_3 in socialapps)

    def test_exclude_uwum_accounts(self):
        """Test excluding UWUM accounts."""
        user = UserFactory.create()
        socialaccount_1 = SocialAccount.objects.create(
            user=user,
            provider='facebook',
            uid='5454'
        )
        socialaccount_2 = SocialAccount.objects.create(
            user=user,
            provider='twitter',
            uid='5478'
        )
        socialaccount_3 = SocialAccount.objects.create(
            user=user,
            provider='uwum',
            uid='1547'
        )
        socialaccount_4 = SocialAccount.objects.create(
            user=user,
            provider='uwum',
            uid='5158'
        )
        socialaccounts = SocialAccount.objects.filter(user=user)
        socialaccounts = wegovnow.exclude_uwum_accounts(socialaccounts)
        # Regular provider accounts remain; every UWUM account is filtered out.
        self.assertTrue(socialaccount_1 in socialaccounts)
        self.assertTrue(socialaccount_2 in socialaccounts)
        self.assertFalse(socialaccount_3 in socialaccounts)
        self.assertFalse(socialaccount_4 in socialaccounts)
|
StrellaGroup/erpnext | erpnext/hr/doctype/payroll_entry/payroll_entry.py | Python | gpl-3.0 | 21,415 | 0.027411 | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from dateutil.relativedelta import relativedelta
from frappe.utils import cint, flt, nowdate, add_days, getdate, fmt_money, add_to_date, DATE_FORMAT, date_diff
from frappe import _
from erpnext.accounts.utils import get_fiscal_year
from erpnext.hr.doctype.employee.employee import get_holiday_list_for_employee
class PayrollEntry(Document):
def onload(self):
if not self.docstatus==1 or self.salary_slips_submitted:
return
# check if salary slips were manually submitted
entries = frappe.db.count("Salary Slip", {'payroll_entry': self.name, 'docstatus': 1}, ['name'])
if cint(entries) == len(self.employees):
self.set_onload("submitted_ss", True)
def on_submit(self):
s | elf.create_salary_slips()
def before_submit(self):
if self.validate_attendance:
if self.validate_employee_atten | dance():
frappe.throw(_("Cannot Submit, Employees left to mark attendance"))
def on_cancel(self):
frappe.delete_doc("Salary Slip", frappe.db.sql_list("""select name from `tabSalary Slip`
where payroll_entry=%s """, (self.name)))
	def get_emp_list(self):
		"""
			Returns list of active employees based on selected criteria
			and for which salary structure exists
		"""
		# WHERE fragment from company/branch/department/designation filters
		# plus the joining/relieving date window for the payroll period.
		cond = self.get_filter_condition()
		cond += self.get_joining_relieving_condition()
		condition = ''
		if self.payroll_frequency:
			condition = """and payroll_frequency = '%(payroll_frequency)s'"""% {"payroll_frequency": self.payroll_frequency}
		# Active salary structures of this company with a matching
		# timesheet flag (and payroll frequency, when one is selected).
		sal_struct = frappe.db.sql_list("""
			select
				name from `tabSalary Structure`
			where
				docstatus = 1 and
				is_active = 'Yes'
				and company = %(company)s and
				ifnull(salary_slip_based_on_timesheet,0) = %(salary_slip_based_on_timesheet)s
				{condition}""".format(condition=condition),
			{"company": self.company, "salary_slip_based_on_timesheet":self.salary_slip_based_on_timesheet})
		if sal_struct:
			cond += "and t2.salary_structure IN %(sal_struct)s "
			cond += "and %(from_date)s >= t2.from_date"
			# Employees with a submitted assignment to one of those structures
			# effective on or before the period end date.
			emp_list = frappe.db.sql("""
				select
					distinct t1.name as employee, t1.employee_name, t1.department, t1.designation
				from
					`tabEmployee` t1, `tabSalary Structure Assignment` t2
				where
					t1.name = t2.employee
					and t2.docstatus = 1
			%s order by t2.from_date desc
			""" % cond, {"sal_struct": tuple(sal_struct), "from_date": self.end_date}, as_dict=True)
			return emp_list
		# NOTE(review): implicitly returns None when no salary structure
		# matches - callers appear to expect an iterable; confirm.
def fill_employee_details(self):
self.set('employees', [])
employees = self.get_emp_list()
if not employees:
frappe.throw(_("No employees for the mentioned criteria"))
for d in employees:
self.append('employees', d)
self.number_of_employees = len(employees)
if self.validate_attendance:
return self.validate_employee_attendance()
def get_filter_condition(self):
self.check_mandatory()
cond = ''
for f in ['company', 'branch', 'department', 'designation']:
if self.get(f):
cond += " and t1." + f + " = '" + self.get(f).replace("'", "\'") + "'"
return cond
def get_joining_relieving_condition(self):
cond = """
and ifnull(t1.date_of_joining, '0000-00-00') <= '%(end_date)s'
and ifnull(t1.relieving_date, '2199-12-31') >= '%(start_date)s'
""" % {"start_date": self.start_date, "end_date": self.end_date}
return cond
def check_mandatory(self):
for fieldname in ['company', 'start_date', 'end_date']:
if not self.get(fieldname):
frappe.throw(_("Please set {0}").format(self.meta.get_label(fieldname)))
	def create_salary_slips(self):
		"""
			Creates salary slip for selected employees if already not created
		"""
		self.check_permission('write')
		self.created = 1
		emp_list = [d.employee for d in self.get_emp_list()]
		if emp_list:
			# Arguments forwarded to every generated Salary Slip document.
			args = frappe._dict({
				"salary_slip_based_on_timesheet": self.salary_slip_based_on_timesheet,
				"payroll_frequency": self.payroll_frequency,
				"start_date": self.start_date,
				"end_date": self.end_date,
				"company": self.company,
				"posting_date": self.posting_date,
				"deduct_tax_for_unclaimed_employee_benefits": self.deduct_tax_for_unclaimed_employee_benefits,
				"deduct_tax_for_unsubmitted_tax_exemption_proof": self.deduct_tax_for_unsubmitted_tax_exemption_proof,
				"payroll_entry": self.name
			})
			# Large payrolls are generated in a background job to avoid
			# request timeouts; small ones run synchronously.
			if len(emp_list) > 30:
				frappe.enqueue(create_salary_slips_for_employees, timeout=600, employees=emp_list, args=args)
			else:
				create_salary_slips_for_employees(emp_list, args, publish_progress=False)
				# since this method is called via frm.call this doc needs to be updated manually
				self.reload()
	def get_sal_slip_list(self, ss_status, as_dict=False):
		"""
			Returns list of salary slips based on selected criteria
		"""
		cond = self.get_filter_condition()
		# Two-stage formatting: the outer %-format re-inserts literal %s
		# placeholders for the real parameters and splices the filter
		# fragment; frappe.db.sql then binds the tuple values. Only slips
		# without a linked journal entry are returned.
		ss_list = frappe.db.sql("""
			select t1.name, t1.salary_structure from `tabSalary Slip` t1
			where t1.docstatus = %s and t1.start_date >= %s and t1.end_date <= %s
			and (t1.journal_entry is null or t1.journal_entry = "") and ifnull(salary_slip_based_on_timesheet,0) = %s %s
		""" % ('%s', '%s', '%s','%s', cond), (ss_status, self.start_date, self.end_date, self.salary_slip_based_on_timesheet), as_dict=as_dict)
		return ss_list
def submit_salary_slips(self):
self.check_permission('write')
ss_list = self.get_sal_slip_list(ss_status=0)
if len(ss_list) > 30:
frappe.enqueue(submit_salary_slips_for_employees, timeout=600, payroll_entry=self, salary_slips=ss_list)
else:
submit_salary_slips_for_employees(self, ss_list, publish_progress=False)
def email_salary_slip(self, submitted_ss):
if frappe.db.get_single_value("HR Settings", "email_salary_slip_to_employee"):
for ss in submitted_ss:
ss.email_salary_slip()
	def get_loan_details(self):
		"""
			Get loan details from submitted salary slip based on selected criteria
		"""
		cond = self.get_filter_condition()
		# Loan rows (account, amounts, employee) joined from the Salary Slip
		# Loan child table of submitted slips in the payroll period; an empty
		# result is normalised to [].
		return frappe.db.sql(""" select eld.loan_account, eld.loan,
				eld.interest_income_account, eld.principal_amount, eld.interest_amount, eld.total_payment,t1.employee
			from
				`tabSalary Slip` t1, `tabSalary Slip Loan` eld
			where
				t1.docstatus = 1 and t1.name = eld.parent and start_date >= %s and end_date <= %s %s
			""" % ('%s', '%s', cond), (self.start_date, self.end_date), as_dict=True) or []
def get_salary_component_account(self, salary_component):
account = frappe.db.get_value("Salary Component Account",
{"parent": salary_component, "company": self.company}, "default_account")
if not account:
frappe.throw(_("Please set default account in Salary Component {0}")
.format(salary_component))
return account
	def get_salary_components(self, component_type):
		# Component rows of the given parentfield (e.g. "earnings") gathered
		# from all submitted salary slips matching the entry's filters.
		salary_slips = self.get_sal_slip_list(ss_status = 1, as_dict = True)
		if salary_slips:
			salary_components = frappe.db.sql("""select salary_component, amount, parentfield
				from `tabSalary Detail` where parentfield = '%s' and parent in (%s)""" %
				(component_type, ', '.join(['%s']*len(salary_slips))), tuple([d.name for d in salary_slips]), as_dict=True)
			return salary_components
		# NOTE(review): implicitly returns None when there are no submitted
		# slips - confirm callers handle that.
	def get_salary_component_total(self, component_type = None):
		# Sum each component's amount across all submitted slips, then map
		# the totals to their configured GL accounts via get_account().
		salary_components = self.get_salary_components(component_type)
		if salary_components:
			component_dict = {}
			for item in salary_components:
				add_component_to_accrual_jv_entry = True
				if component_type == "earnings":
					# Flexible benefits that only impact tax are excluded from
					# the accrual journal entry.
					is_flexible_benefit, only_tax_impact = frappe.db.get_value("Salary Component", item['salary_component'], ['is_flexible_benefit', 'only_tax_impact'])
					if is_flexible_benefit == 1 and only_tax_impact ==1:
						add_component_to_accrual_jv_entry = False
				if add_component_to_accrual_jv_entry:
					component_dict[item['salary_component']] = component_dict.get(item['salary_component'], 0) + item['amount']
			account_details = self.get_account(component_dict = component_dict)
			return account_details
def get_account(self, component_dict = None):
account_dict = {}
for s, a in component_dict.items():
account = self.get_salary_component_account(s)
accou |
saisankargochhayat/algo_quest | graph/dijkstra.py | Python | apache-2.0 | 4,465 | 0.006943 | import sys
class Vertex:
    """A graph vertex: an id plus a weighted adjacency map, with the
    bookkeeping fields (distance, visited, previous) used by Dijkstra."""
    def __init__(self, node):
        self.id = node
        self.adjacent = {}
        # Unvisited vertices start at an infinite tentative distance.
        # float('inf') replaces sys.maxint, which does not exist on Python 3
        # and was only an approximation of "unreachable".
        self.distance = float('inf')
        # Mark all nodes unvisited
        self.visited = False
        # Predecessor
        self.previous = None

    def add_neighbor(self, neighbor, weight=0):
        self.adjacent[neighbor] = weight

    def get_connections(self):
        return self.adjacent.keys()

    def get_id(self):
        return self.id

    def get_weight(self, neighbor):
        return self.adjacent[neighbor]

    def set_distance(self, dist):
        self.distance = dist

    def get_distance(self):
        return self.distance

    def set_previous(self, prev):
        self.previous = prev

    def set_visited(self):
        self.visited = True

    def __str__(self):
        return str(self.id) + ' adjacent: ' + str([x.id for x in self.adjacent])
class Graph:
    """An undirected, weighted graph keyed by vertex name."""

    def __init__(self):
        # Maps vertex name -> Vertex instance.
        self.vert_dict = {}
        self.num_vertices = 0

    def __iter__(self):
        """Iterate over the Vertex objects in the graph."""
        return iter(self.vert_dict.values())

    def add_vertex(self, node):
        """Create, store and return a new Vertex named node."""
        self.num_vertices = self.num_vertices + 1
        vertex = Vertex(node)
        self.vert_dict[node] = vertex
        return vertex

    def get_vertex(self, n):
        """Return the Vertex named n, or None when it does not exist."""
        return self.vert_dict.get(n)

    def add_edge(self, frm, to, cost = 0):
        """Add an undirected edge, creating missing endpoints on the fly."""
        for endpoint in (frm, to):
            if endpoint not in self.vert_dict:
                self.add_vertex(endpoint)
        self.vert_dict[frm].add_neighbor(self.vert_dict[to], cost)
        self.vert_dict[to].add_neighbor(self.vert_dict[frm], cost)

    def get_vertices(self):
        """Return the names of all vertices."""
        return self.vert_dict.keys()

    def set_previous(self, current):
        self.previous = current

    def get_previous(self, current):
        return self.previous
def shortest(v, path):
    '''Append the ids of v's predecessor chain to path (nearest first).'''
    node = v.previous
    while node is not None:
        path.append(node.get_id())
        node = node.previous
import heapq
def dijkstra(aGraph, start):
    """Compute shortest paths from start to every vertex of aGraph.

    Mutates the vertices in place: each ends up with its shortest distance
    from start and a `previous` pointer for path reconstruction.
    """
    print("Dijkstra's shortest path")
    # Set the distance for the start node to zero
    start.set_distance(0)

    # Heap entries are (distance, tie-breaker, vertex). The integer
    # tie-breaker prevents heapq from ever comparing two Vertex objects
    # directly, which raises TypeError on Python 3 when distances are equal.
    unvisited_queue = [(v.get_distance(), i, v) for i, v in enumerate(aGraph)]
    heapq.heapify(unvisited_queue)

    while len(unvisited_queue):
        # Pop a vertex with the smallest distance.
        uv = heapq.heappop(unvisited_queue)
        current = uv[-1]
        current.set_visited()

        for next in current.adjacent:
            # if visited, skip
            if next.visited:
                continue
            new_dist = current.get_distance() + current.get_weight(next)

            if new_dist < next.get_distance():
                next.set_distance(new_dist)
                next.set_previous(current)
                print('updated : current = %s next = %s new_dist = %s'
                      % (current.get_id(), next.get_id(), next.get_distance()))
            else:
                print('not updated : current = %s next = %s new_dist = %s'
                      % (current.get_id(), next.get_id(), next.get_distance()))

        # Rebuild heap (no decrease-key in heapq):
        # 1. Pop every item
        while len(unvisited_queue):
            heapq.heappop(unvisited_queue)
        # 2. Put all vertices not visited into the queue
        unvisited_queue = [(v.get_distance(), i, v) for i, v in enumerate(aGraph) if not v.visited]
        heapq.heapify(unvisited_queue)
if __name__ == '__main__':
    # Build the sample graph from Wikipedia's Dijkstra article.
    graph = Graph()
    for label in ('a', 'b', 'c', 'd', 'e', 'f'):
        graph.add_vertex(label)

    edges = [
        ('a', 'b', 7),
        ('a', 'c', 9),
        ('a', 'f', 14),
        ('b', 'c', 10),
        ('b', 'd', 15),
        ('c', 'd', 11),
        ('c', 'f', 2),
        ('d', 'e', 6),
        ('e', 'f', 9),
    ]
    for frm, to, cost in edges:
        graph.add_edge(frm, to, cost)

    print('Graph data:')
    for vert in graph:
        for neighbor in vert.get_connections():
            print('( %s , %s, %3d)' % (vert.get_id(), neighbor.get_id(), vert.get_weight(neighbor)))

    dijkstra(graph, graph.get_vertex('a'))

    # Reconstruct and display the path from 'a' to 'e'.
    target = graph.get_vertex('e')
    path = [target.get_id()]
    shortest(target, path)
    print('The shortest path : %s' % (path[::-1]))
|
pyro-ppl/numpyro | numpyro/version.py | Python | apache-2.0 | 107 | 0 | # Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
# Package version string, bumped on each release.
__version__ = "0.9.0"
|
jorisv/RBDynUrdf | binding/python/generate.py | Python | gpl-3.0 | 3,124 | 0.010243 | # This file is part of Tasks.
#
# Tasks is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tasks is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Tasks. If not, see <http://www.gnu.org/licenses/>.
from pybindgen import *
import sys
def import_rbd_types(mod):
    """Register the RBDyn types that the generated bindings reference,
    importing them from the existing `rbdyn` Python module."""
    for rbd_class in ('MultiBodyGraph', 'MultiBody'):
        mod.add_class(rbd_class, foreign_cpp_namespace='rbd', import_from_module='rbdyn')
if __name__ == '__main__':
    # Usage: generate.py <output-file>; writes the pybindgen-generated
    # C++ module source for the _rbdyn_urdf Python extension.
    if len(sys.argv) < 2:
        sys.exit(1)

    rbdyn_urdf = Module('_rbdyn_urdf', cpp_namespace='::rbdyn_urdf')
    rbdyn_urdf.add_include('<Reader.h>')
    rbdyn_urdf.add_include('<Writer.h>')

    # Map std::runtime_error onto a Python exception carrying .what().
    run_ex = rbdyn_urdf.add_exception('std::runtime_error', foreign_cpp_namespace=' ',
                                      message_rvalue='%(EXC)s.what()')

    # import rbd types
    import_rbd_types(rbdyn_urdf)

    # build list type
    rbdyn_urdf.add_container('std::map<int, double>', ('int', 'double'), 'map')

    # build struct
    limits = rbdyn_urdf.add_struct('Limits')
    limits.add_instance_attribute('ql', 'std::map<int, double>')
    limits.add_instance_attribute('qu', 'std::map<int, double>')
    limits.add_instance_attribute('vl', 'std::map<int, double>')
    limits.add_instance_attribute('vu', 'std::map<int, double>')
    # Reconstructed: the original source was corrupted mid-identifier here.
    limits.add_instance_attribute('tl', 'std::map<int, double>')
    limits.add_instance_attribute('tu', 'std::map<int, double>')

    urdf = rbdyn_urdf.add_struct('Urdf')
    urdf.add_instance_attribute('mbg', 'rbd::MultiBodyGraph')
    urdf.add_instance_attribute('limits', 'rbdyn_urdf::Limits')

    # build function
    rbdyn_urdf.add_function('readUrdf', retval('rbdyn_urdf::Urdf'),
                            [param('const std::string&', 'urdf')],
                            throw=[run_ex])
    rbdyn_urdf.add_function('readUrdfFile', retval('rbdyn_urdf::Urdf'),
                            [param('const std::string&', 'fileName')],
                            throw=[run_ex])
    rbdyn_urdf.add_function('writeUrdf', None,
                            [param('const std::string&', 'filename'),
                             param('const std::string&', 'robotName'),
                             param('const rbd::MultiBody&', 'mb'),
                             param('const rbdyn_urdf::Limits&', 'limits')],
                            throw=[run_ex])
    rbdyn_urdf.add_function('writeUrdf', None,
                            [param('const std::string&', 'filename'),
                             param('const std::string&', 'robotName'),
                             param('const rbd::MultiBody&', 'mb')],
                            throw=[run_ex])

    with open(sys.argv[1], 'w') as f:
        rbdyn_urdf.generate(f)
|
cjaymes/pyscap | src/scap/model/ocil_2_0/ChoiceGroupType.py | Python | gpl-3.0 | 1,043 | 0.001918 | # Copyright 2016 Casey Jaymes
# This file is part of PySCAP.
#
# PySCAP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PySCAP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PySCAP. If not, see <http://www.gnu.org/licenses/>.
from scap.Model import Model
import logging
# Module-level logger for this model class (original line was corrupted).
logger = logging.getLogger(__name__)
class ChoiceGroupType(Model):
    """Model for the OCIL choice_group element: a reusable group of one
    or more choice elements, identified by a ChoiceGroupIDPattern id.
    (The 'id' attribute line was corrupted in the original and has been
    reconstructed to match the MODEL_MAP structure used elsewhere.)
    """
    MODEL_MAP = {
        'elements': [
            # At least one nested <choice> element, collected into .choices.
            {'tag_name': 'choice', 'list': 'choices', 'class': 'ChoiceType', 'min': 1},
        ],
        'attributes': {
            'id': {'type': 'ChoiceGroupIDPattern', 'required': True},
        },
    }
|
theo-l/django_common | models/__init__.py | Python | mit | 182 | 0 | # -*- coding: utf-8 -*-
# @Author: theo-l
# @Date: 2017-06-28 20:38:33
# @Last Modified by: theo-l
# @Last Modified time: 2017-07-08 20:50:07
# Re-export the package's model base class (original line was corrupted).
from .base_models import BaseModel
|
Rademade/taiga-back | taiga/base/api/renderers.py | Python | agpl-3.0 | 24,541 | 0.000897 | # Copyright (C) 2014-2016 Andrey Antukh <niwi@niwi.nz>
# Copyright (C) 2014-2016 Jesús Espino <jespinog@gmail.com>
# Copyright (C) 2014-2016 David Barragán <bameda@dbarragan.com>
# Copyright (C) 2014-2016 Alejandro Alonso <alejandro.alonso@kaleidos.net>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied war | ranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# The code is partially taken (a | nd modified) from django rest framework
# that is licensed under the following terms:
#
# Copyright (c) 2011-2014, Tom Christie
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Renderers are used to serialize a response into specific media types.
They give us a generic way of being able to handle various media types
on the response, such as JSON encoded data or HTML output.
REST framework also provides an HTML renderer that renders the browsable API.
"""
import django
from django import forms
from django.core.exceptions import ImproperlyConfigured
from django.http.multipartparser import parse_header
from django.template import RequestContext, loader, Template
from django.test.client import encode_multipart
from django.utils import six
from django.utils.encoding import smart_text
from django.utils.six import StringIO
from django.utils.xmlutils import SimplerXMLGenerator
from taiga.base import exceptions, status
from taiga.base.exceptions import ParseError
from . import VERSION
from .request import is_form_media_type, override_method
from .settings import api_settings
from .utils import encoders
from .utils.breadcrumbs import get_breadcrumbs
import json
import copy
class BaseRenderer(object):
    """
    All renderers should extend this class, setting the `media_type`
    and `format` attributes, and override the `.render()` method.
    """
    media_type = None
    format = None
    charset = "utf-8"
    render_style = "text"

    def render(self, data, accepted_media_type=None, renderer_context=None):
        # Raise NotImplementedError (the exception class). The original
        # raised NotImplemented(...), which is the comparison sentinel and
        # not callable, so it produced a TypeError instead of the intended
        # "subclass must override" exception.
        raise NotImplementedError("Renderer class requires .render() to be implemented")
class JSONRenderer(BaseRenderer):
    """
    Renderer which serializes to JSON.
    Applies JSON's backslash-u character escaping for non-ascii characters.
    """
    media_type = "application/json"
    format = "json"
    # Encoder handling dates, UUIDs, querysets, etc.
    encoder_class = encoders.JSONEncoder
    ensure_ascii = True
    charset = None
    # JSON is a binary encoding, that can be encoded as utf-8, utf-16 or utf-32.
    # See: http://www.ietf.org/rfc/rfc4627.txt
    # Also: http://lucumr.pocoo.org/2013/7/19/application-mimetypes-and-encodings/

    def _get_indent(self, accepted_media_type, renderer_context):
        # If "indent" is provided in the context, then pretty print the result.
        # E.g. If we"re being called by the BrowsableAPIRenderer.
        renderer_context = renderer_context or {}
        indent = renderer_context.get("indent", None)

        if accepted_media_type:
            # If the media type looks like "application/json; indent=4",
            # then pretty print the result.
            # NOTE(review): parse_header is given bytes, so `params` keys may
            # be bytes on Python 3, in which case the str "indent" lookup
            # would always miss — confirm against Django's parse_header.
            base_media_type, params = parse_header(accepted_media_type.encode("ascii"))
            indent = params.get("indent", indent)

            # Clamp to the range 0..8; anything unparseable disables indenting.
            try:
                indent = max(min(int(indent), 8), 0)
            except (ValueError, TypeError):
                indent = None

        return indent

    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Render `data` into JSON.

        Returns a bytestring; an empty one when data is None.
        """
        if data is None:
            return bytes()

        indent = self._get_indent(accepted_media_type, renderer_context)
        ret = json.dumps(data, cls=self.encoder_class,
            indent=indent, ensure_ascii=self.ensure_ascii)

        # On python 2.x json.dumps() returns bytestrings if ensure_ascii=True,
        # but if ensure_ascii=False, the return type is underspecified,
        # and may (or may not) be unicode.
        # On python 3.x json.dumps() returns unicode strings.
        if isinstance(ret, six.text_type):
            return bytes(ret.encode("utf-8"))
        return ret

    def render_to_file(self, data, outputfile, accepted_media_type=None, renderer_context=None):
        """
        Render `data` into a file with JSON format.

        Writes via json.dump; NOTE(review): json.dump returns None, so `ret`
        is always None and the method returns bytes() only for the
        data-is-None early exit — confirm callers rely only on the side effect.
        """
        if data is None:
            return bytes()

        indent = self._get_indent(accepted_media_type, renderer_context)
        ret = json.dump(data, outputfile, cls=self.encoder_class,
            indent=indent, ensure_ascii=self.ensure_ascii)
class UnicodeJSONRenderer(JSONRenderer):
    """
    Renderer which serializes to JSON.
    Does *not* apply JSON's character escaping for non-ascii characters.
    """
    # In the original, the docstring appeared *after* this attribute, making
    # it a discarded string expression and leaving the class without __doc__.
    ensure_ascii = False
class JSONPRenderer(JSONRenderer):
    """
    Renderer which serializes to JSON and wraps the output in a JavaScript
    callback invocation (JSONP).
    """
    media_type = "application/javascript"
    format = "jsonp"
    callback_parameter = "callback"
    default_callback = "callback"
    charset = "utf-8"

    def get_callback(self, renderer_context):
        """
        Determine the name of the callback to wrap around the json output.
        """
        request = renderer_context.get("request", None)
        params = {}
        if request:
            params = request.QUERY_PARAMS or {}
        return params.get(self.callback_parameter, self.default_callback)

    def render(self, data, accepted_media_type=None, renderer_context=None):
        """
        Renders into jsonp, wrapping the json output in a callback function.

        Clients may set the callback function name using a query parameter
        on the URL, for example: ?callback=exampleCallbackName
        """
        context = renderer_context or {}
        callback = self.get_callback(context)
        body = super(JSONPRenderer, self).render(data, accepted_media_type, context)
        return b"".join([callback.encode(self.charset), b"(", body, b");"])
class XMLRenderer(BaseRenderer):
"""
Renderer which serializes to XML.
"""
media_type = "application/xml"
format = "xml"
charset = "utf-8"
def render(self, data, accepted_media_type=None, renderer_context=None):
"""
Renders `data` into serialized XML.
"""
if data is None:
return ""
stream = StringIO()
xml = SimplerXMLGenerator(stream, self.charset)
|
nophead/Skeinforge50plus | skeinforge_application/skeinforge_plugins/craft_plugins/home.py | Python | agpl-3.0 | 8,025 | 0.023427 | """
This page is in the table of contents.
Plugin to home the tool at beginning of each layer.
The home manual page is at:
http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Home
==Operation==
The default 'Activate Home' checkbox is on. When it is on, the functions described below will work, when it is off, nothing will be done.
==Settings==
===Name of Home File===
Default: home.gcode
At the beginning of each layer, home will add the commands of a gcode script with the name of the "Name of Home File" setting, if one exists. Home does not care if the text file names are capitalized, but some file systems do not handle file name cases properly, so to be on the safe side you should give them lower case names. Home looks for those files in the alterations folder in the .skeinforge folder in the home directory. If it doesn't find the file it then looks in the alterations folder in the skeinforge_plugins folder.
==Examples==
The following examples home the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and home.py.
> python home.py
This brings up the home dialog.
> python home.py Screw Holder Bottom.stl
The home tool is parsing the file:
Screw Holder Bottom.stl
..
The home tool has created the file:
.. Screw Holder Bottom_home.gcode
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import math
import os
import sys
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCraftedText(fileName, text, repository=None):
	"Home a gcode linear move file or text."
	gcodeText = archive.getTextIfEmpty(fileName, text)
	return getCraftedTextFromText(gcodeText, repository)
def getCraftedTextFromText(gcodeText, repository=None):
	"Home a gcode linear move text."
	# Skip files already processed by this plugin.
	if gcodec.isProcedureDoneOrFileIsEmpty(gcodeText, 'home'):
		return gcodeText
	activeRepository = repository
	if activeRepository is None:
		activeRepository = settings.getReadRepository(HomeRepository())
	if not activeRepository.activateHome.value:
		return gcodeText
	return HomeSkein().getCraftedGcode(gcodeText, activeRepository)
def getNewRepository():
	'Get a new HomeRepository holding this plugin\'s settings.'
	return HomeRepository()
def writeOutput(fileName, shouldAnalyze=True):
	"Home a gcode linear move file. Chain home the gcode if it is not already homed."
	# Delegates to the craft chain so earlier stages run first if needed.
	skeinforge_craft.writeChainTextWithNounMessage(fileName, 'home', shouldAnalyze)
class HomeRepository:
	"A class to handle the home settings."
	def __init__(self):
		"Set the default settings, execute title & settings fileName."
		# Settings are registered in display order for the preferences dialog.
		skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.home.html', self)
		self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Home', self, '')
		self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Home')
		self.activateHome = settings.BooleanSetting().getFromValue('Activate Home', self, True )
		self.nameOfHomeFile = settings.StringSetting().getFromValue('Name of Home File:', self, 'home.gcode')
		self.executeTitle = 'Home'

	def execute(self):
		"Home button has been clicked."
		# Expand the chosen name into the unmodified gcode files to process.
		fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled)
		for fileName in fileNames:
			writeOutput(fileName)
class HomeSkein:
	"A class to home a skein of extrusions."
	def __init__(self):
		self.distanceFeedRate = gcodec.DistanceFeedRate()
		self.extruderActive = False
		# Highest Z seen so far; hops rise to this height before homing.
		self.highestZ = None
		self.homeLines = []
		self.layerCount = settings.LayerCount()
		self.lineIndex = 0
		self.lines = None
		self.oldLocation = None
		# Set at each layer start; consumed by the next G1 move.
		self.shouldHome = False
		self.travelFeedRateMinute = 957.0

	def addFloat( self, begin, end ):
		"Add dive to the original height."
		beginEndDistance = begin.distance(end)
		# Move one edge width along the begin->end segment, at highestZ.
		alongWay = self.absoluteEdgeWidth / beginEndDistance
		closeToEnd = euclidean.getIntermediateLocation( alongWay, end, begin )
		closeToEnd.z = self.highestZ
		self.distanceFeedRate.addLine( self.distanceFeedRate.getLinearGcodeMovementWithFeedRate( self.travelFeedRateMinute, closeToEnd.dropAxis(), closeToEnd.z ) )

	def addHomeTravel( self, splitLine ):
		"Add the home travel gcode."
		location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
		self.highestZ = max( self.highestZ, location.z )
		if not self.shouldHome:
			return
		self.shouldHome = False
		if self.oldLocation == None:
			return
		# Turn the extruder off around the homing sequence, then restore it.
		if self.extruderActive:
			self.distanceFeedRate.addLine('M103')
		self.addHopUp( self.oldLocation )
		self.distanceFeedRate.addLinesSetAbsoluteDistanceMode(self.homeLines)
		self.addHopUp( self.oldLocation )
		self.addFloat( self.oldLocation, location )
		if self.extruderActive:
			self.distanceFeedRate.addLine('M101')

	def addHopUp(self, location):
		"Add hop to highest point."
		locationUp = Vector3( location.x, location.y, self.highestZ )
		self.distanceFeedRate.addLine( self.distanceFeedRate.getLinearGcodeMovementWithFeedRate( self.travelFeedRateMinute, locationUp.dropAxis(), locationUp.z ) )

	def getCraftedGcode( self, gcodeText, repository ):
		"Parse gcode text and store the home gcode."
		self.repository = repository
		self.homeLines = settings.getAlterationFileLines(repository.nameOfHomeFile.value)
		# Without a home script there is nothing to insert; pass through.
		if len(self.homeLines) < 1:
			return gcodeText
		self.lines = archive.getTextLines(gcodeText)
		self.parseInitialization( repository )
		for self.lineIndex in xrange(self.lineIndex, len(self.lines)):
			line = self.lines[self.lineIndex]
			self.parseLine(line)
		return self.distanceFeedRate.output.getvalue()

	def parseInitialization( self, repository ):
		'Parse gcode initialization and store the parameters.'
		for self.lineIndex in xrange(len(self.lines)):
			line = self.lines[self.lineIndex]
			splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
			firstWord = gcodec.getFirstWord(splitLine)
			self.distanceFeedRate.parseSplitLine(firstWord, splitLine)
			if firstWord == '(</extruderInitialization>)':
				self.distanceFeedRate.addTagBracketedProcedure('home')
				return
			elif firstWord == '(<edgeWidth>':
				self.absoluteEdgeWidth = abs(float(splitLine[1]))
			elif firstWord == '(<travelFeedRatePerSecond>':
				self.travelFeedRateMinute = 60.0 * float(splitLine[1])
			self.distanceFeedRate.addLine(line)

	def parseLine(self, line):
		"Parse a gcode line and add it to the bevel gcode."
		splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
		if len(splitLine) < 1:
			return
		firstWord = splitLine[0]
		if firstWord == 'G1':
			self.addHomeTravel(splitLine)
			self.oldLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
		elif firstWord == '(<layer>':
			self.layerCount.printProgressIncrement('home')
			# Request a homing sequence before the layer's first travel move.
			if len(self.homeLines) > 0:
				self.shouldHome = True
		elif firstWord == 'M101':
			self.extruderActive = True
		elif firstWord == 'M103':
			self.extruderActive = False
		self.distanceFeedRate.addLine(line)
def main():
	"Display the home dialog."
	arguments = sys.argv[1 :]
	if arguments:
		writeOutput(' '.join(arguments))
	else:
		settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == "__main__":
main()
|
auready/django | tests/admin_views/models.py | Python | bsd-3-clause | 24,291 | 0.000618 | import datetime
import os
import tempfile
import uuid
from django.contrib.auth.models import User
from django.contrib.contenttypes.fields import (
GenericForeignKey, GenericRelation,
)
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.files.storage import FileSystemStorage
from django.db import models
class Section(models.Model):
    """
    A simple section that links to articles, to test linking to related items
    in admin views.
    """
    name = models.CharField(max_length=100)

    def __str__(self):
        return self.name

    @property
    def name_property(self):
        """
        A property that simply returns the name. Used to test #24461
        """
        return self.name


class Article(models.Model):
    """
    A simple article to test admin views. Test backwards compatibility.
    """
    title = models.CharField(max_length=100)
    content = models.TextField()
    date = models.DateTimeField()
    section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True)
    another_section = models.ForeignKey(Section, models.CASCADE, null=True, blank=True, related_name='+')
    sub_section = models.ForeignKey(Section, models.SET_NULL, null=True, blank=True, related_name='+')

    def __str__(self):
        return self.title

    def model_year(self):
        # Callable list_display column; ordered by 'date' via the
        # admin_order_field attribute set below.
        return self.date.year
    model_year.admin_order_field = 'date'
    model_year.short_description = ''

    def model_year_reversed(self):
        # Same as model_year but with descending admin ordering.
        return self.date.year
    model_year_reversed.admin_order_field = '-date'
    model_year_reversed.short_description = ''
class Book(models.Model):
    """
    A simple book that has chapters.
    """
    name = models.CharField(max_length=100, verbose_name='¿Name?')

    def __str__(self):
        return self.name


class Promo(models.Model):
    # A promotion attached to a book.
    name = models.CharField(max_length=100, verbose_name='¿Name?')
    book = models.ForeignKey(Book, models.CASCADE)

    def __str__(self):
        return self.name


class Chapter(models.Model):
    title = models.CharField(max_length=100, verbose_name='¿Title?')
    content = models.TextField()
    book = models.ForeignKey(Book, models.CASCADE)

    def __str__(self):
        return self.title

    class Meta:
        # Use a utf-8 bytestring to ensure it works (see #11710)
        verbose_name = '¿Chapter?'


class ChapterXtra1(models.Model):
    # One-to-one extension of Chapter, exercising non-ASCII verbose names.
    chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
    xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')

    def __str__(self):
        return '¿Xtra1: %s' % self.xtra


class ChapterXtra2(models.Model):
    # Second one-to-one extension of Chapter, parallel to ChapterXtra1.
    chap = models.OneToOneField(Chapter, models.CASCADE, verbose_name='¿Chap?')
    xtra = models.CharField(max_length=100, verbose_name='¿Xtra?')

    def __str__(self):
        return '¿Xtra2: %s' % self.xtra


class RowLevelChangePermissionModel(models.Model):
    # Used to test per-object (row-level) change permissions in the admin.
    name = models.CharField(max_length=100, blank=True)


class CustomArticle(models.Model):
    # Article variant for testing custom admin templates.
    content = models.TextField()
    date = models.DateTimeField()


class ModelWithStringPrimaryKey(models.Model):
    # Exercises admin URL handling for string (possibly exotic) primary keys.
    string_pk = models.CharField(max_length=255, primary_key=True)

    def __str__(self):
        return self.string_pk

    def get_absolute_url(self):
        return '/dummy/%s/' % self.string_pk


class Color(models.Model):
    value = models.CharField(max_length=10)
    warm = models.BooleanField(default=False)

    def __str__(self):
        return self.value


# we replicate Color to register with another ModelAdmin
class Color2(Color):
    class Meta:
        proxy = True
class Thing(models.Model):
    title = models.CharField(max_length=20)
    # limit_choices_to restricts the admin dropdown to warm colors.
    color = models.ForeignKey(Color, models.CASCADE, limit_choices_to={'warm': True})
    pub_date = models.DateField(blank=True, null=True)

    def __str__(self):
        return self.title


class Actor(models.Model):
    name = models.CharField(max_length=50)
    age = models.IntegerField()
    title = models.CharField(max_length=50, null=True, blank=True)

    def __str__(self):
        return self.name


class Inquisition(models.Model):
    expected = models.BooleanField(default=False)
    leader = models.ForeignKey(Actor, models.CASCADE)
    country = models.CharField(max_length=20)

    def __str__(self):
        return "by %s from %s" % (self.leader, self.country)


class Sketch(models.Model):
    # Exercises limit_choices_to with related-field lookups and with
    # isnull conditions on two parallel foreign keys.
    title = models.CharField(max_length=100)
    inquisition = models.ForeignKey(
        Inquisition,
        models.CASCADE,
        limit_choices_to={
            'leader__name': 'Palin',
            'leader__age': 27,
            'expected': False,
        },
    )
    defendant0 = models.ForeignKey(
        Actor,
        models.CASCADE,
        limit_choices_to={'title__isnull': False},
        related_name='as_defendant0',
    )
    defendant1 = models.ForeignKey(
        Actor,
        models.CASCADE,
        limit_choices_to={'title__isnull': True},
        related_name='as_defendant1',
    )

    def __str__(self):
        return self.title


def today_callable_dict():
    # Callable limit_choices_to returning a dict, evaluated lazily.
    return {"last_action__gte": datetime.datetime.today()}


def today_callable_q():
    # Callable limit_choices_to returning a Q object, evaluated lazily.
    return models.Q(last_action__gte=datetime.datetime.today())
class Character(models.Model):
    username = models.CharField(max_length=100)
    last_action = models.DateTimeField()

    def __str__(self):
        return self.username


class StumpJoke(models.Model):
    # Uses the callable limit_choices_to helpers defined above, for both a
    # ForeignKey and a ManyToManyField.
    variation = models.CharField(max_length=100)
    most_recently_fooled = models.ForeignKey(
        Character,
        models.CASCADE,
        limit_choices_to=today_callable_dict,
        related_name="+",
    )
    has_fooled_today = models.ManyToManyField(Character, limit_choices_to=today_callable_q, related_name="+")

    def __str__(self):
        return self.variation


class Fabric(models.Model):
    # Exercises grouped (nested) choices in a CharField.
    NG_CHOICES = (
        ('Textured', (
            ('x', 'Horizontal'),
            ('y', 'Vertical'),
        )),
        ('plain', 'Smooth'),
    )
    surface = models.CharField(max_length=20, choices=NG_CHOICES)


class Person(models.Model):
    GENDER_CHOICES = (
        (1, "Male"),
        (2, "Female"),
    )
    name = models.CharField(max_length=100)
    gender = models.IntegerField(choices=GENDER_CHOICES)
    age = models.IntegerField(default=21)
    alive = models.BooleanField(default=True)

    def __str__(self):
        return self.name
class Persona(models.Model):
    """
    A simple persona associated with accounts, to test inlining of related
    accounts which inherit from a common accounts class.
    """
    name = models.CharField(blank=False, max_length=80)

    def __str__(self):
        return self.name


class Account(models.Model):
    """
    A simple, generic account encapsulating the information shared by all
    types of accounts.
    """
    username = models.CharField(blank=False, max_length=80)
    persona = models.ForeignKey(Persona, models.CASCADE, related_name="accounts")
    # Class-level label overridden by each concrete subclass.
    servicename = 'generic service'

    def __str__(self):
        return "%s: %s" % (self.servicename, self.username)


class FooAccount(Account):
    """A service-specific account of type Foo."""
    servicename = 'foo'


class BarAccount(Account):
    """A service-specific account of type Bar."""
    servicename = 'bar'


class Subscriber(models.Model):
    name = models.CharField(blank=False, max_length=80)
    email = models.EmailField(blank=False, max_length=175)

    def __str__(self):
        return "%s (%s)" % (self.name, self.email)


class ExternalSubscriber(Subscriber):
    # Multi-table-inheritance child used to test child-model admin behavior.
    pass


class OldSubscriber(Subscriber):
    # Second MTI child of Subscriber, parallel to ExternalSubscriber.
    pass
class Media(models.Model):
    name = models.CharField(max_length=60)


class Podcast(Media):
    release_date = models.DateField()

    class Meta:
        ordering = ('release_date',)  # overridden in PodcastAdmin


class Vodcast(Media):
    # Explicit parent_link OneToOneField instead of the implicit MTI link.
    media = models.OneToOneField(Media, models.CASCADE, primary_key=True, parent_link=True)
    released = models.BooleanField(default=False)
class Parent(models.Model):
name = models.CharField(max_length=128)
def clean(self):
if self.name == '_invalid':
|
bluesliverx/smartthings-src | apps/wifi-104-ssdp-server/lib/ssdp.py | Python | apache-2.0 | 8,142 | 0.001105 | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2005, Tim Potter <tpot@samba.org>
# Copyright 2006 John-Mark Gurney <gurney_j@resnet.uroegon.edu>
# Copyright (C) 2006 Fluendo, S.A. (www.fluendo.com).
# Copyright 2006,2007,2008,2009 Frank Scholz <coherence@beebits.net>
# Copyright 2016 Erwan Martin <public@fzwte.net>
#
# Implementation of a SSDP server.
#
import random
i | mport time
import socket
import logging
from email.utils import formatdate
from errno import | ENOPROTOOPT
# Standard SSDP multicast endpoint (UPnP device architecture).
SSDP_PORT = 1900
SSDP_ADDR = '239.255.255.250'
# Identifier advertised by this server in its registrations.
SERVER_ID = 'Wifi 104 SSDP Server'

# Module-level logger; kept quiet by default.
logger = logging.getLogger('ssdp')
logger.setLevel('WARNING')
class SSDPServer:
"""A class implementing a SSDP server. The notify_received and
searchReceived methods are called when the appropriate type of
datagram is received by the server."""
known = {}
    def __init__(self):
        # The multicast socket is created lazily in run(); None until then.
        self.sock = None
    def run(self):
        """Bind to the SSDP multicast group and process datagrams forever.

        Blocks the calling thread; each received datagram is handed to
        datagram_received(). The 1s socket timeout keeps the loop responsive.
        """
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if hasattr(socket, "SO_REUSEPORT"):
            try:
                self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except socket.error as le:
                # RHEL6 defines SO_REUSEPORT but it doesn't work
                if le.errno == ENOPROTOOPT:
                    pass
                else:
                    raise

        # Join the SSDP multicast group on all interfaces.
        addr = socket.inet_aton(SSDP_ADDR)
        interface = socket.inet_aton('0.0.0.0')
        cmd = socket.IP_ADD_MEMBERSHIP
        self.sock.setsockopt(socket.IPPROTO_IP, cmd, addr + interface)
        self.sock.bind(('0.0.0.0', SSDP_PORT))
        self.sock.settimeout(1)

        while True:
            try:
                data, addr = self.sock.recvfrom(1024)
                self.datagram_received(data, addr)
            except socket.timeout:
                continue
        # NOTE(review): unreachable — the loop above has no break/exit path,
        # so shutdown() is never called from here.
        self.shutdown()
def shutdown(self):
for st in self.known:
if self.known[st]['MANIFESTATION'] == 'local':
self.do_byebye(st)
def datagram_received(self, data, host_port):
"""Handle a received multicast datagram."""
(host, port) = host_port
try:
header, payload = data.decode().split('\r\n\r\n')[:2]
except ValueError as err:
logger.error(err)
return
lines = header.split('\r\n')
cmd = lines[0].split(' ')
lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
lines = filter(lambda x: len(x) > 0, lines)
headers = [x.split(':', 1) for x in lines]
headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))
logger.info('SSDP command %s %s - from %s:%d' % (cmd[0], cmd[1], host, port))
logger.debug('with headers: {}.'.format(headers))
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
# SSDP discovery
self.discovery_request(headers, (host, port))
elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
# SSDP presence
logger.debug('NOTIFY *')
else:
logger.warning('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
def register(self, manifestation='', usn='', st='', location='', server=SERVER_ID, cache_control='max-age=1800', silent=False,
host=None):
"""Register a service or device that this SSDP server will
respond to."""
if not manifestation or not usn or not st or not location:
raise Exception('invalid arguments to register SSDP server')
logging.info('Registering %s (%s)' % (st, location))
self.known[usn] = {}
self.known[usn]['USN'] = usn
self.known[usn]['LOCATION'] = location
self.known[usn]['ST'] = st
self.known[usn]['EXT'] = ''
self.known[usn]['SERVER'] = server
self.known[usn]['CACHE-CONTROL'] = cache_control
self.known[usn]['MANIFESTATION'] = manifestation
self.known[usn]['SILENT'] = silent
self.known[usn]['HOST'] = host
self.known[usn]['last-seen'] = time.time()
if manifestation == 'local' and self.sock:
self.do_notify(usn)
def unregister(self, usn):
logger.info("Un-registering %s" % usn)
del self.known[usn]
def is_known(self, usn):
return usn in self.known
def send_it(self, response, destination, delay, usn):
logger.debug('send discovery response delayed by %ds for %s to %r' % (delay, usn, destination))
try:
self.sock.sendto(response.encode(), destination)
except (AttributeError, socket.error) as msg:
logger.warning("failure sending out byebye notification: %r" % msg)
def discovery_request(self, headers, host_port):
"""Process a discovery request. The response must be sent to
the address specified by (host, port)."""
(host, port) = host_port
logger.info('Discovery request from (%s,%d) for %s' % (host, port, headers['st']))
logger.info('Discovery request for %s' % headers['st'])
# Do we know about this service?
for i in self.known.values():
if i['MANIFESTATION'] == 'remote':
continue
if headers['st'] == 'ssdp:all' and i['SILENT']:
continue
if i['ST'] == headers['st'] or headers['st'] == 'ssdp:all':
response = ['HTTP/1.1 200 OK']
usn = None
for k, v in i.items():
if k == 'USN':
usn = v
if k not in ('MANIFESTATION', 'SILENT', 'HOST'):
response.append('%s: %s' % (k, v))
if usn:
response.append('DATE: %s' % formatdate(timeval=None, localtime=False, usegmt=True))
response.extend(('', ''))
delay = random.randint(0, int(headers['mx']))
self.send_it('\r\n'.join(response), (host, port), delay, usn)
def do_notify(self, usn):
"""Do notification"""
if self.known[usn]['SILENT']:
return
logger.info('Sending alive notification for %s' % usn)
resp = [
'NOTIFY * HTTP/1.1',
'HOST: %s:%d' % (SSDP_ADDR, SSDP_PORT),
'NTS: ssdp:alive',
]
stcpy = dict(self.known[usn].items())
stcpy['NT'] = stcpy['ST']
del stcpy['ST']
del stcpy['MANIFESTATION']
del stcpy['SILENT']
del stcpy['HOST']
del stcpy['last-seen']
resp.extend(map(lambda x: ': '.join(x), stcpy.items()))
resp.extend(('', ''))
logger.debug('do_notify content', resp)
try:
self.sock.sendto('\r\n'.join(resp).encode(), (SSDP_ADDR, SSDP_PORT))
self.sock.sendto('\r\n'.join(resp).encode(), (SSDP_ADDR, SSDP_PORT))
except (AttributeError, socket.error) as msg:
logger.warning("failure sending out alive notification: %r" % msg)
def do_byebye(self, usn):
"""Do byebye"""
logger.info('Sending byebye notification for %s' % usn)
resp = [
'NOTIFY * HTTP/1.1',
'HOST: %s:%d' % (SSDP_ADDR, SSDP_PORT),
'NTS: ssdp:byebye',
]
try:
stcpy = dict(self.known[usn].items())
stcpy['NT'] = stcpy['ST']
del stcpy['ST']
del stcpy['MANIFESTATION']
del stcpy['SILENT']
del stcpy['HOST']
del stcpy['last-seen']
resp.extend(map(lambda x: ': '.join(x), stcpy.items()))
resp.extend(('', ''))
logger.debug('do_byebye content', resp)
if self.sock:
try:
self.sock.sendto('\r\n'.join(resp), (SSDP_ADDR, SSDP_PORT))
except (AttributeError, socket.error) as msg:
logger.error("failure sending out byebye notification: %r" % msg)
except KeyError as msg:
logger.error("error building byebye notification: %r" % msg)
|
tudennis/LeetCode---kamyu104-11-24-2015 | Python/move-zeroes.py | Python | mit | 1,485 | 0 | from __future__ import print_function
# Time: O(n)
# Space: O(1)
# Given an array nums, write a function to move all 0's
# to the end of it while maintaining the relative order
# of the non-zero elements.
#
# For example, given nums = [0, 1, 0, 3, 12], after
# calling your function, nums should be [1, 3, 12, 0, 0].
#
# Note:
# You must do this in-place without making a copy of the array.
# Minimize the total number of operations.
class Solution(object):
    def moveZeroes(self, nums):
        """Move all zeroes to the end of nums in place, keeping the
        relative order of the non-zero elements.

        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        pos = 0
        # Swap each non-zero element into the next "filled" slot; zeroes
        # bubble toward the end while non-zero order is preserved.
        # range() (not Py2-only xrange) keeps this portable to Python 3.
        for i in range(len(nums)):
            if nums[i]:
                nums[i], nums[pos] = nums[pos], nums[i]
                pos += 1

    def moveZeroes2(self, nums):
        """Alternative solution via a stable sort keyed on "is zero".

        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        # list.sort is stable, so non-zero elements keep their relative
        # order while zeroes (key True) sink to the end.  Replaces the
        # Python-2-only cmp= comparator, which Python 3 rejects.
        nums.sort(key=lambda v: v == 0)
class Solution2(object):
    def moveZeroes(self, nums):
        """Move all zeroes to the end of nums in place, keeping the
        relative order of the non-zero elements.

        :type nums: List[int]
        :rtype: void Do not return anything, modify nums in-place instead.
        """
        pos = 0
        # First pass: compact every non-zero value to the front.
        # range() (not Py2-only xrange) keeps this portable to Python 3.
        for i in range(len(nums)):
            if nums[i]:
                nums[pos] = nums[i]
                pos += 1
        # Second pass: zero-fill the remaining tail.
        for i in range(pos, len(nums)):
            nums[i] = 0
if __name__ == '__main__':
    nums = [0, 1, 0, 3, 12]
    # moveZeroes mutates its argument and returns None, so print the
    # mutated list (the original printed the None return value).
    Solution().moveZeroes(nums)
    print(nums)
|
tempbottle/ironpython3 | Tests/compat/sbs_exceptions/try_except3.py | Python | apache-2.0 | 904 | 0.009956 | #####################################################################################
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# This source code is subject to terms and conditions of the Apache License, Version 2.0. A
# copy of the license can be found in the License.html file at the root of this distribution. If
# you cannot locate the Apache License, Version 2.0, please send an email to
# ironpy@microsoft.com. By using this source code in any fashion, you are agreeing to be bound
# by the terms of the Apache License, Version 2.0.
#
# You must not remove this notice, or any other, from this software.
#
#
#####################################################################################
# Drive the shared sbs_exceptions test matrix using the third try/except
# generator variant defined in the sibling `shared` package.
from common import runtests
from .shared import try_except_maker3
from .shared import setGenerator, test_exceptions

# Select the generator that builds this variant's try/except blocks,
# then run the common exception tests against it.
setGenerator(try_except_maker3)
runtests(test_exceptions)
|
wernersa/Streprogen_web | app/__init__.py | Python | gpl-3.0 | 1,393 | 0.007179 | # -*- coding: utf-8 -*-
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
import os
from os import listdir
from os.path import isfile, join
import random
import datetime
def files_in_dir(directory):
    """Return the names of the plain files directly inside *directory*.

    :param directory: The directory to scan (subdirectories are skipped).
    :return: List of file names (not full paths).
    """
    names = []
    for entry in listdir(directory):
        if isfile(join(directory, entry)):
            names.append(entry)
    return names
def random_string(length):
    """Return a random string drawn from an unambiguous alphabet
    (upper-case letters and digits, minus look-alikes like 0/O and 1/I).

    :param length: Length of the returned string.
    :return: String of random characters.
    """
    alphabet = 'QAZWSXEDCRFVTGBYHNUJMKLP23456789'
    return ''.join(random.choice(alphabet) for _ in range(length))
def is_christmas():
    """Return True during the pre-Christmas window (December 11-29)."""
    today = datetime.datetime.now()
    return today.month == 12 and 10 < today.day < 30
# Absolute path of this package's directory; used to locate the SQLite file.
basedir = os.path.abspath(os.path.dirname(__file__))

app = Flask(__name__)
#app.config['CSRF_ENABLED'] = True
app.config['SECRET_KEY'] = '5d6d3e2u8d5g2D4S5DSF2sdf5s1df531sef'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'database.db')
db = SQLAlchemy(app)

# Expose these helpers to Jinja templates.
app.jinja_env.globals.update(enumerate=enumerate, is_christmas=is_christmas)

from . import views, models

# Create database if it's not there
# NOTE(review): this is a substring test, so a name such as
# 'database.db-journal' would also suppress creation -- confirm intended.
for file in files_in_dir(basedir):
    if 'database.db' in file:
        break
else:
    db.create_all()
    print('No DB. Creating....')
jemromerol/apasvo | bin/apasvo-generator.py | Python | gpl-3.0 | 21,718 | 0.001105 | #!/usr/bin/python2.7
# encoding: utf-8
'''Earthquake Generator
A tool that generates synthetic seismic signals.
@author: Jose Emilio Romero Lopez
@copyright: Copyright 2013-2014, Jose Emilio Romero Lopez.
@license: GPL
@contact: jemromerol@gmail.com
This file is part of APASVO.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import argparse
import os
import sys
from apasvo._version import __version__
from apasvo.utils import clt, parse, futils
from apasvo.utils.formats import rawfile
from apasvo.picking import eqgenerator
def print_settings(args):
    """Print the current generator settings to standard output.

    Args:
        args: Command-line input arguments.
    """
    write = sys.stdout.write

    def row(label, value):
        # Right-align the label in a 30-character column.
        write("%30s: %s\n" % (label, value))

    write("\nGeneral settings:\n")
    row("Signal frequency(Hz)", args.fs)
    row("Length(s)", args.length)
    row("Start time(s)", args.t_event)
    row("Noise power(dB)", args.P_noise_db)
    # The event/filter-bank parameters only apply when the event is
    # synthesized (no input files provided).
    if not args.FILEIN:
        row("Event power(dB)", args.gen_event_power)
        write("\nFilter bank settings:\n")
        row("Start frequency(Hz)", args.f_low)
        row("End frequency(Hz)", args.f_high)
        row("Subband bandwidth(Hz)", args.bandwidth)
        row("Subband overlap(Hz)", args.overlap)
        row("Start envelope length(s)", args.low_period)
        row("End envelope length(s)", args.high_period)
        row("Start amplitude", args.low_amp)
        row("End amplitude", args.high_amp)
    write("\n")
    sys.stdout.flush()
def generate(FILEIN, length, t_event, output, gen_event_power=5.0, n_events=1,
gen_noise_coefficients=False, output_format='binary',
datatype='float64', byteorder='native', **kwargs):
"""Generates synthetic earthquake signals with background noise and saves
them to file.
The function accepts a list of command-line arguments and renders synthetic
seismic data in two ways: If a list of input files containing seismic data
is provided, the function generates a new output signal for each one of
the files by adding background noise. If no input file is provided,
the function generates a list of synthetic seismic signals.
Args:
FILEIN: A list of binary or text file objects storing seismic data.
length: Length of rendered seismic signals, in seconds.
If FILEIN is None, this parameter has no effect.
t_event: Start time of rendered earthquake, given in seconds from the
| beginning of the signal.
If FILEIN is None, this parameter has no effect.
output: Output file name (absolute path).
If no input file is provided and n_events is greater than 1, the
| name of each generated file will be followed by its ordinal number.
E.g. given FILEIN = None, output = 'example.out' and n_events = 5,
the function will generate 5 synthetic files named:
'example00.out', 'example01.out', 'example02.out', 'example03.out'
and 'example04.out'.
gen_event_power: Earthquake power in dB.
If FILEIN is None, this parameter has no effect.
Default: 5.0.
n_events: No. of signals to generate.
If FILEIN is None, this parameter has no effect.
Default: 1.
gen_noise_coefficients: A binary or text file object containing a list
of numeric coefficients of a FIR filter that models the background
noise.
Default value is False, meaning unfiltered white noise is used
to model the background noise.
output_format: Output file format. Possible values are 'binary' or
'text'. Default: 'binary'.
datatype: Data-type of generated data. Default value is 'float64'.
If FILEIN is not None, this parameter is also the datatype of
input data.
byteorder: Byte-order of generated data. Possible values are
'little-endian', 'big-endian' and 'native'.
If FILEIN is not None, this parameter is also the format of
input data.
Default value is 'native'.
"""
fs = kwargs.get('fs', 50.0)
# Configure generator
clt.print_msg("Configuring generator... ")
generator = eqgenerator.EarthquakeGenerator(**kwargs)
clt.print_msg("Done\n")
# Load noise coefficients
if gen_noise_coefficients:
if futils.istextfile(gen_noise_coefficients):
f = open(gen_noise_coefficients, 'r')
else:
f = open(gen_noise_coefficients, 'rb')
clt.print_msg("Loading noise coefficients from %s... " %
f.name)
generator.load_noise_coefficients(f, dtype=datatype,
byteorder=byteorder)
clt.print_msg("Done\n")
# Process input files
basename, ext = os.path.splitext(output)
filename_out = output
# If a list of input files containing seismic data
# is provided, generate a new output signal for each one of
# the files by adding background noise.
if FILEIN:
fileno = 0
for f in FILEIN:
# Read input signal
fin_handler = rawfile.get_file_handler(f, dtype=datatype,
byteorder=byteorder)
clt.print_msg("Loading seismic signal from %s... " %
fin_handler.filename)
signal = fin_handler.read()
clt.print_msg("Done\n")
# Generate output filename
if len(FILEIN) > 1:
filename_out = "%s%02.0i%s" % (basename, fileno, ext)
fileno += 1
clt.print_msg("Generating artificial signal in %s... " %
filename_out)
# Add background noise to signal
eq = generator.generate_noise(signal)
# Save outputs to file
if output_format == 'text':
fout_handler = rawfile.TextFile(filename_out, dtype=datatype,
byteorder=byteorder)
else:
fout_handler = rawfile.BinFile(filename_out, dtype=datatype,
byteorder=byteorder)
fout_handler.write(eq, header="Sample rate: %g Hz." % fs)
clt.print_msg("Done\n")
# If no input file is provided,
# generate a list of synthetic seismic signals.
else:
for i in xrange(n_events):
# Generate output filename
if n_events > 1:
filename_out = "%s%02.0i%s" % (basename, i, ext)
clt.print_msg("Generating artificial signal in %s... " %
filename_out |
psykokwak4/ydk-gen | sdk/python/core/tests/test_sanity_type_mismatch_errors.py | Python | apache-2.0 | 7,229 | 0.005948 | # ----------------------------------------------------------------
# Copyright 2016 Cisco Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------
"""test_sanity_type_mismatch_errors.py
Test type mismatch errors not covered by test_sanity_types.py
"""
from __future__ import absolute_import
import sys
import unittest
from ydk.models.ydktest import ydktest_sanity as ysanity
from ydk.models.ydktest import ydktest_sanity_types as ytypes
from ydk.providers import NetconfServiceProvider
from ydk.services import CRUDService
from ydk.errors import YPYModelError
from test_utils import assert_with_error
from test_utils import ParametrizedTestCase
from test_utils import get_device_info
# Expected YPYModelError message patterns (regexes) for the negative tests
# below.  Runtime object addresses are matched with the [0-9a-z]+ wildcard,
# and module paths allow an optional nested-class prefix ([a-zA-Z\.]*).
test_invalid_class_assignment_int_pattern = "Invalid value '1' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>'"
test_invalid_class_assignment_str_pattern = "Invalid value 'haha' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>'"
test_invalid_class_assignment_identity_pattern = "Invalid value '<ydk.models.ydktest.ydktest_sanity_types.AnotherOne object at [0-9a-z]+>' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>'"
test_invalid_class_assignment_enum_pattern = "Invalid value 'ydk.types.Enum.YLeaf\(none\)' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>'"
test_invalid_class_assignment_ylist_pattern = "Invalid value '\[<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*Ldata object at [0-9a-z]+>\]' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>"
test_invalid_class_assignment_yleaflist_pattern = "Invalid value 'YLeafList\('llstring', \[0, 1, 2, 3, 4\]\)' in '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>'"
test_invalid_list_assignment_int_pattern = "Attempt to assign value of '1' to YList ldata. Please use list append or extend method."
test_invalid_list_assignment_entity_pattern = "Attempt to assign value of '<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*One object at [0-9a-z]+>' to YList ldata. Please use list append or extend method."
test_invalid_list_assignment_llist_pattern = "Attempt to assign value of 'YLeafList\('llstring', \[0, 1, 2, 3, 4\]\)' to YList ldata. Please use list append or extend method."
test_invalid_llist_assignment_int_pattern = "Invalid value '1' in 'llstring'"
test_invalid_llist_assignment_list_pattern = "Invalid value '\[<ydk.models.ydktest.ydktest_sanity.[a-zA-Z\.]*Ldata object at [0-9a-z]+>\]' in 'llstring'"
class SanityYang(unittest.TestCase):
    """Negative tests: each case assigns a value of the wrong type and
    expects a YPYModelError whose message matches the corresponding
    pattern defined above (checked by the assert_with_error decorator)."""

    @classmethod
    def setUpClass(cls):
        # Connection parameters (hostname, username, ...) are injected as
        # class attributes by ParametrizedTestCase.parametrize().
        cls.ncc = NetconfServiceProvider(
            cls.hostname,
            cls.username,
            cls.password,
            cls.port,
            cls.protocol,
            cls.on_demand,
            cls.common_cache,
            cls.timeout)
        cls.crud = CRUDService()

    def setUp(self):
        # Start each test from a clean Runner configuration on the device.
        runner = ysanity.Runner()
        self.crud.delete(self.ncc, runner)

    def tearDown(self):
        runner = ysanity.Runner()
        self.crud.delete(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_int_pattern, YPYModelError)
    def test_invalid_class_assignment_int(self):
        runner = ysanity.Runner()
        runner.one = 1
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_str_pattern, YPYModelError)
    def test_invalid_class_assignment_str(self):
        runner = ysanity.Runner()
        runner.one = "haha"
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_identity_pattern, YPYModelError)
    def test_invalid_class_assignment_identity(self):
        runner = ysanity.Runner()
        runner.one = ytypes.AnotherOne()
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_enum_pattern, YPYModelError)
    def test_invalid_class_assignment_enum(self):
        runner = ysanity.Runner()
        runner.one = ysanity.YdkEnumTest.none
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_ylist_pattern, YPYModelError)
    def test_invalid_class_assignment_ylist(self):
        runner = ysanity.Runner()
        elem = ysanity.Runner.OneList.Ldata()
        elem.number, elem.name = 1, '1'
        runner.one_list.ldata.append(elem)
        runner.one = runner.one_list.ldata
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_class_assignment_yleaflist_pattern, YPYModelError)
    def test_invalid_class_assignment_yleaflist(self):
        runner = ysanity.Runner()
        runner.ytypes.built_in_t.llstring.extend([str(i) for i in range(5)])
        # The invalid assignment itself raises, so no crud call is needed.
        runner.one = runner.ytypes.built_in_t.llstring

    @assert_with_error(test_invalid_list_assignment_int_pattern, YPYModelError)
    def test_invalid_list_assignment_int(self):
        runner = ysanity.Runner()
        runner.one_list.ldata = 1
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_list_assignment_entity_pattern, YPYModelError)
    def test_invalid_list_assignment_entity(self):
        runner = ysanity.Runner()
        runner.one_list.ldata = runner.one
        # Fixed: was self.crud.crud(...), which is not a CRUDService
        # method; the decorated assignment above raises before this line.
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_list_assignment_llist_pattern, YPYModelError)
    def test_invalid_list_assignment_llist(self):
        runner = ysanity.Runner()
        runner.ytypes.built_in_t.llstring.extend([str(i) for i in range(5)])
        runner.one_list.ldata = runner.ytypes.built_in_t.llstring
        # Fixed: was self.crud.crud(...) -- see note above.
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_llist_assignment_int_pattern, YPYModelError)
    def test_invalid_llist_assignment_int(self):
        runner = ysanity.Runner()
        runner.ytypes.built_in_t.llstring = 1
        self.crud.create(self.ncc, runner)

    @assert_with_error(test_invalid_llist_assignment_list_pattern, YPYModelError)
    def test_invalid_llist_assignment_list(self):
        runner = ysanity.Runner()
        elem = ysanity.Runner.OneList.Ldata()
        elem.number, elem.name = 1, '1'
        runner.one_list.ldata.append(elem)
        runner.ytypes.built_in_t.llstring = runner.one_list.ldata
        self.crud.create(self.ncc, runner)
if __name__ == '__main__':
    device, non_demand, common_cache, timeout = get_device_info()
    suite = unittest.TestSuite()
    suite.addTest(ParametrizedTestCase.parametrize(
        SanityYang,
        device=device,
        non_demand=non_demand,
        common_cache=common_cache,
        timeout=timeout))
    # Exit non-zero when any test failed so CI can detect the failure.
    ret = not unittest.TextTestRunner(verbosity=2).run(suite).wasSuccessful()
    sys.exit(ret)
|
sadig/DC2 | components/dc2-lib/dc2/lib/logging/__init__.py | Python | gpl-2.0 | 890 | 0.001125 | # -*- coding: utf-8 -*-
#
# (DC)² - DataCenter Deployment Control
# Copyright (C) 2010, 2011, 2012, 2013, 2014 Stephan Adig <sh@sourcecode.de>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Expose AppLogger from the sibling `applogger` module at this package's
# top level (the import itself is the package's only public API).
from applogger import AppLogger  # noqa
|
nadrees/PyEuler | 0004.py | Python | unlicense | 416 | 0.00241 | '''
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
'''
# Iterate both factors from 999 down to 100 so every 3-digit pair is covered.
nums = range(999, 99, -1)
allProducts = [x * y for x in nums for y in nums]
# A palindrome reads the same forwards and backwards.
palindromeProducts = [p for p in allProducts if str(p) == str(p)[::-1]]
answer = max(palindromeProducts)
print(answer)
|
ChainsAutomation/chains | lib/chains/common/addict.py | Python | gpl-2.0 | 7,719 | 0.000259 | from inspect import isgenerator
import re
import copy
# version: 0.4.0
# author: Mats Julian Olsen
# license: MIT
# https://github.com/mewwts/addict/
class Dict(dict):
    """
    Dict is a subclass of dict, which allows you to get AND SET(!!)
    items in the dict using the attribute syntax!
    When you previously had to write:
    my_dict = {'a': {'b': {'c': [1, 2, 3]}}}
    you can now do the same simply by:
    my_Dict = Dict()
    my_Dict.a.b.c = [1, 2, 3]
    Or for instance, if you'd like to add some additional stuff,
    where you'd with the normal dict would write
    my_dict['a']['b']['d'] = [4, 5, 6],
    you may now do the AWESOME
    my_Dict.a.b.d = [4, 5, 6]
    instead. But hey, you can always use the same syntax as a regular dict,
    however, this will not raise TypeErrors or AttributeErrors at any time
    while you try to get an item. A lot like a defaultdict.
    """

    def __init__(self, *args, **kwargs):
        """
        If we're initialized with a dict, make sure we turn all the
        subdicts into Dicts as well.

        Accepted positional arguments: mappings, (key, value) 2-tuples,
        and iterables/generators of (key, value) pairs.
        """
        for arg in args:
            if not arg:
                # Skip empty/falsy arguments entirely.
                continue
            elif isinstance(arg, dict):
                for key, val in arg.items():
                    self[key] = val
            elif isinstance(arg, tuple) and (not isinstance(arg[0], tuple)):
                # A single (key, value) pair.
                self[arg[0]] = arg[1]
            elif isinstance(arg, (list, tuple)) or isgenerator(arg):
                # An iterable of (key, value) pairs.
                for key, val in arg:
                    self[key] = val
            else:
                raise TypeError("Dict does not understand "
                                "{0} types".format(type(arg)))
        for key, val in kwargs.items():
            self[key] = val

    def __setattr__(self, name, value):
        """
        setattr is called when the syntax a.b = 2 is used to set a value.
        """
        # Refuse to shadow real class attributes/methods (e.g. 'update').
        if hasattr(Dict, name):
            raise AttributeError("'Dict' object attribute "
                                 "'{0}' is read-only".format(name))
        else:
            self[name] = value

    def __setitem__(self, name, value):
        """
        This is called when trying to set a value of the Dict using [].
        E.g. some_instance_of_Dict['b'] = val. If 'val
        """
        # Recursively wrap dicts (and dicts inside lists/tuples) before
        # storing, so nested access keeps the attribute syntax working.
        value = self._hook(value)
        super(Dict, self).__setitem__(name, value)

    @classmethod
    def _hook(cls, item):
        """
        Called to ensure that each dict-instance that are being set
        is a addict Dict. Recurses.
        """
        if isinstance(item, dict):
            return cls(item)
        elif isinstance(item, (list, tuple)):
            # Preserve the container type while hooking each element.
            return type(item)(cls._hook(elem) for elem in item)
        return item

    def __getattr__(self, item):
        # Attribute access falls through to item access (see __getitem__).
        return self.__getitem__(item)

    def __getitem__(self, name):
        """
        This is called when the Dict is accessed by []. E.g.
        some_instance_of_Dict['a'];
        If the name is in the dict, we return it. Otherwise we set both
        the attr and item to a new instance of Dict.
        """
        # NOTE: even a plain read of a missing key creates (and stores) an
        # empty Dict -- defaultdict-like behavior; prune() can remove them.
        if name not in self:
            self[name] = Dict()
        return super(Dict, self).__getitem__(name)

    def __delattr__(self, name):
        """ Is invoked when del some_addict.b is called. """
        del self[name]

    # Matches keys that are valid Python identifiers (used by __dir__).
    _re_pattern = re.compile('[a-zA-Z_][a-zA-Z0-9_]*')

    def __dir__(self):
        """
        Return a list of addict object attributes.
        This includes key names of any dict entries, filtered to the subset of
        valid attribute names (e.g. alphanumeric strings beginning with a
        letter or underscore). Also includes attributes of parent dict class.
        """
        dict_keys = []
        for k in self.keys():
            if isinstance(k, str):
                m = self._re_pattern.match(k)
                if m:
                    dict_keys.append(m.string)
        obj_attrs = list(dir(Dict))
        return dict_keys + obj_attrs

    def _ipython_display_(self):
        # IPython rich-display hook: plain text rendering.
        print(str(self))  # pragma: no cover

    def _repr_html_(self):
        # Jupyter HTML rendering falls back to the plain repr.
        return str(self)

    def prune(self, prune_zero=False, prune_empty_list=True):
        """
        Removes all empty Dicts and falsy stuff inside the Dict.
        E.g
        >>> a = Dict()
        >>> a.b.c.d
        {}
        >>> a.a = 2
        >>> a
        {'a': 2, 'b': {'c': {'d': {}}}}
        >>> a.prune()
        >>> a
        {'a': 2}
        Set prune_zero=True to remove 0 values
        E.g
        >>> a = Dict()
        >>> a.b.c.d = 0
        >>> a.prune(prune_zero=True)
        >>> a
        {}
        Set prune_empty_list=False to have them persist
        E.g
        >>> a = Dict({'a': []})
        >>> a.prune()
        >>> a
        {}
        >>> a = Dict({'a': []})
        >>> a.prune(prune_empty_list=False)
        >>> a
        {'a': []}
        """
        for key, val in list(self.items()):
            # Falsy scalars (except 0 unless prune_zero) are dropped;
            # lists are handled separately below.
            if ((not val) and ((val != 0) or prune_zero) and
                    not isinstance(val, list)):
                del self[key]
            elif isinstance(val, Dict):
                # Prune the subtree first, then drop it if it emptied out.
                val.prune(prune_zero, prune_empty_list)
                if not val:
                    del self[key]
            elif isinstance(val, (list, tuple)):
                new_iter = self._prune_iter(val, prune_zero, prune_empty_list)
                if (not new_iter) and prune_empty_list:
                    del self[key]
                else:
                    if isinstance(val, tuple):
                        new_iter = tuple(new_iter)
                    self[key] = new_iter

    @classmethod
    def _prune_iter(cls, some_iter, prune_zero=False, prune_empty_list=True):
        # Helper for prune(): returns a pruned copy of a list/tuple.
        new_iter = []
        for item in some_iter:
            if item == 0 and prune_zero:
                continue
            elif isinstance(item, Dict):
                item.prune(prune_zero, prune_empty_list)
                if item:
                    new_iter.append(item)
            elif isinstance(item, (list, tuple)):
                new_item = type(item)(
                    cls._prune_iter(item, prune_zero, prune_empty_list))
                if new_item or not prune_empty_list:
                    new_iter.append(new_item)
            else:
                new_iter.append(item)
        return new_iter

    def to_dict(self):
        """ Recursively turn your addict Dicts into dicts. """
        base = {}
        for key, value in self.items():
            if isinstance(value, type(self)):
                base[key] = value.to_dict()
            elif isinstance(value, (list, tuple)):
                # Convert Dicts nested inside lists/tuples as well.
                base[key] = type(value)(
                    item.to_dict() if isinstance(item, type(self)) else
                    item for item in value)
            else:
                base[key] = value
        return base

    def copy(self):
        """
        Return a disconnected deep copy of self. Children of type Dict, list
        and tuple are copied recursively while values that are instances of
        other mutable objects are not copied.
        """
        return Dict(self.to_dict())

    def __deepcopy__(self, memo):
        """ Return a disconnected deep copy of self. """
        y = self.__class__()
        # Register early so self-referential structures terminate.
        memo[id(self)] = y
        for key, value in self.items():
            y[copy.deepcopy(key, memo)] = copy.deepcopy(value, memo)
        return y

    def update(self, d):
        """ Recursively merge d into self. """
        for k, v in d.items():
            # Overwrite unless both sides are dicts, in which case merge.
            if ((k not in self) or
                    (not isinstance(self[k], dict)) or
                    (not isinstance(v, dict))):
                self[k] = v
            else:
                self[k].update(v)
|
# Advent of Code 2016, day 19 ("An Elephant Named Joseph").
PLAYER_COUNT = 3001330


def steal_left(count):
    """Part 1: each elf steals from the elf on their left.

    Repeatedly drops every other entry (parity tracked across rounds)
    until one elf remains; returns that elf's number.
    """
    number = [i for i in range(1, count + 1)]
    last = len(number) % 2 != 0
    while len(number) > 1:
        # Parity of the next round depends on this round's length and
        # which side the elimination pattern ended on.
        next_last = len(number) % 2 != last
        number = [j for i, j in enumerate(number) if i % 2 != last]
        last = next_last
    return number[0]


def steal_across(count):
    """Part 2: each elf steals from the elf directly across the circle.

    Eliminates victims in bulk per pass (collecting them in a set before
    filtering) instead of simulating one theft at a time.
    """
    number = [i for i in range(1, count + 1)]
    while len(number) > 1:
        # First half-pass: elf at index i eliminates the elf across.
        pop = set()
        last = 0
        for i in range(len(number) // 2):
            last = number[i]
            pop.add(number[(2 * i + (len(number) - i) // 2) % len(number)])
        number = [i for i in number if i not in pop]
        if len(number) == 1:
            break
        # Second half-pass: continue from just after the last thief.
        pop = set()
        start = number.index(last) + 1
        for i in range(start, len(number)):
            pop.add(number[(i + (len(number) + i - start) // 2) % len(number)])
        number = [i for i in number if i not in pop]
    return number[0]


if __name__ == '__main__':
    # Guarded so importing this module does not run the 3-million-elf
    # simulation; output is unchanged when executed as a script.
    print('#1', steal_left(PLAYER_COUNT))
    print('#2', steal_across(PLAYER_COUNT))
|
cydenix/OpenGLCffi | OpenGLCffi/GL/EXT/KHR/debug.py | Python | mit | 2,870 | 0.010453 | from OpenGLCffi.GL import params
# Stub declarations for the GL_KHR_debug entry points (core names plus the
# KHR-suffixed aliases).  Each @params decorator describes the wrapped GL
# call's parameter names for the cffi binding layer; the Python bodies are
# intentionally empty because the real implementation is provided at
# runtime by the decorator.

@params(api='gl', prms=['source', 'type', 'severity', 'count', 'ids', 'enabled'])
def glDebugMessageControl(source, type, severity, count, ids, enabled):
    pass

@params(api='gl', prms=['source', 'type', 'id', 'severity', 'length', 'buf'])
def glDebugMessageInsert(source, type, id, severity, length, buf):
    pass

@params(api='gl', prms=['callback', 'userParam'])
def glDebugMessageCallback(callback, userParam):
    pass

@params(api='gl', prms=['count', 'bufSize', 'sources', 'types', 'ids', 'severities', 'lengths', 'messageLog'])
def glGetDebugMessageLog(count, bufSize, sources, types, ids, severities, lengths, messageLog):
    pass

@params(api='gl', prms=['source', 'id', 'length', 'message'])
def glPushDebugGroup(source, id, length, message):
    pass

@params(api='gl', prms=[])
def glPopDebugGroup():
    pass

@params(api='gl', prms=['identifier', 'name', 'length', 'label'])
def glObjectLabel(identifier, name, length, label):
    pass

@params(api='gl', prms=['identifier', 'name', 'bufSize', 'length', 'label'])
def glGetObjectLabel(identifier, name, bufSize, length, label):
    pass

@params(api='gl', prms=['ptr', 'length', 'label'])
def glObjectPtrLabel(ptr, length, label):
    pass

@params(api='gl', prms=['ptr', 'bufSize', 'length', 'label'])
def glGetObjectPtrLabel(ptr, bufSize, length, label):
    pass

@params(api='gl', prms=['pname', 'params'])
def glGetPointerv(pname, params):
    pass

# KHR-suffixed aliases of the entry points above.

@params(api='gl', prms=['source', 'type', 'severity', 'count', 'ids', 'enabled'])
def glDebugMessageControlKHR(source, type, severity, count, ids, enabled):
    pass

@params(api='gl', prms=['source', 'type', 'id', 'severity', 'length', 'buf'])
def glDebugMessageInsertKHR(source, type, id, severity, length, buf):
    pass

@params(api='gl', prms=['callback', 'userParam'])
def glDebugMessageCallbackKHR(callback, userParam):
    pass

@params(api='gl', prms=['count', 'bufSize', 'sources', 'types', 'ids', 'severities', 'lengths', 'messageLog'])
def glGetDebugMessageLogKHR(count, bufSize, sources, types, ids, severities, lengths, messageLog):
    pass

@params(api='gl', prms=['source', 'id', 'length', 'message'])
def glPushDebugGroupKHR(source, id, length, message):
    pass

@params(api='gl', prms=[])
def glPopDebugGroupKHR():
    pass

@params(api='gl', prms=['identifier', 'name', 'length', 'label'])
def glObjectLabelKHR(identifier, name, length, label):
    pass

@params(api='gl', prms=['identifier', 'name', 'bufSize', 'length', 'label'])
def glGetObjectLabelKHR(identifier, name, bufSize, length, label):
    pass

@params(api='gl', prms=['ptr', 'length', 'label'])
def glObjectPtrLabelKHR(ptr, length, label):
    pass

@params(api='gl', prms=['ptr', 'bufSize', 'length', 'label'])
def glGetObjectPtrLabelKHR(ptr, bufSize, length, label):
    pass

@params(api='gl', prms=['pname', 'params'])
def glGetPointervKHR(pname, params):
    pass
|
jthorniley/pyusb2ax | example.py | Python | gpl-2.0 | 1,401 | 0.009993 | import | usb2ax
import time
import math
import sys
with usb2ax.Controller(fix_sync_read_delay = True) as dxl:
servo_list = dxl.servo_list
if len(servo_list) == 0:
raise "Nothing connected..."
sys.exit()
print "Servo: \t" + "\t".join( [str(x) for x in servo_list] ) + "\tRead rate (Hz)\tNumber of errors"
buflen = 1000
freq_buffer = [0.0] * buflen
| i = 0
pos_data = [0]*len(servo_list)
n_read_errors = 0
try:
t0 = time.time()
while True:
pos = math.sin(t0*math.pi)*50.0
pos = int(512+pos)
dxl.sync_write(servo_list,"goal_position",[pos]*len(servo_list))
done = False
while not done:
try:
pos_data = dxl.sync_read(servo_list,"present_position")
done = True
except usb2ax.ReadError, e:
n_read_errors += 1
t1 = time.time()
freq = 1.0/(t1-t0)
t0 = t1
freq_buffer[i] = freq
mean_freq = sum(freq_buffer)/float(buflen)
i += 1
i = i % buflen
pos_string = "".join(["{:<8d}".format(x) for x in pos_data])
sys.stdout.write("\r\t{}{:<8.2f}\t{:<8d}".format(pos_string, mean_freq, n_read_errors) )
sys.stdout.flush()
except KeyboardInterrupt, e:
print ""
|
mumuwoyou/vnpy | vn.trader/vtFunction.py | Python | mit | 1,565 | 0.008578 | # encoding: UTF-8
"""
包含一些开放中常用的函数
"""
import decimal
import json
import os
from datetime import datetime
MAX_NUMBER = 10000000000000
MAX_DECIMAL = 4
#----------------------------------------------------------------------
def safeUnicode(value):
"""检查接口数据潜在的错误,保证转化为的字符串正确"""
# 检查是数字接近0时会出现的浮点数上限
if type(value) is int or type(value) is float:
if value > MAX_NUMBER:
value = 0
# 检查防止小数点位过多
if type(value) is float:
d = decimal.Decimal(str(value))
if abs(d.as_tuple().exponent) > MAX_DECIMAL:
value = round(value, ndigits=MAX_DECIMAL)
return unicode(value)
#----------------------------------------------------------------------
def loadMongoSetting():
"""载入MongoDB数据库的配置"""
fileName = 'VT_setting.json'
path = os.path.abspath(os.path.dirname(__file__))
fileName = os.path.join(path, fileName)
try:
f = file(fileName)
setting = json.load(f)
host = setting['mongoHost']
port = setting['mongoPort']
log | ging = setting['mongoLogging']
except:
host = 'localhost'
port = 27017
logging = False
|
return host, port, logging
#----------------------------------------------------------------------
def todayDate():
"""获取当前本机电脑时间的日期"""
return datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
|
nchaparr/Geospatial-Analysis-with-Python | 1138_08_01-ndvi.py | Python | cc0-1.0 | 3,886 | 0.013124 | """
Output a normalized vegetative index
"""
import gdal, gdalnumeric, ogr
import Image, ImageDraw
def imageToArray(i):
"""
Converts a Python Imaging Library
array to a gdalnumeric image.
"""
a=gdalnumeric.numpy.fromstring(i.tostring(),'b')
a.shape=i.im.size[1], i.im.size[0]
return a
def world2Pixel(geoMatrix, x, y):
"""
Uses a gdal geomatrix (gdal.GetGeoTransform())
to calculate the pixel location of a
geospatial coordinate
"""
ulX = geoMatrix[0]
ulY = geoMatrix[3]
xDist = geoMatrix[1]
yDist = geoMatrix[5]
rtnX = geoMatrix[2]
rtnY = geoMatrix[4]
pixel = int((x - ulX) / xDist)
line = int((ulY - y) / xDist)
return (pixel, line)
# Multispectral image used
# to create the NDVI. Must
# have red and infrared
# bands
source = "farm.tif"
# Output geotiff file name
target = "ndvi.tif"
# Load the source data as a gdalnumeric array
srcArray = gdalnumeric.LoadFile(source)
# Also load as a gdal image to
# get geotransform (world file) info
srcImage = gdal.Open(source)
geoTrans = srcImage.GetGeoTransform()
# Red and infrared (or near infrared) bands
r = srcArray[1]
ir = srcArray[2]
## Clip a field out of the bands using a
## field boundary shapefile
# Create an OGR layer from a Field boundary shapefile
field = ogr.Open("field.shp")
# Must define a "layer" to keep OGR happy
lyr = field.GetLayer("field")
# Only one polygon in this shapefile
poly = lyr.GetNextFeature()
# Convert the layer extent to image pixel coordinates
minX, maxX, minY, maxY = lyr.GetExtent()
ulX, ulY = world2Pixel(geoTrans, minX, maxY)
lrX, lrY = world2Pixel(geoTrans, maxX, minY)
# Calculate the pixel size of the new image
pxWidth = int(lrX - ulX)
pxHeight = int(lrY - ulY)
# Create a blank image of the correct size
# that will serve as our mask
clipped = gdalnumeric.numpy.zeros((3, pxHeight, pxWidth), \
gdalnumeric.numpy.uint8)
#mmask = gdalnumeric.zeros((3, pxHeight, pxWidth), gdalnumeric.UnsignedInt8)
#rgb = rgb.astype(gdalnumeric.UnsignedInt8)
rClip = r[ulY:lrY, ulX:lrX]
irClip = ir[ulY:lrY, ulX:lrX]
# Create a new geomatrix for the image
geoTrans = list(geoTrans)
geoTrans[0] = minX
geoTrans[3] = maxY
# Map points to pixels for drawing
# the field boundary on a blank
# 8-bit, black and white, mask image.
points = []
pixels = []
# Grab the polygon geometry
geom = poly.GetGeometryRef()
pts = geom.GetGeometryRef(0)
# Loop through geometry and turn
# the points into an easy-to-manage
# Python list
for p in range(pts.GetPointCount()):
points.append((pts.GetX(p), pts.GetY(p)))
# Loop through the points and map to pixels.
# Append the pixels to a pixel list
for p in points:
pixels.append(world2Pixel(geoTrans, p[0], p[1]))
# Create the raster polygon image
rasterPoly = Image.new("L", (pxWidth, pxHeight), 1)
# Create a PIL drawing object
rasterize = ImageDraw.Draw(rasterPoly)
# Dump the pixels to the image
rasterize.polygon(pixels, 0)
# Hand the image back to gdal/gdalnumeric
# so we can use it as an array mask
mask = imageToArray(rasterPoly)
# Clip the red band using the mask
rClip = gdalnumeric.numpy.choose(mask, \
(rClip, 0)).astype(gdalnumeric.numpy.uint8)
# Clip the infrared band using the mask
irClip = gdalnumeric.numpy.choose(mask, \
(irClip, 0)).astype(gdalnumeric.numpy.uint8)
# We don't care about numpy warnings
# due to NaN values from clipping
gdalnumeric.numpy.seterr(all="ignore")
# NDVI equation: (infrared - red) / (infrared + red)
# *1.0 converts values to floats,
# +1.0 prevents ZeroDivis | ionErrors
ndvi = 1.0 * (irClip - rClip) / irClip + rClip + 1.0
# Remove any NaN values from the final product
ndvi = gdalnumeric.numpy.nan_to_num(ndvi)
# Save ndvi as tiff
gdalnumeric.SaveArray(ndvi, target, \
format="GTiff | ", prototype=source)
|
damonchen/chan | chan/core/templates/uwsgi_handler.py | Python | bsd-2-clause | 429 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import click
from {{project}} import app as application
|
@click.com | mand()
@click.option('--debug/--no-debug', default=True,
envvar='REPO_DEBUG')
@click.option('--host', default='0.0.0.0')
@click.option('--port', default=5000)
def main(debug, host, port):
application.debug = debug
application.run(host=host, port=port)
if __name__ == '__main__':
main()
|
renzon/gae-continuous-delivery | test/testloader.py | Python | mit | 635 | 0.001575 | #!/usr/bin/env python
# c | oding: utf-8
import unittest
import sys
import os
PROJECT_PATH = os.path.sep.join(os.path.abspath(__file__).split(os.path.sep)[:-2])
ROOT_PATH = os.path.dirname(__file__)
if __name__ == '__main__':
if 'GAE_SDK' in os.environ:
SDK_PATH = os.environ['GAE_SDK']
sys.path.insert(0, SDK_PATH)
import dev_appserver
dev_appse | rver.fix_sys_path()
sys.path.append(os.path.join(PROJECT_PATH, 'src'))
tests = unittest.TestLoader().discover(ROOT_PATH, "*tests.py")
result = unittest.TextTestRunner().run(tests)
if not result.wasSuccessful():
sys.exit(1)
|
rickyHong/dqn-repl | sandbox/old/imageTest.py | Python | gpl-3.0 | 1,421 | 0.009148 | f = open("pixels.dat", "r")
pixs = f.readline()
f.close()
print len(pixs)
from PIL import Image
import numpy as np
img = Image.new('RGB', (160, 210), "black") # create a new black image
pixels = img.load() # create the pixel map
# Load the hardcoded grayscale array
from grayscale import getGrayscaleArray
colMat = getGrayscaleArray()
for i in range(len(pixs)/2):
row = i % 160
column = i/160
hex1 = int(pixs[i*2], 16)
# Division by 2 because: http://en.wikipedia.org/wiki/List_of_video_game_console_palettes
hex2 = int(pixs[i*2+1], 16)/2
temp = int(colMat[hex2, hex1])
pixels[row, column] = (temp, temp, temp)
img.show()
# Example 1: take one PIL.Image file, preprocess and get its pixel array
from preprocessing import preprocessImage
img2 = preprocessImage(img)
pixels = img2.load()
# Example 2: take a sequence that DOESN'T contain actions and preprocess the images in-place
from preprocessing import preprocessSequenceWithActions
sequence = [img.copy() | , 45, img.copy(), 'thisdoesntmatter', img.copy(), 'this neither'] #,deepcopy(img),'thisdoesntmatter',deepcopy(img),deepcopy(img)]
sequence = preprocessSequenceWithActions(sequence)
# Example 3: take a sequence that DOES contain actions and preprocess the images in-place
from preprocessing import preprocessSequenceNoActions
sequence = [ | img.copy(), img.copy(), img.copy()]
sequence = preprocessSequenceNoActions(sequence)
|
tsaoyu/D3HRE | D3HRE/core/battery_models.py | Python | gpl-3.0 | 18,409 | 0.002988 | import numpy as np
def min_max_model(power, use, battery_capacity):
"""
Minimal maximum battery model, obsoleted
:param power: Pandas TimeSeries, total power from renewable system
:param use: float, unit W fixed load of the power system
:param battery_capacity: float, unit Wh battery capacity
:return: list, energy history in battery
"""
power = power.tolist()
energy = 0
energy_history = []
for p in power:
energy = min(battery_capacity, max(0, energy + (p - use) * 1))
energy_history.append(energy)
return energy_history
def soc_model_fixed_load(
power,
use,
battery_capacity,
depth_of_discharge=1,
discharge_rate=0.005,
battery_eff=0.9,
discharge_eff=0.8,
):
"""
Battery state of charge model with fixed load. (Obsolete)
:param power: Pandas TimeSeries of total power from renewable system
:param use: float unit W fixed load of the power system
:param battery_capacity: float unit Wh battery capacity
:param depth_of_discharge: float 0 to 1 maximum allowed discharge depth
:param discharge_rate: self discharge rate
:param battery_eff: optional 0 to 1 battery energy store efficiency default 0.9
:param discharge_eff: battery discharge efficiency 0 to 1 default 0.8
:return: tuple SOC: state of charge, energy history: E in battery,
unmet_history: unmet energy history, waste_history: waste energy history
"""
DOD = depth_of_discharge
power = power.tolist()
use_history = []
waste_history = []
unmet_history = []
energy_history = []
energy = 0
for p in power:
if p >= use:
use_history.append(use)
unmet_history.append(0)
energy_new = energy * (1 - discharge_rate) + (p - use) * battery_eff
if energy_new < battery_capacity:
| energy = energy_new # battery energy got update
waste_history.append(0)
else:
waste_history.append(p - use)
energy = energy
elif p < use:
energy_new = energy * (1 - discharge_rate) + (p - use) / discharge_eff
if energy_new > (1 - DOD) * battery_capacity:
energy = energy_new
unmet_history.append(0)
| waste_history.append(0)
use_history.append(use)
elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
energy = energy * (1 - discharge_rate) + p * battery_eff
unmet_history.append(use - p)
use_history.append(0)
waste_history.append(0)
else:
unmet_history.append(use - p)
use_history.append(0)
waste_history.append(p)
energy = energy
energy_history.append(energy)
if battery_capacity == 0:
SOC = np.array(energy_history)
else:
SOC = np.array(energy_history) / battery_capacity
return SOC, energy_history, unmet_history, waste_history, use_history
class Battery:
"""
A simple finite state based energy flow battery model.
"""
def __init__(self, capacity, config={}):
"""
Initialise the battery with a given capacity and configuration.
:param capacity: float, unit Wh
:param config: options including DOD, depth of discharge; sigma, self-discharge rate; eta_in, charge efficiency;
eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge; where all values shall between 0
and 1
"""
self.capacity = capacity
self.config = config
self.set_parameters()
def set_parameters(self):
"""
Setup the parameters using the config file, options including DOD, depth of discharge; sigma, self-discharge rate;
eta_in, charge efficiency; eta_out, discharge efficiency; init_charge, percentage of the battery pre-charge;
where all values shall between 0 and 1.
"""
try:
self.depth_of_discharge = self.config['simulation']['battery']['DOD']
self.discharge_rate = self.config['simulation']['battery']['sigma']
self.battery_eff = self.config['simulation']['battery']['eta_in']
self.discharge_eff = self.config['simulation']['battery']['eta_out']
self.init_charge = self.config['simulation']['battery']['B0']
except KeyError:
print('Parameter is not found in config file, default values are used.')
self.depth_of_discharge = 1
self.discharge_rate = 0.005
self.battery_eff = 0.9
self.discharge_eff = 0.8
self.init_charge = 1
def run(self, power, use):
"""
Run the battery model with a list of power generation and usage.
:param power: list, power generation unit in W
:param use: list, power usage unit in W
:return: None
"""
DOD = self.depth_of_discharge
battery_capacity = self.capacity
discharge_rate = self.discharge_rate
discharge_eff = self.discharge_eff
battery_eff = self.battery_eff
use_history = []
waste_history = []
unmet_history = []
energy_history = []
SOC = []
energy = self.init_charge * self.capacity
for p, u in zip(power, use):
if p >= u:
use_history.append(u)
unmet_history.append(0)
energy_new = energy * (1 - discharge_rate) + (p - u) * battery_eff
if energy_new < battery_capacity:
energy = energy_new # battery energy got update
waste_history.append(0)
else:
waste_history.append(p - u)
energy = energy
elif p < u:
energy_new = energy * (1 - discharge_rate) + (p - u) / discharge_eff
if energy_new > (1 - DOD) * battery_capacity:
energy = energy_new
unmet_history.append(0)
waste_history.append(0)
use_history.append(u)
elif energy * (1 - discharge_rate) + p * battery_eff < battery_capacity:
energy = energy * (1 - discharge_rate) + p * battery_eff
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(0)
else:
unmet_history.append(u - p)
use_history.append(0)
waste_history.append(p)
energy = energy
energy_history.append(energy)
SOC.append(energy / battery_capacity)
self.SOC = SOC
self.energy_history = energy_history
self.unmet_history = unmet_history
self.waste_history = waste_history
self.use_history = use_history
def battery_history(self):
"""
Return the history of the battery.
:return: np array, the SOC, energy in the battery, unmet power supply, wasted power and the supplied power unit in W
"""
history = np.vstack(
(
np.array(self.SOC),
np.array(self.energy_history),
np.array(self.unmet_history),
np.array(self.waste_history),
np.array(self.use_history),
)
)
return history
def lost_power_supply_probability(self):
"""
Return the lost power supply probability (LPSP) using the battery history.
:return: float, LPSP
"""
LPSP = 1 - self.unmet_history.count(0) / len(self.energy_history)
return LPSP
class Battery_managed:
"""
Battery managed is a the basic class for the demand load controllable battery model.
"""
def __init__(self, capacity, config={}):
"""
:param capacity: float, unit Wh
:param config: options including DOD, depth of discharge; sigma, self-discharge rate; eta_in, charge efficiency;
|
CatoTH/OpenSlides | server/openslides/global_settings.py | Python | mit | 3,385 | 0.000296 | import os
from openslides.utils.plugins import collect_plugins
MODULE_DIR = os.path.realpath(os.path.dirname(os.path.abspath(__file__)))
# This is not set to the docker environment
OPENSLIDES_USER_DATA_DIR = "/app/personal_data/var"
# Application definition
INSTALLED_APPS = [
"openslides.core",
"openslides.users",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.staticfiles",
"rest_framework",
"openslides.agenda",
"openslides.topics",
"openslides.motions",
"openslides.assignments",
"openslides.mediafiles",
"openslides.chat",
]
INSTALLED_PLUGINS = collect_plugins() # Adds all automatically collected plugins
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.locale.LocaleMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
"openslides.utils.autoupdate_bundle.AutoupdateBundleMiddleware",
]
ROOT_URLCONF = "openslides.urls"
ALLOWED_HOSTS = ["*"]
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
}
]
SESSION_ENGINE = "openslides.utils.sessions"
# Email
# https://docs.djangoproject.com/en/1.10/topics/email/
EMAIL_BACKEND = "django.core.mail.backends.smtp.EmailBackend"
EMAIL_TIMEOUT = (
5 # Timeout in seconds for blocking operations like the connection attempt
)
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = "en"
LANGUAGES = (
("en", "English"),
("de", "Deutsch"),
("fr", "Français"),
("es", "Español"),
("pt", "Português"),
("cs", "Český"),
("ru", "русский"),
)
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
LOCALE_PATHS = [os.path.join(MODULE_DIR, "locale")]
# Static files (CSS, JavaScript, Images)
STATIC_URL = "/static/"
STATICFILES_DIRS = [os.path.join(MODULE_DIR, "static")] + [
os.path.join(OPENSLIDES_USER_DATA_DIR, "static")
]
# Static files (CSS, JavaScript, Images)
STATIC_ROOT = os.path.join(OPENSLIDES_ | USER_DATA_DIR, "collected-static")
# Files
# https://docs.djangoproject.com/en/2.2/topics/files/
MEDIA_ROOT = os.path.join(OPENSLIDES_USER_DATA_DIR, "media", "")
MEDIA_URL = "/media/"
# Sessions and user authentication
# https://docs.djangoproject.com/en/2.2/topics/http/sess | ions/
# https://docs.djangoproject.com/en/2.2/topics/auth/
AUTH_USER_MODEL = "users.User"
AUTH_GROUP_MODEL = "users.Group"
AUTHENTICATION_BACKENDS = ["openslides.utils.auth_backend.ModelBackend"]
SESSION_COOKIE_NAME = "OpenSlidesSessionID"
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
CSRF_COOKIE_NAME = "OpenSlidesCsrfToken"
CSRF_COOKIE_AGE = None
PASSWORD_HASHERS = [
"django.contrib.auth.hashers.PBKDF2PasswordHasher",
"django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher",
"django.contrib.auth.hashers.Argon2PasswordHasher",
"django.contrib.auth.hashers.BCryptSHA256PasswordHasher",
"django.contrib.auth.hashers.BCryptPasswordHasher",
]
# Enable updating the last_login field for users on every login.
ENABLE_LAST_LOGIN_FIELD = False
|
rodrigozn/CW-Shop | tests/test_checkout.py | Python | bsd-3-clause | 7,657 | 0.00222 | import pytest
from django.conf import settings
from django.contrib.auth.models import AnonymousUser |
from mock import MagicMock, Mock
from prices import Price
from saleor.checkout import views
from saleor.checkout.core import STORAGE_SESSION_KEY, Checkout
from saleor.shipp | ing.models import ShippingMethodCountry
from saleor.userprofile.models import Address
def test_checkout_version():
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
storage = checkout.for_storage()
assert storage['version'] == Checkout.VERSION
@pytest.mark.parametrize('storage_data, expected_storage', [
({'version': Checkout.VERSION, 'new': 1}, {'version': Checkout.VERSION, 'new': 1}),
({'version': 'wrong', 'new': 1}, {'version': Checkout.VERSION}),
({'new': 1}, {'version': Checkout.VERSION}),
({}, {'version': Checkout.VERSION}),
(None, {'version': Checkout.VERSION}),
])
def test_checkout_version_with_from_storage(storage_data, expected_storage):
checkout = Checkout.from_storage(
storage_data, Mock(), AnonymousUser(), 'tracking_code')
storage = checkout.for_storage()
assert storage == expected_storage
def test_checkout_clear_storage():
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['new'] = 1
checkout.clear_storage()
assert checkout.storage is None
assert checkout.modified is True
def test_checkout_is_shipping_required():
cart = Mock(is_shipping_required=Mock(return_value=True))
checkout = Checkout(cart, AnonymousUser(), 'tracking_code')
assert checkout.is_shipping_required is True
def test_checkout_deliveries():
partition = Mock(
get_total=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY)),
get_price_per_item=Mock(return_value=Price(10, currency=settings.DEFAULT_CURRENCY)))
def f():
yield partition
partition.__iter__ = Mock(return_value=f())
cart = Mock(partition=Mock(return_value=[partition]),
currency=settings.DEFAULT_CURRENCY)
checkout = Checkout(
cart, AnonymousUser(), 'tracking_code')
deliveries = list(checkout.deliveries)
assert deliveries[0][1] == Price(0, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][2] == partition.get_total()
assert deliveries[0][0][0][0] == partition
def test_checkout_deliveries_with_shipping_method(monkeypatch):
shipping_cost = 5
items_cost = 5
partition = Mock(
is_shipping_required=MagicMock(return_value=True),
get_total=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY)),
get_price_per_item=Mock(return_value=Price(items_cost, currency=settings.DEFAULT_CURRENCY)))
def f():
yield partition
partition.__iter__ = Mock(return_value=f())
cart = Mock(partition=Mock(return_value=[partition]),
currency=settings.DEFAULT_CURRENCY)
shipping_method_mock = Mock(get_total=Mock(return_value=Price(shipping_cost, currency=settings.DEFAULT_CURRENCY)))
monkeypatch.setattr(Checkout, 'shipping_method', shipping_method_mock)
checkout = Checkout(
cart, AnonymousUser(), 'tracking_code')
deliveries = list(checkout.deliveries)
assert deliveries[0][1] == Price(shipping_cost, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][2] == Price(items_cost + shipping_cost, currency=settings.DEFAULT_CURRENCY)
assert deliveries[0][0][0][0] == partition
@pytest.mark.parametrize('user, shipping', [
(Mock(default_shipping_address='user_shipping'), 'user_shipping'),
(AnonymousUser(), None),
])
def test_checkout_shipping_address_with_anonymous_user(user, shipping):
checkout = Checkout(Mock(), user, 'tracking_code')
assert checkout.shipping_address == shipping
@pytest.mark.parametrize('address_objects, shipping', [
(Mock(get=Mock(return_value='shipping')), 'shipping'),
(Mock(get=Mock(side_effect=Address.DoesNotExist)), None),
])
def test_checkout_shipping_address_with_storage(address_objects, shipping, monkeypatch):
monkeypatch.setattr('saleor.checkout.core.Address.objects', address_objects)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_address'] = {'id': 1}
assert checkout.shipping_address == shipping
def test_checkout_shipping_address_setter():
address = Address(first_name='Jan', last_name='Kowalski')
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.shipping_address = address
assert checkout.storage['shipping_address'] == {
'city': u'', 'city_area': u'', 'company_name': u'', 'country': '', 'phone': u'',
'country_area': u'', 'first_name': 'Jan', 'id': None, 'last_name': 'Kowalski',
'postal_code': u'', 'street_address_1': u'', 'street_address_2': u''}
@pytest.mark.parametrize('shipping_address, shipping_method, value', [
(Mock(country=Mock(code='PL')),
Mock(country_code='PL', __eq__=lambda n, o: n.country_code == o.country_code),
Mock(country_code='PL')),
(Mock(country=Mock(code='DE')), Mock(country_code='PL'), None),
(None, Mock(country_code='PL'), None),
])
def test_checkout_shipping_method(shipping_address, shipping_method, value, monkeypatch):
queryset = Mock(get=Mock(return_value=shipping_method))
monkeypatch.setattr(Checkout, 'shipping_address', shipping_address)
monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_method_country_id'] = 1
assert checkout.shipping_method == value
def test_checkout_shipping_does_not_exists(monkeypatch):
queryset = Mock(get=Mock(side_effect=ShippingMethodCountry.DoesNotExist))
monkeypatch.setattr('saleor.checkout.core.ShippingMethodCountry.objects', queryset)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
checkout.storage['shipping_method_country_id'] = 1
assert checkout.shipping_method is None
def test_checkout_shipping_method_setter():
shipping_method = Mock(id=1)
checkout = Checkout(Mock(), AnonymousUser(), 'tracking_code')
assert checkout.modified is False
checkout.shipping_method = shipping_method
assert checkout.modified is True
assert checkout.storage['shipping_method_country_id'] == 1
@pytest.mark.parametrize('user, address', [
(AnonymousUser(), None),
(Mock(default_billing_address='billing_address',
addresses=Mock(is_authenticated=Mock(return_value=True))), 'billing_address'),
])
def test_checkout_billing_address(user, address):
checkout = Checkout(Mock(), user, 'tracking_code')
assert checkout.billing_address == address
@pytest.mark.parametrize('cart, status_code, url', [
(Mock(__len__=Mock(return_value=0)), 302, '/cart/'),
(Mock(__len__=Mock(return_value=1),
is_shipping_required=Mock(return_value=True)),
302, '/checkout/shipping-address/'),
(Mock(__len__=Mock(return_value=1),
is_shipping_required=Mock(return_value=False)),
302, '/checkout/summary/'),
(Mock(__len__=Mock(return_value=0),
is_shipping_required=Mock(return_value=False)), 302, '/cart/'),
])
def test_index_view(cart, status_code, url, rf):
checkout = Checkout(cart, AnonymousUser(), 'tracking_code')
request = rf.get('checkout:index')
request.user = checkout.user
request.session = {STORAGE_SESSION_KEY: checkout.for_storage()}
request.discounts = []
response = views.index_view(request, checkout, checkout.cart)
assert response.status_code == status_code
assert response.url == url
|
odoocn/odoomrp-wip | mrp_product_variants/__openerp__.py | Python | agpl-3.0 | 1,735 | 0 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
{
"name": "MRP - Product variants",
"version": "1.0",
"depends": [
"product",
"mrp",
"product_variants_no_automatic_creation",
"mrp_production_editable_scheduled_products",
],
"author": "OdooMRP team,"
| "AvanzOSC,"
"Serv. Tecnol. Avanzados - Pedro M. Baeza",
"contributors": [
| "Oihane Crucelaegui <oihanecrucelaegi@avanzosc.es>",
"Pedro M. Baeza <pedro.baeza@serviciosbaeza.com>",
"Ana Juaristi <ajuaristio@gmail.com>",
],
"category": "Manufacturing",
"website": "http://www.odoomrp.com",
"summary": "Customized product in manufacturing",
"data": [
"security/ir.model.access.csv",
"views/mrp_production_view.xml",
"views/product_attribute_view.xml",
],
"installable": True,
"post_init_hook": "assign_product_template",
}
|
yishenggudou/Alistar | Alistar/app.py | Python | bsd-2-clause | 674 | 0.007418 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# yishenggudou@gmail.com
# @timger http://weibo.com/zhanghaibo
__author__ = 'timgerk'
from flask imp | ort Flask, send | _from_directory, redirect
import os
DIR_PATH = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
app = Flask(__name__, static_url_path=DIR_PATH)
@app.route("/")
def index():
return redirect('app/index.html')
@app.route('/app/bower_components/<path:path>')
def send_app(path):
return send_from_directory('bower_components', path)
@app.route('/app/<path:path>')
def send_bower_components(path):
return send_from_directory('app', path)
if __name__ == "__main__":
app.run(debug=True)
|
kottoson/cs3240-labdemo | helper.py | Python | mit | 103 | 0.009709 | # h | elper.py
# K. Ottoson
# February 20, 2017
__author__ = "kjo9fq"
def greeting(msg | ):
print(msg) |
samdroid-apps/sugar | extensions/cpsection/modemconfiguration/__init__.py | Python | gpl-2.0 | 885 | 0 | # Copyright (C) 2009 Paraguay Educa, Martin Abente
#
# This program is free software; you can redistribu | te it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have | received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 US
from gettext import gettext as _
CLASS = 'ModemConfiguration'
ICON = 'module-modemconfiguration'
TITLE = _('Modem Configuration')
|
seibert/numba | numba/tests/test_typeguard.py | Python | bsd-2-clause | 765 | 0 | """
Tests to ensure that typeguard is working as expected.
This mostly contains negative tests as proof that typeguard can catch errors.
"""
import unittest
from numba.tests.support import TestCa | se, skip_unless_typeguard
def guard_args(val: int):
return
def guard_ret(val) -> int:
return val
@skip_unless_typeguard
class TestTypeGuard(TestCase):
def test_check_args(self):
with self.assertRaises(TypeError):
guard_args(float(1.2))
def test_check_ret(self):
with self.assertRaises(TypeError):
guard_ret(float(1.2))
def test_check_does_not_work_with_inner_func(self):
def guard(val: int) -> int:
ret | urn
guard(float(1.2))
if __name__ == '__main__':
unittest.main()
|
bdfoster/blumate | blumate/components/switch/command_line.py | Python | mit | 4,275 | 0 | """
Support for custom shell commands to turn a switch on/off.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.command_line/
"""
import logging
import subprocess
from blumate.components.switch import SwitchDevice
from blumate.const import CONF_VALUE_TEMPLATE
from blumate.helpers import template
_LOGGER = logging.getLogger(__name__)
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Find and return switches controlled by shell commands."""
switches = config.get('switches', {})
devices = []
for dev_name, properties in switches.items():
devices.append(
CommandSwitch(
hass,
properties.get('name', dev_name),
properties.get('oncmd', 'true'),
properties.get('offcmd', 'true'),
properti | es.get('statecmd', False),
properties.get(CONF_VALUE_TEMPLATE, False)))
add_devices_callback(devices)
# pylint: disable=too-many-instance-attributes
class CommandSwitch(SwitchDevice):
"""Representation a switch that can be toggled using shell commands."""
# pylint: disable=too-many-arguments
def __init__(self, | hass, name, command_on, command_off,
command_state, value_template):
"""Initialize the switch."""
self._hass = hass
self._name = name
self._state = False
self._command_on = command_on
self._command_off = command_off
self._command_state = command_state
self._value_template = value_template
@staticmethod
def _switch(command):
"""Execute the actual commands."""
_LOGGER.info('Running command: %s', command)
success = (subprocess.call(command, shell=True) == 0)
if not success:
_LOGGER.error('Command failed: %s', command)
return success
@staticmethod
def _query_state_value(command):
"""Execute state command for return value."""
_LOGGER.info('Running state command: %s', command)
try:
return_value = subprocess.check_output(command, shell=True)
return return_value.strip().decode('utf-8')
except subprocess.CalledProcessError:
_LOGGER.error('Command failed: %s', command)
@staticmethod
def _query_state_code(command):
"""Execute state command for return code."""
_LOGGER.info('Running state command: %s', command)
return subprocess.call(command, shell=True) == 0
@property
def should_poll(self):
"""Only poll if we have state command."""
return self._command_state is not None
@property
def name(self):
"""Return the name of the switch."""
return self._name
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if we do optimistic updates."""
return self._command_state is False
def _query_state(self):
"""Query for state."""
if not self._command_state:
_LOGGER.error('No state command specified')
return
if self._value_template:
return CommandSwitch._query_state_value(self._command_state)
return CommandSwitch._query_state_code(self._command_state)
def update(self):
"""Update device state."""
if self._command_state:
payload = str(self._query_state())
if self._value_template:
payload = template.render_with_possible_json_value(
self._hass, self._value_template, payload)
self._state = (payload.lower() == "true")
def turn_on(self, **kwargs):
"""Turn the device on."""
if (CommandSwitch._switch(self._command_on) and
not self._command_state):
self._state = True
self.update_ha_state()
def turn_off(self, **kwargs):
"""Turn the device off."""
if (CommandSwitch._switch(self._command_off) and
not self._command_state):
self._state = False
self.update_ha_state()
|
deepcharles/ruptures | src/ruptures/datasets/pw_constant.py | Python | bsd-2-clause | 1,465 | 0.000683 | """Piecewise constant signal (with noise)"""
import numpy as np
from ruptures.utils import draw_bkps
def pw_constant(
n_samples=200, n_features=1, n_bkps=3, noise_std=None, delta=(1, 10), seed=None
):
"""Return a piecewise constant signal and the associated changepoints.
Args:
n_samples (int): signal length
n_features (int, optional): number of dimensions
n_bkps (int, opti | onal): number of changepoints
noise_std (float, optional): noise std. If None, no noise is added
delta (tuple, optional): (delta_min, delta_max) max and m | in jump values
seed (int): random seed
Returns:
tuple: signal of shape (n_samples, n_features), list of breakpoints
"""
# breakpoints
bkps = draw_bkps(n_samples, n_bkps, seed=seed)
# we create the signal
signal = np.empty((n_samples, n_features), dtype=float)
tt_ = np.arange(n_samples)
delta_min, delta_max = delta
# mean value
center = np.zeros(n_features)
rng = np.random.default_rng(seed=seed)
for ind in np.split(tt_, bkps):
if ind.size > 0:
# jump value
jump = rng.uniform(delta_min, delta_max, size=n_features)
spin = rng.choice([-1, 1], n_features)
center += jump * spin
signal[ind] = center
if noise_std is not None:
noise = rng.normal(size=signal.shape) * noise_std
signal = signal + noise
return signal, bkps
|
spchal/Pwn-Write-ups | protostar/ex_heap1.py | Python | gpl-2.0 | 395 | 0.005063 | from pwn | import *
#change the host IP to your IP
sh = ssh(host='192.168.1.104', user='root',
password='godmode', port=22)
cmd = sh.set_working_directory('/opt/protostar/bin')
e = ELF("./heap1")
puts_add = p32(e.got["puts"])
winner = pack(0x8048494)
print "puts: ", puts_add
arg1 = "A"*20
arg | 1 += puts_add
arg2 = winner
print sh.run(['./heap1', arg1, arg2]).recvall().strip()
|
ActiveState/code | recipes/Python/523034_emulate_collectionsdefaultdict/recipe-523034.py | Python | mit | 1,492 | 0.006032 | try:
from collections import defaultdict
except:
class defaultdict(dict):
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not hasattr(default_factory, '__call__')):
raise TypeError('first argument must be callable')
dict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return dict.__getitem__(self, key)
except KeyError:
return self.__mis | sing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory | is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
return 'defaultdict(%s, %s)' % (self.default_factory,
dict.__repr__(self))
|
HoverHell/pyimapsmtpt | pyimapsmtpt/xmpptransport.py | Python | gpl-2.0 | 8,036 | 0.003116 | #!/usr/bin/env python
# coding: utf8
import logging
import signal
import time
import xmpp
from xmpp.browser import (
ERR_ITEM_NOT_FOUND,
ERR_JID_MALFORMED,
NS_COMMANDS,
NS_VERSION,
Browser,
Error,
Message,
NodeProcessed,
Presence,
)
from .common import jid_to_data
_log = logging.getLogger(__name__)
def event_to_data(event, add_event=True):
""" XMPP event to abstractised data """
res = dict(
_event=event,
_type=event.getType(),
frm=jid_to_data(event.getFrom()),
to=jid_to_data(event.getTo()),
body=event.getBody(),
subject=event.getSubject(),
)
return res
#######
## The Transport
#######
class Transport(object):
""" ...
Stopping: `this.online = False`, wait.
"""
online = 1
process_timeout = 5
## Message to be posted to XMPP server as the status when going offline
offlinemsg = ''
## For future filling
disco = None
def __init__(self, config, message_callback=None):
self.config = config
self.jid = config.xmpp_component_jid
self.conn = self._mk_conn(config)
if message_callback is None:
message_callback = lambda *ar, **kwa: None
self.message_callback = message_callback
def _mk_conn(self, config):
sasl = bool(config.xmpp_sasl_username)
if config.dump_protocol:
debug = ['always', 'nodebuilder']
else:
debug = []
xmpp_connection = xmpp.client.Component(
config.xmpp_component_jid, config.xmpp_component_port,
debug=debug,
sasl=sasl,
bind=config.xmpp_use_component_binding,
route=config.xmpp_use_route_wrap)
return xmpp_connection
#######
## Daemonstuff
#######
def setup_signals(self):
signal.signal(signal.SIGINT, self.sighandler)
signal.signal(signal.SIGTERM, self.sighandler)
def sighandler(self, signum, frame):
self.offlinemsg = 'Signal handler called with signal %s' % (signum,)
_log.info('Signal handler called with signal %s', signum)
self.online = 0
def pre_run(self, setup_signals=False, **kwa):
if setup_signals:
self.setup_signals()
if not self.xmpp_connect():
_msg = "Could not connect to XMPP server, or password mismatch."
_log.error(_msg)
raise Exception(_msg)
def run(self, pre_run=True, **kwa):
if pre_run:
self.pre_run(**kwa)
try:
return self.run_loop(**kwa)
finally:
pass
def run_loop(self, **kwa):
while self.online:
try:
conn = self.conn
conn.Process( # pylint: disable=no-member
self.process_timeout)
except KeyboardInterrupt:
raise
except IOError:
self.xmpp_reconnect()
except Exception as exc:
_log.error("xmpp process error: %r", exc)
if not self.conn.isConnected():
self.xmpp_reconnect()
#######
## XMPP stuff
#######
def send_message_data(self, msg_data, **kwa):
## TODO: support error events
msg = Message(**msg_data)
self.send_message(msg)
def send_message(self, msg, **kwa):
conn = self.conn
return conn.send( # pylint: disable=no-member
msg, **kwa)
def register_handlers(self):
conn = self.conn
conn.RegisterHandler( # pylint: disable=no-member
'message', self.xmpp_message)
conn.RegisterHandler( # pylint: disable=no-member
'presence', self.xmpp_presence)
self.disco = Browser()
self.disco.PlugIn(self.conn)
self.disco.setDiscoHandler(
self.xmpp_base_disco, node='',
jid=self.jid)
def xmpp_base_disco(self, con, event, ev_type):
fromjid = str(event.getFrom())
to = event.getTo()
node = event.getQuerynode()
# Type is either 'info' or 'items'
if to == self.jid:
if node == None:
if ev_type == 'info':
return dict(
ids=[dict(
category='gateway', type='smtp', |
name=self.config.xmpp_disco_name)],
features=[NS_VERSION, NS_COMMANDS])
if ev_type == 'items':
return []
else:
self.send_message(Error(event, ERR_ITEM_NOT_FOUND))
| raise NodeProcessed
else:
self.send_message(Error(event, ERR_JID_MALFORMED))
raise NodeProcessed
def xmpp_presence(self, con, event):
# Add ACL support
fromjid = event.getFrom()
ev_type = event.getType()
to = event.getTo()
if ev_type in ('subscribe', 'subscribed', 'unsubscribe', 'unsubscribed', 'unavailable'):
self.send_message(Presence(to=fromjid, frm=to, typ=ev_type))
elif ev_type == 'probe':
self.send_message(Presence(to=fromjid, frm=to))
elif ev_type == 'error':
return
else:
self.send_message(Presence(to=fromjid, frm=to))
def xmpp_connect(self):
connected = self.conn.connect((
self.config.xmpp_main_server, self.config.xmpp_component_port))
_log.info("connected: %r", connected)
while not connected:
time.sleep(5) ## XXXX: ...
connected = self.conn.connect((
self.config.xmpp_main_server, self.config.xmpp_component_port))
_log.info("connected: %r", connected)
self.register_handlers()
_log.info("trying auth")
connected = self.conn.auth(
self.config.xmpp_sasl_username or self.jid,
self.config.xmpp_secret)
_log.info("auth return: %r", connected)
return connected
def xmpp_reconnect(self):
## XXXX: ...Augh.
time.sleep(5) ## XXXX: ...
if not self.conn.reconnectAndReauth():
time.sleep(5) ## XXXX: ...
self.xmpp_connect()
def xmpp_message_preprocess(self, event, con=None):
ev_type = event.getType()
to = event.getTo()
## skip 'error' messages
## (example: recipient not found, `<message from='…'
## to='…@pymailt.…' type='error' id='1'>…<error code='503'
## type='cancel'>…<service-unavailable
## xmlns='urn:ietf:params:xml:ns:xmpp-stanzas'/>…`)
if ev_type == 'error':
_log.error("Error XMPP message: %r, %r", event, str(event))
return
## Messages to nowhere are irrelevant
if to.getNode() == '':
self.send_message(Error(event, ERR_ITEM_NOT_FOUND))
return
## XXXX: unclear. Probably makes sure an empty subject is presented as `None`
try:
if (event.getSubject() or '').strip() == '':
event.setSubject(None)
except AttributeError:
pass
event_data = event_to_data(event)
return event_data
def xmpp_message(self, con, event):
event_data = self.xmpp_message_preprocess(event, con=con)
if not event_data:
return
msg_kwa = dict(event_data, _event=event, _connection=con, _transport=self)
self.message_callback(msg_kwa)
def main():
from .confloader import get_config
config = get_config()
def debug_callback(event, **kwa):
print('\n '.join('%s: %r' % (a, b) for a, b in [
("Event", event),
("type", event.getType()),
("from", event.getFrom()),
("to", event.getTo()),
("...", event.__dict__),
]))
xmpp_transport = Transport(
config=config, message_callback=debug_callback)
xmpp_transport.run()
if __name__ == '__main__':
main()
|
Azure/azure-sdk-for-python | sdk/eventgrid/azure-eventgrid/azure/eventgrid/_generated/_event_grid_publisher_client.py | Python | mit | 3,272 | 0.003667 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from copy import deepcopy
from typing import TYPE_CHECKING
from azure.core import PipelineClient
from msrest import Deserializer, Serializer
from ._configuration import EventGridPublisherClientConfiguration
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Dict
from azure.core.rest import HttpRequest, HttpResponse
class EventGridPu | blisherClient(object):
"""EventGrid Python Publisher Client.
"""
def __init__(
self,
**kwargs # type: Any
):
# type: (...) -> None
base_url = 'https://{topicHostname}'
self._config = EventGridPublisherClientConfiguration(**kwargs)
self._client = PipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {} # type: Dict[str, | Any]
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self._serialize.client_side_validation = False
def send_request(
self,
request, # type: HttpRequest
**kwargs # type: Any
):
# type: (...) -> HttpResponse
"""Runs the network request through the client's chained policies.
We have helper methods to create requests specific to this service in `event_grid_publisher_client.rest`.
Use these helper methods to create the request you pass to this method.
>>> from event_grid_publisher_client.rest import build_publish_events_request
>>> request = build_publish_events_request(json=json, content=content, **kwargs)
<HttpRequest [POST], url: '/api/events'>
>>> response = client.send_request(request)
<HttpResponse: 200 OK>
For more information on this code flow, see https://aka.ms/azsdk/python/protocol/quickstart
For advanced cases, you can also create your own :class:`~azure.core.rest.HttpRequest`
and pass it in.
:param request: The network request you want to make. Required.
:type request: ~azure.core.rest.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to False.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.rest.HttpResponse
"""
request_copy = deepcopy(request)
request_copy.url = self._client.format_url(request_copy.url)
return self._client.send_request(request_copy, **kwargs)
def close(self):
# type: () -> None
self._client.close()
def __enter__(self):
# type: () -> EventGridPublisherClient
self._client.__enter__()
return self
def __exit__(self, *exc_details):
# type: (Any) -> None
self._client.__exit__(*exc_details)
|
anhstudios/swganh | data/scripts/templates/object/tangible/ship/crafted/reactor/shared_reactor_limiter_mk5.py | Python | mit | 479 | 0.045929 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/ship/crafted/reactor/shared_reactor_limiter_mk5.iff"
result.attribute_template_id = 8
result.stfName("space_crafting_n","reactor_limiter_mk5")
#### BEGIN MODIFICA | TIONS ####
#### END MODIFICATIONS #### |
return result |
brentp/goleft | depth/test/cmp.py | Python | mit | 562 | 0.005338 | import subprocess | as sp
import sys
goleft_bed = sys.argv[1]
bam = sys.argv[2]
for toks in (l.rstrip().split("\t") for l in open(goleft_bed)):
cmd = "samtools depth -a -Q 1 -r '%s:%d-%s' %s | awk '{s+=$3}END{if(NR==0){print 0}else{print s/%d}}'" % (toks[0], int(toks[1]) + 1, toks[2], bam, int(toks[2]) - int(toks[1]))
out = sp.check_output(cmd, shell=True).strip()
expected = float(toks[3])
if abs(expected - float(out.st | rip())) > 0.5:
print("ERROR")
print(float(out.strip()), expected)
print(cmd)
sys.exit(1)
|
citrix-openstack-build/pycadf | pycadf/identifier.py | Python | apache-2.0 | 1,525 | 0 | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
#
# Author: Matt Rutkowski <mrutkows@us.ibm.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limita | tions
# under the License.
import six
import uuid
from oslo.config import cfg
CONF = cfg.CONF
opts = [
cfg.StrOpt('namespace',
default='openstack',
help='namespace prefix for generated id'),
]
CONF.register_opts(opts, group='audit')
# TODO(mrutkows): make the namespace prefix configurable and have it resolve to
# a full openstack namespace/domain value via some declaration (e.g.
# "openstack:" == "http:\\www.openstac | k.org\")...
def generate_uuid():
return norm_ns(str(uuid.uuid4()))
def norm_ns(str_id):
prefix = CONF.audit.namespace + ':' if CONF.audit.namespace else ''
return prefix + str_id
# TODO(mrutkows): validate any cadf:Identifier (type) record against
# CADF schema. This would include schema validation as an optional parm.
def is_valid(value):
if not isinstance(value, six.string_types):
raise TypeError
return True
|
hipnusleo/laserjet | lib/core/loggers.py | Python | apache-2.0 | 1,506 | 0 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@Author: yyg
@Create: 2016MMDD
@LastUpdate: 2016-12-15 HH:MM:SS
@Version: 0.0
"""
from json import load
from logging import (Formatter, _defaultFormatter, exception,
getLogger, FileHandler, basicConfig, StreamHandler)
from cloghandler import ConcurrentRotatingFileHandler
from params import (LOG_CONF_FILE, LOG_LVL, LOGGER_NAME,
LOG_FILE, LOG_DAT_FMT, LOG_FMT)
class LaserjetLogger(object):
"""
Compatible to python 2.6+
"""
def __init__(self):
self.fmt = LOG_FMT
self.datefmt | = LOG_DAT_FMT
self._start()
def _start(self):
logger = getLogger(LOGGER_NAME)
log_handler = ConcurrentRotatingFileHandler(LOG_FILE)
log_formatter = Formatter(self.fmt, self.datefmt)
log_handler.setFormatter(log_formatter)
console_handler = StreamHandler()
console_handler.setFormatter(log_formatter)
logger.setLevel(LOG_LVL)
logger.addHandler(log_handler)
logger.addHandler(console_handler) |
logger.info("Logger activated")
def print_func(anything_str):
log = getLogger(LOGGER_NAME)
log.info(anything_str)
if __name__ == "__main__":
logger = LaserjetLogger()
test_pool = Pool()
for i in range(5):
test_pool.apply_async(print_func, args=(i,))
test_pool.close()
test_pool.join()
|
bossiernesto/melta | test/fixture/class_repositories.py | Python | bsd-3-clause | 637 | 0.00157 | from melta.dynamic.propertyMaker import PropertyMaker
property_maker = PropertyMaker()
class Person:
pass
person1 = Person()
property_maker.buildProperty(person1, "edad", 20) \
.buildProperty(person1, "altura", 180) \
.buildProperty(pe | rson1, "sexo", "male")
class House:
pass
house1 = House()
property_maker.buildProperty(house1, "antiguedad", 32) \
.buildProperty(house1, "tipo_casa", "bungalow") \
.buildProperty(house1, "mt2", 360)
house2 = House()
property_maker.buildProperty(house2, "building_age", 34) \
| .buildProperty(house2, "material", "brick") \
.buildProperty(house2, "sq2mts", 453) |
wattlebird/Chi | www/app/data.py | Python | mit | 3,269 | 0.033344 | from app import db, cache
from model import UserInfo
import pickle
from random import seed, randint
from heapq import nlargest
seed()
cache.clear()
fr = open('dat/a.dat','rb')
U = pickle.load(fr) # a user_num x 100 mat
unorm = pickle.load(fr)
fr.close()
#for i in xrange(len(unorm)):
# unorm[i]+=1
class DUser:
def __init__(self, id, sim):
self.id=id
self.sim=sim
def __lt__(self, other):
return self.sim<other.sim
def __le__(self, other):
return self.sim<=other.sim
def __eq__(self, other):
return self.sim==other.sim
def __ne__(self, other):
return self.sim<>other.sim
def __gt__(self, other):
return self.sim>other.sim
def __ge__(self, other):
return self.sim>=other.sim
@cache.memoize(timeout=600)
def qualified(db, username):
q=UserInfo.query.filter_by(name=username).first()
# q=db.session.query(UserInfo.name, UserInfo.count).filter(UserInfo.name=username).first()
if q and q.count:
return 1
elif q:
return 0
else:
return -1
@cache.memoize(timeout=600)
def similarlist(db, username):
q=UserInfo.query.filter_by(name=username).first()
simv=U.dot(U[q.index,:].T).toarray()
qlist=[]
for i in xrange(U.shape[0]):
qlist.append(DUser(id=i,
sim=simv[i][0]/(unorm[q.in | dex]*unorm[i])))
slist=nlargest(11,qlist)
rlist=[]
for i in xrange(1,11):
q=UserInfo.query.filter_by(index=slist[i].id).first()
rlist.append((q.name,round(_normalize(slist[i].sim),4)))
return rlist
@cache.memoize(timeout=600)
def getsim(db, username, candidate):
q=UserInfo.query.f | ilter_by(name=username).first()
u=U[q.index,:]
p=UserInfo.query.filter_by(name=candidate).first()
v=U[p.index,:]
return round(_normalize(u.dot(v.T).toarray()[0][0]/(unorm[q.index]*unorm[p.index])),4)
@cache.memoize(timeout=600)
def getrank(db, username, candidate):
q=UserInfo.query.filter_by(name=username).first()
simv=U.dot(U[q.index,:].T).toarray()
p=UserInfo.query.filter_by(name=candidate).first()
cnt=0
candidatesim = simv[p.index][0]/(unorm[q.index]*unorm[p.index])
for i in xrange(U.shape[0]):
if candidatesim<simv[i][0]/(unorm[q.index]*unorm[i]):
cnt+=1
raise Exception
return cnt
def _rank(a, i):
b=0
e=len(a)
a[b],a[i]=a[i],a[b]
i=j=1
while i!=e:
if a[b]<a[i]:
a[i],a[j]=a[j],a[i]
j+=1
i+=1
return j-1
def _pick_top_ten(qlist):
_qpick_ten(qlist,0,len(qlist))
_insort(qlist,0,11)
return qlist[1:11]
def _qpick_ten(a,b,e):
if e>11:
i=randint(b,e-1)
a[b],a[i]=a[i],a[b]
i=j=1
while i!=e:
if a[b]<a[i]:
a[i],a[j]=a[j],a[i]
j+=1
i+=1
a[b],a[j-1]=a[j-1],a[b]
if j==11:
return
elif j>11:
_qpick_ten(a,b,j)
else:
_qpick_ten(a,j,e)
def _insort(a,b,e):
for i in xrange(b+1,e):
key=a[i]
j=i-1
while j>=0:
if a[j]>key:
break
a[j+1]=a[j]
j-=1
a[j+1]=key
def _normalize(num):
return (num+1)/2 |
deepmind/interval-bound-propagation | examples/train.py | Python | apache-2.0 | 10,867 | 0.005521 | # coding=utf-8
# Copyright 2019 The Interval Bound Propagation Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Trains a verifiable model on Mnist or CIFAR-10."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import app
from absl import flags
from absl import logging
import interval_bound_propagation as ibp
import tensorflow.compat.v1 as tf
FLAGS = flags.FLAGS
flags.DEFINE_enum('dataset', 'mnist', ['mnist', 'cifar10'],
'Dataset (either "mnist" or "cifar10").')
flags.DEFINE_enum('model', 'tiny', ['tiny', 'small', 'medium', 'large'],
'Model size.')
flags.DEFINE_string('output_dir', '/tmp/ibp_model', 'Output directory.')
# Options.
flags.DEFINE_integer('steps', 60001, 'Number of steps in total.')
flags.DEFINE_integer('test_every_n', 2000,
'Number of steps between testing iterations.')
flags.DEFINE_integer('warmup_steps', 2000, 'Number of warm-up steps.')
flags.DEFINE_integer('rampup_steps', 10000, 'Number of ramp-up steps.')
flags.DEFINE_integer('batch_size', 200, 'Batch size.')
flags.DEFINE_float('epsilon', .3, 'Target epsilon.')
flags.DEFINE_float('epsilon_train', .33, 'Train epsilon.')
flags.DEFINE_string('learning_rate', '1e-3,1e-4@15000,1e-5@25000',
'Learning rate schedule of the form: '
'initial_learning_rate[,learning:steps]*. E.g., "1e-3" or '
'"1e-3,1e-4@15000,1e-5@25000".')
flags.DEFINE_float('nominal_xent_init', 1.,
'Initial weight for the nominal cross-entropy.')
flags.DEFINE_float('nominal_xent_final', .5,
'Final weight for the nominal cross-entropy.')
flags.DEFINE_float('verified_xent_init', 0.,
'Initial weight for the verified cross-entropy.')
flags.DEFINE_float('verified_xent_final', .5,
'Final weight for the verified cross-entropy.')
flags.DEFINE_float('crown_bound_init', 0.,
'Initial weight for mixing the CROWN bound with the IBP '
'bound in the verified cross-entropy.')
flags.DEFINE_float('crown_bound_final', 0.,
'Final weight for mixing the CROWN bound with the IBP '
'bound in the verified cross-entropy.')
flags.DEFINE_float('attack_xent_init', 0.,
'Initial weight for the attack cross-entropy.')
flags.DEFINE_float('attack_xent_final', 0.,
'Initial weight for the attack cross-entropy.')
def show_metrics(step_value, metric_values, loss_value=None):
print('{}: {}nominal accuracy = {:.2f}%, '
'verified = {:.2f}%, attack = {:.2f}%'.format(
step_value,
'loss = {}, '.format(loss_value) if loss_value is not None else '',
metric_values.nominal_accuracy * 100.,
metric_values.verified_accuracy * 100.,
metric_values.attack_accuracy * 100.))
def layers(model_size):
"""Returns the layer specification for a given model name."""
if model_size == 'tiny':
return (
('linear', 100),
('activation', 'relu'))
elif model_size == 'small':
return (
('conv2d', (4, 4), 16, 'VALID', 2),
('activation', 'relu'),
('conv2d', (4, 4), 32, 'VALID', 1),
('activation', 'relu'),
('linear', 100),
('activation', 'relu'))
elif model_size == 'medium':
return (
('conv2d', (3, 3), 32, 'VALID', 1),
('activation', 'relu'),
('conv2d', (4, 4), 32, 'VALID', 2),
('activation', 'relu'),
('conv2d', (3, 3), 64, 'VALID', 1),
('activation', 'relu'),
('conv2d', (4, 4), 64, 'VALID', 2),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'))
elif model_size == 'large':
return (
('conv2d', (3, 3), 64, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 64, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 2),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 1),
('activation', 'relu'),
('conv2d', (3, 3), 128, 'SAME', 1),
('activation', 'relu'),
('linear', 512),
('activation', 'relu'))
else:
raise ValueError('Unknown model: "{}"'.format(model_size))
def main(unused_args):
logging.info('Training IBP on %s...', FLAGS.dataset.upper())
step = tf.train.get_or_create_global_step()
# Learning rate.
learning_rate = ibp.parse_learning_rate(step, FLAGS.learning_rate)
# Dataset.
input_bounds = (0., 1.)
num_classes = 10
if FLAGS.dataset == 'mnist':
data_train, data_test = tf.keras.datasets.mnist.load_data()
else:
assert FLAGS.dataset == 'cifar10', (
'Unknown dataset "{}"'.format(FLAGS.dataset))
data_train, d | ata_test = tf.keras.datasets.cifar10.load_data()
data_train = (data_train[0], data_train[1].flatten())
data_test = (data_tes | t[0], data_test[1].flatten())
data = ibp.build_dataset(data_train, batch_size=FLAGS.batch_size,
sequential=False)
if FLAGS.dataset == 'cifar10':
data = data._replace(image=ibp.randomize(
data.image, (32, 32, 3), expand_shape=(40, 40, 3),
crop_shape=(32, 32, 3), vertical_flip=True))
# Base predictor network.
original_predictor = ibp.DNN(num_classes, layers(FLAGS.model))
predictor = original_predictor
if FLAGS.dataset == 'cifar10':
mean = (0.4914, 0.4822, 0.4465)
std = (0.2023, 0.1994, 0.2010)
predictor = ibp.add_image_normalization(original_predictor, mean, std)
if FLAGS.crown_bound_init > 0 or FLAGS.crown_bound_final > 0:
logging.info('Using CROWN-IBP loss.')
model_wrapper = ibp.crown.VerifiableModelWrapper
loss_helper = ibp.crown.create_classification_losses
else:
model_wrapper = ibp.VerifiableModelWrapper
loss_helper = ibp.create_classification_losses
predictor = model_wrapper(predictor)
# Training.
train_losses, train_loss, _ = loss_helper(
step,
data.image,
data.label,
predictor,
FLAGS.epsilon_train,
loss_weights={
'nominal': {
'init': FLAGS.nominal_xent_init,
'final': FLAGS.nominal_xent_final,
'warmup': FLAGS.verified_xent_init + FLAGS.nominal_xent_init
},
'attack': {
'init': FLAGS.attack_xent_init,
'final': FLAGS.attack_xent_final
},
'verified': {
'init': FLAGS.verified_xent_init,
'final': FLAGS.verified_xent_final,
'warmup': 0.
},
'crown_bound': {
'init': FLAGS.crown_bound_init,
'final': FLAGS.crown_bound_final,
'warmup': 0.
},
},
warmup_steps=FLAGS.warmup_steps,
rampup_steps=FLAGS.rampup_steps,
input_bounds=input_bounds)
saver = tf.train.Saver(original_predictor.get_variables())
optimizer = tf.train.AdamOptimizer(learning_rate)
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_op = optimizer.minimize(train_loss, step)
# Test using while loop.
def get_test_metrics(batch_size, attack_builder=ibp.UntargetedPGDAttack):
"""Returns the test metrics."""
num_test_batches = len(data_test[0]) // batch_size
assert len(data_test[0]) % batch_size == 0, (
'Test data is not a multiple of batch size.')
def cond(i, *unused_args):
return i < num_test_batches
def body(i, metrics):
"""Compute the s |
drhagen/parsita | examples/positioned.py | Python | mit | 3,313 | 0.000906 | """User-defined positioned parser example.
This shows how a new parser can be defined outside Parsita and used in tandem
with the built-in parsers. The ``positioned`` parser updates the value
returned from an arbitrary parser with the position in the input that was
consumed by that parser.
"""
from abc import abstractmethod
from dataclasses import dataclass
from typing import Generic
from parsita import Parser, TextParsers, reg
from parsita.state import Continue, Input, Output, Reader, Status
from parsita.util import splat
class PositionAware(Generic[Output]):
"""An object which can cooperate with the positioned parser.
The ``positioned`` parser calls the ``set_position`` method on values it
receives. This abstract base class marks those objects that can cooperate
with ``positioned`` in this way and receive the input position to produce
the final value.
"""
@abstractmethod
def set_position(self, start: int, length: int) -> Output:
"""Produce a new value with the position set.
This abstract method must be implemented by subclasses of
``PositionAware``. It receives the position in the input that was
consumed and returns a new value, typically an object similar to the old
value, but with the position set. Important: the old value is not
expected to be mutated.
Args:
start: The index of the first character consumed by the parser
length: The number of characters consumed by the parser
"""
pass
class PositionedParser(Generic[Input, Output], Parser[Input, Output]):
def __init__(self, parser: Parser[Input, PositionAware[Output]]):
super().__init__()
self.parser = parser
def consume(self, reader: Reader[Input]) -> Status[Input, Output]:
start = reader.position
status = self.parser.consume(reader)
if isinstance(status, Continue):
end = status.remainder.position
return Continue(status.remainder, status.value.set_position(start, end - start)).merge(status)
else:
return status
def __repr__(self):
return self.name_or_nothing() + "positioned({})".format(self.parser.name_or_repr())
def positioned(parser: Parser[Input, PositionAware[Output]]):
"""Set the position on a PositionAware value.
This parser matches ``parser`` and, if successful, calls ``set_position``
on the produced value to produce a new value. The value produces by
``parser`` must implement the ``PositionAware`` interface so that it can
receive the position in the input.
Args:
parser: Parser
"""
return PositionedParser(parser)
# Everything below here is an example use case
@dataclass
class UnfinishedVariable(PositionAware):
name: str
def set_position(self, start: int, length: int):
return Variable(self.name, start, length)
@dataclass
class Variable:
name: str
start: int
length: int
@dataclass
class Plus:
first: Variable
s | econd: Variable
class PlusParsers(TextParsers):
variable = positioned(reg("[A-Za-z][A-Za-z0-9_]*") > UnfinishedVariable)
plus = variable & "+" >> variable > splat(Pl | us)
if __name__ == "__main__":
print(PlusParsers.plus.parse("abc + xyz").or_die())
|
lxp/apt2d8 | src/apt2d8/rpc/rpcexception.py | Python | gpl-3.0 | 774 | 0.005168 | #
# This fi | le is part of apt2d8.
#
# Copyright (C) 2013 David Gnedt
#
# apt2d8 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public | License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# apt2d8 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with apt2d8. If not, see <http://www.gnu.org/licenses/>.
#
class ProtocolException(Exception):
    """Error in the RPC message exchange.

    NOTE(review): semantics inferred from the name; no raise sites are
    visible in this module — confirm against callers.
    """
    pass
class RemoteException(Exception):
    """Error reported by the remote side of the RPC connection.

    NOTE(review): semantics inferred from the name; no raise sites are
    visible in this module — confirm against callers.
    """
    pass
|
PermutaTriangle/PermStruct | examples/classical_5x4/1234_1243_2134_2431_4213.py | Python | bsd-3-clause | 693 | 0.010101 | from __future__ import print_function
from permuta import *
import permstruct
import permstruct.dag
from permstruct import *
from permstruct.dag import taylor_dag
import sys
# -- Example from Kuszmaul paper -- #
# STATUS ================================================ >
# Pattern set under study, encoded as an underscore-separated list of
# one-line permutations.
task = '1234_1243_2134_2431_4213'
# Decode each pattern string into a permuta Permutation (one digit per entry).
patts = [ Permutation([ int(c) for c in p ]) for p in task.split('_') ]
# Alternative pattern set kept from an earlier experiment:
# patts = [Permutation([5,2,3,4,1]), Permutation([5,3,2,4,1]), Permutation([5,2,4,3,1]), Permutation([3,5,1,4,2]), Permutation([4,2,5,1,3]), Permutation([3,5,1,6,2,4])]
# Run the PermStruct search. Parameter meanings (size, perm_bound,
# subpatts_len/num) come from permstruct.struct — confirm against its signature.
struct(patts, size=6, perm_bound = 8, subpatts_len=4, subpatts_num=3)
# struct(patts, size = 4, verify_bound = 10, ask_verify_higher = True)
|
gasman/pyrecoil | setup.py | Python | gpl-2.0 | 1,164 | 0.003436 | from setuptools import setup, find_packages, Extension
# C extension module: recoil_interface.c provides the Python bindings,
# recoil.c is the RECOIL decoder itself (per the package description below).
recoil_module = Extension('_recoil', sources=['recoil_interface.c', 'recoil.c'])
def readme():
    """Return the contents of README.rst for use as the long description.

    The encoding is pinned to UTF-8 so the build does not depend on the
    machine's locale (a plain open() would use the platform default and
    can fail on non-ASCII text, e.g. under cp1252 on Windows).
    """
    # io.open accepts the encoding kwarg on both Python 2.7 and 3,
    # matching the interpreter versions advertised in the classifiers.
    import io
    with io.open('README.rst', encoding='utf-8') as f:
        return f.read()
# Package metadata and build configuration for pyrecoil.
setup(
    name="pyrecoil",
    version="0.3.1",
    packages=find_packages(),
    # Build the C extension declared above alongside the Python package.
    ext_modules=[recoil_module],
    include_package_data=True,
    author="Matt Westcott",
    author_email="matt@west.co.tt",
    description="Python bindings for RECOIL, the Retro Computer Image Library",
    # Long description is read from README.rst via readme().
    long_description=readme(),
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Topic :: Multimedia :: Graphics",
        "Topic :: Multimedia :: Graphics :: Graphics Conversion",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)",
    ],
    url="http://github.com/gasman/pyrecoil",
    license="GPLv2+",
)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.