repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
ruipgpinheiro/subuser | logic/subuserlib/classes/subuserSubmodules/run/runReadyImage.py | Python | lgpl-3.0 | 2,881 | 0.012843 | #!/usr/bin/env python
# This file should be compatible with both Python 2 and 3.
# If it is not, please file a bug report.
"""
Contains code that prepairs a subuser's image to be run.
"""
#external imports
import os
#internal imports
from subuserlib.classes.userOwnedObject import UserOwnedObject
class RunReadyImage(UserOwnedObject):
def __init__(self,user,subuser):
self.__subuser = subuser
self.__id = None
UserOwnedObject.__init__(self,user)
def setup(self):
if not "run-ready-image-id" in self.getSubuser().getRuntimeCache():
self.__id = self.build()
self.getSubuser().getRuntimeCache()["run-ready-image-id"] = self.__id
self.getSubuser().getRuntimeCache().save()
def getSubuser(self):
return self.__subuser
def getId(self):
if not self.__id:
self.__id = self.getSubuser().getRuntimeCache()["run-ready-image-id"]
return self.__id
def generateImagePreparationDockerfile(self):
"""
There is still some preparation that needs to be done before an image is ready to be run. But this preparation requires run time information, so we cannot preform that preparation at build time.
"""
dockerfileContents = "FROM "+self.getSubuser().getImageId()+"\n"
dockerfileContents += "RUN useradd --uid="+str(self.getUser().getEndUser().uid)+" "+self.getUser().getEndUser().name+" ;export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo uid exists ; elif [ $exitstatus -eq 9 ]; then echo username exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN test -d "+self.getUser().getEndUser().homeDir+" || mkdir "+self.getUser().getEndUser().homeDir+" && chown "+self.getUser().getEndUser().name+" "+self.getUser().getEndUser().homeDir+"\n"
if self.getSubuser().getPermissions()["serial-devices"]:
dockerfileContents += "RUN groupadd dialout; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN groupadd uucp; export exitstatus=$? ; if [ $exitstatus -eq 4 ] ; then echo gid exists ; elif [ $exitstatus -eq 9 ]; then echo groupname exists. ; else exit $exitstatus ; fi\n"
dockerfileContents += "RUN usermod -a -G dialout "+self.getUser().getEndUser().name+"\n"
dockerfileContents += "RUN usermod -a -G uucp "+self.getUser().getEndUser().name+"\n"
if self.getSubuser().getPermissions()["sudo"]:
docke | rfileContents += "RUN (umask 337; echo \""+self.getUser().getEndUser().name+" ALL=(ALL) NOPASSWD: ALL\" > /etc/sudoers.d/allowsudo )\n"
return dockerfileContents
def build(self):
"""
Returns the Id of the Docker image to be run.
"""
return self.getUser().getDockerDaemon().build(None,quietClient=True,useCache=True | ,forceRm=True,rm=True,dockerfile=self.generateImagePreparationDockerfile())
|
nyu-dl/dl4mt-simul-trans | policy.py | Python | bsd-3-clause | 23,644 | 0.007867 | """
-- Policy Network for decision making [more general]
"""
from nmt_uni import *
from layers import _p
import os
import time, datetime
import cPickle as pkl
# hyper params
TINY = 1e-7
PI = numpy.pi
E = numpy.e
A = 0.2
B = 1
class Controller(object):
def __init__(self, trng,
options,
n_in=None, n_out=None,
recurrent=False, id=None):
self.WORK = options['workspace']
self.trng = trng
self.options = options
self.recurrent = recurrent
self.type = options.get('type', 'categorical')
self.n_hidden | = 128
self.n_in = n_in
self.n_out = n_out
if self.options.get('layernorm', True):
self.rec = 'lngru'
else: |
self.rec = 'gru'
if not n_in:
self.n_in = options['readout_dim']
if not n_out:
if self.type == 'categorical':
self.n_out = 2 # initially it is a WAIT/COMMIT action.
elif self.type == 'gaussian':
self.n_out = 100
else:
raise NotImplementedError
# build the policy network
print 'parameter initialization'
params = OrderedDict()
if not self.recurrent:
print 'building a feedforward controller'
params = get_layer('ff')[0](options, params, prefix='policy_net_in',
nin=self.n_in, nout=self.n_hidden)
else:
print 'building a recurrent controller'
params = get_layer(self.rec)[0](options, params, prefix='policy_net_in',
nin=self.n_in, dim=self.n_hidden)
params = get_layer('ff')[0](options, params, prefix='policy_net_out',
nin=self.n_hidden,
nout=self.n_out if self.type == 'categorical' else self.n_out * 2)
# bias the forget probability
# if self.n_out == 3:
# params[_p('policy_net_out', 'b')][-1] = -2
# for the baseline network.
params_b = OrderedDict()
# using a scalar baseline [**]
# params_b['b0'] = numpy.array(numpy.random.rand() * 0.0, dtype='float32')
# using a MLP as a baseline
params_b = get_layer('ff')[0](options, params_b, prefix='baseline_net_in',
nin=self.n_in, nout=128)
params_b = get_layer('ff')[0](options, params_b, prefix='baseline_net_out',
nin=128, nout=1)
if id is not None:
print 'reload the saved model: {}'.format(id)
params = load_params(self.WORK + '.policy/{}-{}.current.npz'.format(id, self.options['base']), params)
params_b = load_params(self.WORK + '.policy/{}-{}.current.npz'.format(id, self.options['base']), params_b)
else:
id = datetime.datetime.fromtimestamp(time.time()).strftime('%y%m%d-%H%M%S')
print 'start from a new model: {}'.format(id)
self.id = id
self.model = self.WORK + '.policy/{}-{}'.format(id, self.options['base'])
# theano shared params
tparams = init_tparams(params)
tparams_b = init_tparams(params_b)
self.tparams = tparams
self.tparams_b = tparams_b
# build the policy network
self.build_sampler(options=options)
self.build_discriminator(options=options)
print 'policy network'
for p in params:
print p, params[p].shape
def build_batchnorm(self, observation, mask=None):
raise NotImplementedError
def build_sampler(self, options):
# ==================================================================================== #
# Build Action function: samplers
# ==================================================================================== #
observation = tensor.matrix('observation', dtype='float32') # batch_size x readout_dim (seq_steps=1)
prev_hidden = tensor.matrix('p_hidden', dtype='float32')
if not self.recurrent:
hiddens = get_layer('ff')[1](self.tparams, observation,
options, prefix='policy_net_in',
activ='tanh')
else:
hiddens = get_layer(self.rec)[1](self.tparams, observation,
options, prefix='policy_net_in', mask=None,
one_step=True, _init_state=prev_hidden)[0]
act_inps = [observation, prev_hidden]
if self.type == 'categorical':
act_prob = get_layer('ff')[1](self.tparams, hiddens, options,
prefix='policy_net_out',
activ='softmax') # batch_size x n_out
act_prob2 = tensor.clip(act_prob, TINY, 1 - TINY)
# compiling the sampling function for action
# action = self.trng.binomial(size=act_prop.shape, p=act_prop)
action = self.trng.multinomial(pvals=act_prob).argmax(1) # 0, 1, ...
print 'build action sampling function [Discrete]'
self.f_action = theano.function(act_inps, [action, act_prob, hiddens, act_prob2],
on_unused_input='ignore') # action/dist/hiddens
elif self.type == 'gaussian':
_temp = get_layer('ff')[1](self.tparams, hiddens, options,
prefix='policy_net_out',
activ='linear'
) # batch_size x n_out
mean, log_std = _temp[:, :self.n_out], _temp[:, self.n_out:]
mean, log_std = -A * tanh(mean), -B-relu(log_std)
action0 = self.trng.normal(size=mean.shape, dtype='float32')
action = action0 * tensor.exp(log_std) + mean
print 'build action sampling function [Gaussian]'
self.f_action = theano.function(act_inps, [action, mean, log_std, hiddens],
on_unused_input='ignore') # action/dist/hiddens
else:
raise NotImplementedError
def build_discriminator(self, options):
# ==================================================================================== #
# Build Action Discriminator
# ==================================================================================== #
observations = tensor.tensor3('observations', dtype='float32')
mask = tensor.matrix('mask', dtype='float32')
if self.type == 'categorical':
actions = tensor.matrix('actions', dtype='int64')
elif self.type == 'gaussian':
actions = tensor.tensor3('actions', dtype='float32')
else:
raise NotImplementedError
if not self.recurrent:
hiddens = get_layer('ff')[1](self.tparams, observations,
options, prefix='policy_net_in',
activ='tanh')
else:
hiddens = get_layer(self.rec)[1](self.tparams, observations,
options, prefix='policy_net_in', mask=mask)[0]
act_inputs = [observations, mask]
if self.type == 'categorical':
act_probs = get_layer('ff')[1](self.tparams, hiddens, options, prefix='policy_net_out',
activ='softmax') # seq_steps x batch_size x n_out
act_probs = tensor.clip(act_probs, TINY, 1 - TINY)
print 'build action distribiution'
self.f_probs = theano.function(act_inputs, act_probs,
on_unused_input='ignore') # get the action probabilities
elif self.type == 'gaussian':
_temps = get_layer('ff')[1](self.tparams, hiddens, options,
|
mrrrgn/releasetasks | releasetasks/__init__.py | Python | mpl-2.0 | 1,546 | 0.001294 | # -*- coding: utf-8 -*-
from os import path
import yaml
import arrow
from chunkify import chunkify
from jinja2 import Environment, FileSystemLoader, StrictUndefined
from taskcluster.utils import stableSlugId
DEFAULT_TEMPLATE_DIR = path.join(path.dirname(__file__), "templates")
def make_task_graph(root_template="release_graph.yml.tmpl", template_dir=DEFAULT_TEMPLATE_DIR, **template_kwargs):
# TODO: some validation of template_kwargs + defaults
env = Environment(loader=FileSystemLoader(template_dir), undefined=StrictUndefined)
now = arrow.now()
now_ms = now.timestamp * 1000
template = env.get_template(root_template)
template_vars = {
"stableSlugId": stableSlugId(),
"chunkify": chunkify,
"sorted": sorted,
"now": now,
"now_ms": now_ms,
# This is used in defining expirations in tasks. There's no way to
# actually tell Taskcluster never to expire them, but 1,000 years
# is as good as never....
"never": arrow.now().replace(years=1000),
"get_complete_mar_url": lambda a, b, c, d: "COMPLETE MAR URL",
"get_complete_mar_artifact": lambda a, b, c: "COMPLETE MAR ARTIFACT",
# TODO: this should be a hash of the revisions in the push
"revision_hash": "abcdef",
"get_treeherder_platform": lambda p: p,
# TODO: unstub this
"encrypt_env_var": lambda a, b, c, d, e: "ENCRYPTED",
}
template_vars.update(tem | plate_ | kwargs)
return yaml.safe_load(template.render(**template_vars))
|
lutris/website | common/management/commands/anon_db.py | Python | agpl-3.0 | 2,839 | 0 | """Remove any personally identifying information from the database"""
from django.core.management.base import BaseCommand
from django.conf import settings
from django.contrib.admin.models import LogEntry
from django_openid_auth.models import UserOpenID
from rest_framework.authtoken.models import Token
from reversion.models import Revision
from games.models import (
Installer,
InstallerIssue,
InstallerIssueReply,
InstallerHistory,
Screenshot,
GameSubmission,
)
from accounts.models import User
from common.models import Upload, News
class Command(BaseCommand):
"""Django command to anonymize the database"""
@staticmethod
def get_main_user():
"""Return the only user remaining in the DB"""
return User.objects.first()
@staticmethod
def delete_tokens():
"""Remove all auth tokens (OpenID, DRF, ...)"""
res = UserOpenID.objects.all().delete()
print("Deleted %s openids" % res[0])
res = Token.objects.all().delete()
print("Deleted %s tokens" % res[0])
res = LogEntry.objects.all().delete()
print("Deleted %s log entries" % res[0])
def handle(self, *args, **kwargs):
if not settings.DEBUG:
raise RuntimeError("Never run this in production")
self.delete_tokens()
user = self.get_main_user()
res = InstallerIssue.objects.all().update(submitted_by=user)
print("Updated %s issues" % res)
res = InstallerIssueReply.objects.all().update(submitted_by=user)
print("Updated %s issue replies" % res)
res = InstallerHistory.objects.all().update(user=user)
print("Updated %s installer history" % res)
res = Installer.objects.all().update(user=user)
print("Updated %s installers" % res)
res = InstallerHistory.objects.all().update(user=user)
print("Updated %s installer history" % res)
res = GameSubmission.objects.all().update(user=user)
print("Updated %s game submissions" % res)
res = Screenshot.objects.all().update(uploaded_by=user)
print("Updated %s screenshots" % res)
res = Upload.objects.all().update(uploaded_by=user)
p | rint("Updated %s uploads" % res)
res = News.objects.all().update(user=user)
print("Updated %s news" % res)
res = Revision.objects.all().update(user=user)
print("Updated %s revisions" % res)
res = User.objects.exclude(pk=user.id).delete()
print("Deleted %s users" % res[0])
default_password = "lutris"
user.set_password(defau | lt_password)
user.username = "lutris"
user.email = "root@localhost"
user.website = ""
user.steamid = ""
user.save()
print("Password for user %s is now %s" % (user, default_password))
|
shayneholmes/plover | plover/logger.py | Python | gpl-2.0 | 1,566 | 0.000639 | # Copyright (c) 2013 Hesky Fisher
# See LICENSE.txt for details.
"""A module to handle logging."""
import logging
from logging.handlers import RotatingFileHandler
LOGGER_NAME = 'plover_logger'
LOG_FOR | MAT = '%(asctime)s %(message)s'
LOG_MAX_BYTES = 10000000
LOG_COUNT = 9
class Logger(object):
def __init__(self):
self._logger = logging | .getLogger(LOGGER_NAME)
self._logger.setLevel(logging.DEBUG)
self._handler = None
self._log_strokes = False
self._log_translations = False
def set_filename(self, filename):
if self._handler:
self._logger.removeHandler(self._handler)
handler = None
if filename:
handler = RotatingFileHandler(filename, maxBytes=LOG_MAX_BYTES,
backupCount=LOG_COUNT,)
handler.setFormatter(logging.Formatter(LOG_FORMAT))
self._logger.addHandler(handler)
self._handler = handler
def enable_stroke_logging(self, b):
self._log_strokes = b
def enable_translation_logging(self, b):
self._log_translations = b
def log_stroke(self, steno_keys):
if self._log_strokes and self._handler:
self._logger.info('Stroke(%s)' % ' '.join(steno_keys))
def log_translation(self, undo, do, prev):
if self._log_translations and self._handler:
# TODO: Figure out what to actually log here.
for u in undo:
self._logger.info('*%s', u)
for d in do:
self._logger.info(d)
|
frenetic-lang/netcore-1.0 | examples/Campus.py | Python | bsd-3-clause | 1,239 | 0.026634 | #!/usr/bin/python
from mininet.topo import Topo, Node
class CampusTopo( Topo ):
"A simple example of a small campus network."
def __init__(self, enable_all = True):
" Create a campus topology."
super( CampusTopo, self).__init__()
# Add switches and hosts.
switches = [1, 2, 3]
trustedUsers = [101, 102]
secureServers = [201]
untrustedUsers = [301, 302, 303]
for switch in switches:
self.add_node( switch, Node( is_switch=True ) )
for host in trustedUsers + secureServers + untrustedUsers:
self.add_node( host, Node (is_switch=False ) )
# Add edges.
self.add_edge( switches[0], switches[1] )
self.add_edge( switches[0], swi | tches[2] )
self.add_edge( switches[1], switches[2] )
for host in trustedUsers:
self.add_edge( host, switches[0] )
for host in secureServers:
self.add_edge( host, switches[1] )
for host in untrustedUsers:
self.add_edge( host, switches[2] )
# Consider all switches and hosts 'on'.
self.enable_all( | )
# Let mininet run this topo from the command line.
topos = { 'campus' : ( lambda: CampusTopo() ) }
|
timgilbert/how-you-been | src/howyoubeen/Foursquare.py | Python | mit | 3,878 | 0.009025 | import string, urllib, urllib2, logging
from webapp2_extras import json
import Handlers, Config
class FoursquareException(Exception):
def __init__(self, message, value):
super(self, Exception).__init__(message)
self.value = value
class FoursquareApiException(FoursquareException): pass
class FoursquareMixin(Handlers.WebAuth, Config.ConfigAware):
"""This is a mixin class which has several convenience methods for
constructing foursquare-related URL, based on the assumption that
a configParser instance is in the app configuration."""
# Default setting for config lookups
DEFAULT_SETTING_GROUP = 'foursquare'
# Name of the session cookie we store data in
OAUTH_COOKIE = 'foursquare.oauth'
def getAuthRedirectUrl(self):
"""Construct the URL we'll initially use to send users off to foursquare for OAuth"""
url = ('https://foursquare.com/oauth2/authenticate' +
'?client_id=' + self.cfg('client_id') +
'&response_type=code' +
'&redirect_uri=' + self.cfg('callback'))
return url
def foursquareAccessTokenUrl(self, code):
"""Construct a URL to use to get an access token from foursquare's OAuth"""
url = ('https://foursquare.com/oauth2/access_token' +
'?client_id=' + self.cfg('client_id') +
'&client_secret=' + self.cfg('client_secret') +
'&grant_type=authorization_code' +
'&redirect_uri=' + self.cfg('callback') +
'&code=' + urllib.quote(code))
return url
def getFoursquareAccessToken(self, code):
"""Given an access code, make an OAuth call to foursquare and save the
access token they give us in a cookie. Raise an error if they return one."""
url = self.foursquareAccessTokenUrl(code)
httpResponse = urllib2.urlopen(url)
result = json.decode(httpResponse.read())
if 'access_token' in result:
access_token = str(result['a | ccess_token'])
else:
raise FoursquareException(result)
self.setCookie(self.OAUTH_COOKIE, access_token)
def getFoursquareCheckins(self, accessToke | n):
"""Get the list of the signed-in user's checkins, per
https://developer.foursquare.com/docs/users/checkins"""
return self.getFoursquareApi('users/self/checkins', accessToken)
def foursquareApiUrl(self, apiPath, accessToken):
"""Return a complete URL to the relevant foursquare API endpoint."""
baseUrl = 'https://api.foursquare.com/v2/' + apiPath # TODO: make this a cfg value
return baseUrl + '?' + urllib.urlencode({
'v': self.cfg('apiversion'),
'oauth_token': accessToken
})
def getFoursquareApi(self, apiPath, accessToken):
"""Given a partial foursquare API endpoint path and an auth token,
make a request to the endpoint, parse the result, and return it."""
url = self.foursquareApiUrl(apiPath, accessToken)
request = urllib2.urlopen(url)
content = request.read()
# Criminy. http://stackoverflow.com/a/1020931/87990
encoding = request.headers['content-type'].split('charset=')[-1]
ucontent = unicode(content, encoding)
jsonResult = json.decode(ucontent)
jsonResult['encoding'] = encoding
return self._checkForApiErrors(jsonResult)
def _checkForApiErrors(self, jsonResult):
"""Inspect a parsed json response for any errors as described here:
https://developer.foursquare.com/overview/responses; if any exist,
raise an error."""
if 'meta' not in jsonResult:
raise FoursquareApiException('API result is missing "meta" member', jsonResult)
return jsonResult |
sbesson/zeroc-ice | java/test/Ice/operations/run.py | Python | gpl-2.0 | 1,426 | 0.011921 | #!/usr/bin/env python
# **********************************************************************
#
# Copyright (c) 2003-2013 ZeroC, Inc. All rights reserved.
#
# This copy of Ice is licensed to you under the terms described in the
# ICE_LICENSE file included in this distribution.
#
# **********************************************************************
import os, sys
path = [ ".", "..", "../..", "../../..", "../../../.." ]
head = os.path.dirname(sys.argv[0])
if len(head) > 0:
path = [os.path.joi | n(head, p) for p in path]
path = [os.path.abspath(p) for p in path if os.path.exists(os.path.join(p, "scripts", "TestUtil.py")) ]
if len(path) == 0:
raise Runti | meError("can't find toplevel directory!")
sys.path.append(os.path.join(path[0], "scripts"))
import TestUtil
print("tests with regular server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0")
print("tests with AMD server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0", server="test.Ice.operations.AMDServer")
print("tests with TIE server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0", server="test.Ice.operations.TieServer")
print("tests with AMD TIE server.")
TestUtil.clientServerTest(additionalClientOptions = "--Ice.Warn.AMICallback=0", server="test.Ice.operations.AMDTieServer")
print("tests with collocated server.")
TestUtil.collocatedTest()
|
fregaham/manaclash | cost.py | Python | gpl-3.0 | 8,669 | 0.006575 | # Copyright 2011 Marek Schmidt
#
# This file is part of ManaClash
#
# ManaClash is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ManaClash is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ManaClash. If not, see <http://www.gnu.org/licenses/>.
#
#
from actions import *
class Cost:
def __init__ (self):
pass
def get_text(self, game, obj, player):
return "Pay Cost"
def pay(self, game, obj, effect, player):
game.process_returns_push(False)
def canPay(self, game, obj, player):
return True
def __str__ (self):
return "Cost"
def mana_parse(m):
cs = "WGRUB"
ret = {}
ret[None] = 0
for c in cs:
ret[c] = 0
for c in m:
if c in cs:
ret[c] = ret[c] + 1
elif c in "0123456789":
ret[None] = ret[None] + int(c)
return ret
def mana_converted_cost(m):
cs = "WGRUB"
ret = 0
for c in m:
if c in cs:
ret += 1
elif c in "0123456789":
ret += int(c)
return ret
def mana_format(mp):
cs = "WGRUB"
ret = ""
for c in cs:
ret += c * mp[c]
colorless = mp[None]
while colorless > 9:
ret += "9"
colorless -= 9
ret += str(colorless)
if ret == "0":
ret = ""
return ret
def mana_greater_than(m1, m2):
mp1 = mana_parse(m1)
mp2 = mana_parse(m2)
excess = 0
for c in "WGRUB":
if mp1[c] >= mp2[c]:
excess += mp1[c] - mp2[c]
else:
return False
return excess + mp1[None] >= mp2[None]
def mana_diff (m1, m2): |
mp1 = mana_parse(m1)
mp2 = mana_parse(m2)
ret = {}
excess = 0
for c in "WGRUB":
ret[c] = mp1[c] - mp2[c]
if mp1[None] >= mp2[None]:
ret[None] = mp1[None] - mp2[None]
else:
ret[None] = 0
x = mp2[None] - mp1[None]
for c in "WGRUB":
if ret[c] >= x:
ret[c] -= x
| x = 0
else:
x -= ret[c]
ret[c] = 0
assert x == 0
return mana_format(ret)
class ManaCost(Cost):
def __init__ (self, manacost):
Cost.__init__(self)
self.manacost = manacost
def get_text(self, game, obj, player):
return "Pay " + self.manacost
def pay(self, game, obj, effect, player):
# print("paying cost, manapool: %s, manacost: %s" % (player.manapool, self.manacost))
player.manapool = mana_diff (player.manapool, self.manacost)
# print("after payed: manapool: %s" % (player.manapool))
game.process_returns_push(True)
def canPay(self, game, obj, player):
return mana_greater_than(player.manapool, self.manacost)
def __str__ (self):
return self.manacost
class TapCost(Cost):
def __init__ (self, obj_id):
Cost.__init__(self)
def __str__ (self):
return "T"
class TapSelectorCostProcess:
def __init__ (self, selector, obj, player):
self.selector = selector
self.obj_id = obj.id
self.player_id = player.id
def next(self, game, action):
obj = game.obj(self.obj_id)
player = game.obj(self.player_id)
if action is None:
actions = []
for o in self.selector.all(game, obj):
if o.tapped:
continue
_p = Action ()
_p.object_id = o.id
_p.text = "Tap %s" % str(o)
actions.append (_p)
if len(actions) > 0:
return ActionSet (player.id, "Choose %s to tap" % self.selector, actions)
else:
game.process_returns_push(False)
else:
game.doTap(game.obj(action.object_id))
game.process_returns_push(True)
class TapSelectorCost(Cost):
def __init__ (self, selector):
Cost.__init__ (self)
self.selector = selector
def get_text(self, game, obj, player):
return "Tap %s" % self.selector
def pay(self, game, obj, effect, player):
game.process_push(TapSelectorCostProcess(self.selector, obj, player))
def canPay(self, game, obj, player):
os = [x for x in self.selector.all(game, obj)]
return len(os) > 0
def __str__ (self):
return "tap %s" % self.selector
class SacrificeSelectorCostProcess:
def __init__ (self, selector, obj, effect, player):
self.selector = selector
self.obj_id = obj.id
self.effect_id = effect.id
self.player_id = player.id
def next(self, game, action):
obj = game.obj(self.obj_id)
player = game.obj(self.player_id)
effect = game.obj(self.effect_id)
if action is None:
actions = []
for o in self.selector.all(game, obj):
if o.get_controller_id() != player.get_id():
continue
_p = Action ()
_p.object_id = o.id
_p.text = "Sacrifice %s" % str(o)
actions.append (_p)
if len(actions) > 0:
return ActionSet (player.id, "Choose %s to sacrifice" % self.selector, actions)
else:
game.process_returns_push(False)
else:
obj = game.obj(action.object_id)
effect.slots["sacrificed"] = game.create_lki(obj)
game.doSacrifice(obj)
game.process_returns_push(True)
class SacrificeSelectorCost(Cost):
def __init__ (self, selector):
Cost.__init__ (self)
self.selector = selector
def get_text(self, game, obj, player):
return "Sacrifice %s" % self.selector
def pay(self, game, obj, effect, player):
game.process_push(SacrificeSelectorCostProcess(self.selector, obj, effect, player))
def __str__ (self):
return "sacrifice %s" % self.selector
class DiscardXProcess:
def __init__ (self, selector, obj, effect, player):
self.selector = selector
self.obj_id = obj.id
self.effect_id = effect.id
self.player_id = player.id
def next(self, game, action):
obj = game.obj(self.obj_id)
player = game.obj(self.player_id)
effect = game.obj(self.effect_id)
if action is None:
actions = []
hand = game.get_hand(player)
for o in hand.objects:
if self.selector.contains(game, obj, o):
_p = Action ()
_p.object_id = o.id
_p.text = "Discard %s" % str(o)
actions.append (_p)
if len(actions) > 0:
return ActionSet (player.id, "Discard %s" % self.selector, actions)
else:
game.process_returns_push(False)
else:
game.doDiscard(player, game.obj(action.object_id), obj)
game.process_returns_push(True)
class DiscardX(Cost):
def __init__ (self, selector):
Cost.__init__(self)
self.selector = selector
def get_text(self, game, obj, player):
return "Discard " + str(self.selector)
def pay(self, game, obj, effect, player):
game.process_push(DiscardXProcess(self.selector, obj, effect, player))
def __str__ (self):
return "discard " + str(self.selector)
class PayLifeCost(Cost):
def __init__ (self, n):
Cost.__init__ (self)
self.n = n
def get_text(self, game, obj, player):
return "Pay %d life" % self.n
def pay(self, game, obj, effect, player):
game.process_returns_push(True)
game.doPayLife(player, self.n)
def __str__ (self):
return "pay %d life" % self.n
class Pa |
mjball/Singularity | scripts/logfetch/logfetch_base.py | Python | apache-2.0 | 2,462 | 0.017059 | import os
import sys
import gzip
from datetime import datetime
from termcolor import colored
from singularity_request import get_json_response
BASE_URI_FORMAT = '{0}{1}'
REQUEST_TASKS_FORMAT = '/history/request/{0}/tasks'
ACTIVE_TASKS_FORMAT = '/history/request/{0}/tasks/active'
def unpack_logs(logs):
for zipped_file in logs:
try:
if os.path.isfile(zipped_file):
file_in = gzip.open(zipped_file, 'rb')
unzipped = zipped_file.replace('.gz', '.log')
file_out = open(unzipped, 'wb')
file_out.write(file_in.read())
file_out.close()
file_in.close
os.remove(zipped_file)
sys.stderr.write(colored('Unpacked {0}'.format(zipped_file), 'green') + '\n')
except:
sys.stderr.write(colored('Could not unpack {0}'.format(zipped_file), 'red') + '\n')
continue
def base_uri(args):
if not args.singularity_uri_base:
exit("Specify a base uri for Singularity (-u)")
uri_prefix = "" if args.singularity_uri_base.startswith(("http://", "https | ://")) else "http://"
uri = BASE_URI_FORMAT.format(uri_prefix, args.singularity_uri_base)
return uri
def tasks_for_request(args):
if args.requestId and args.deployId:
tasks = [task["taskId"]["id"] for task i | n all_tasks_for_request(args) if (task["taskId"]["deployId"] == args.deployId)]
else:
tasks = [task["taskId"]["id"] for task in all_tasks_for_request(args)]
if hasattr(args, 'task_count'):
tasks = tasks[0:args.task_count]
return tasks
def all_tasks_for_request(args):
uri = '{0}{1}'.format(base_uri(args), ACTIVE_TASKS_FORMAT.format(args.requestId))
active_tasks = get_json_response(uri)
if hasattr(args, 'start_days'):
uri = '{0}{1}'.format(base_uri(args), REQUEST_TASKS_FORMAT.format(args.requestId))
historical_tasks = get_json_response(uri)
if len(historical_tasks) == 0:
return active_tasks
elif len(active_tasks) == 0:
return historical_tasks
else:
return active_tasks + [h for h in historical_tasks if is_in_date_range(args, int(str(h['updatedAt'])[0:-3]))]
else:
return active_tasks
def is_in_date_range(args, timestamp):
timedelta = datetime.utcnow() - datetime.utcfromtimestamp(timestamp)
if args.end_days:
if timedelta.days > args.start_days or timedelta.days <= args.end_days:
return False
else:
return True
else:
if timedelta.days > args.start_days:
return False
else:
return True
|
cako/notorius | src/image_label.py | Python | gpl-3.0 | 15,994 | 0.004627 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
#==============================================================================#
# #
# Copyright 2011 Carlos Alberto da Costa Filho #
# #
# This file is part of Notorius. #
# #
# Notorius is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# Notorius is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
#==============================================================================#
""" Image Label. """
import datetime
from PyQt4 import QtCore, QtGui
from note import Note
from constants import *
from icons import *
class ImageLabel(QtGui.QLabel):
"""
The ImageLabel class holds PDF QPixmap to be displayed in DocumentWidget.
"""
remove_trigger = QtCore.pyqtSignal()
toggle_source_trigger = QtCore.pyqtSignal()
set_clipboard_trigger = QtCore.pyqtSignal(QtCore.QString)
change_scale_trigger = QtCore.pyqtSignal(float)
change_page_trigger = QtCo | re.pyqtSignal(int)
show_search_trigger = QtCore.pyqtSignal()
hide_search_trigger = QtCore.pyqtSignal()
def __init__(self, parent = None):
super(ImageLabel, self).__init__()
self.parent = parent
self.preamble = PREAMBLE
self.note_pos = QtCore.QPointF()
self.note_icon_pos = QtCo | re.QPoint()
self.current_uid = 0
self.closest_id = 0
self.notes = {}
self.move = False
self.drag = False
self.overscroll = 0
self.control = False
self.noteImage = QtGui.QImage(':img/note22.png')
self.rubber_band = QtGui.QRubberBand( QtGui.QRubberBand.Rectangle, self)
self.drag_position = QtCore.QPoint()
self.setMouseTracking(True)
self.setAcceptDrops(True)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
QtGui.QToolTip.setFont(QtGui.QFont('SansSerif', 10))
self.setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.connect(self,
QtCore.SIGNAL("customContextMenuRequested(const QPoint &)"),
self.contextMenu)
self.add_menu = QtGui.QMenu()
self.addNoteAction = QtGui.QAction(self)
self.addNoteAction.setText("Add annotation")
self.connect(self.addNoteAction, QtCore.SIGNAL("triggered()"),
self.slot_add_note)
self.add_menu.addAction(self.addNoteAction)
self.change_menu = QtGui.QMenu()
self.editNoteAction = QtGui.QAction(self)
self.editNoteAction.setText("Edit annotation")
self.connect(self.editNoteAction, QtCore.SIGNAL("triggered()"),
self.slot_edit_note)
self.change_menu.addAction(self.editNoteAction)
self.moveNoteAction = QtGui.QAction(self)
self.moveNoteAction.setText("Move annotation")
self.connect(self.moveNoteAction, QtCore.SIGNAL("triggered()"),
self.slot_move_note)
self.change_menu.addAction(self.moveNoteAction)
self.removeNoteAction = QtGui.QAction(self)
self.removeNoteAction.setText("Remove annotation")
self.connect(self.removeNoteAction, QtCore.SIGNAL("triggered()"),
self.slot_remove_note)
self.change_menu.addAction(self.removeNoteAction)
self.setFocusPolicy(QtCore.Qt.StrongFocus)
def dragEnterEvent(self, event):
    """Accept a drag only when it carries URLs (i.e. file drops)."""
    if not event.mimeData().hasUrls():
        event.ignore()
        return
    event.accept()
def dropEvent(self, event):
    """Handle a file drop: collect the local paths and emit the 'dropped' signal."""
    if not event.mimeData().hasUrls():
        event.ignore()
        return
    event.setDropAction(QtCore.Qt.CopyAction)
    event.accept()
    # Convert every dropped URL into a local filesystem path string.
    files = [str(url.toLocalFile()) for url in event.mimeData().urls()]
    self.emit(QtCore.SIGNAL("dropped"), files)
def keyPressEvent(self, event):
    """Keyboard shortcuts for the viewer.

    Ctrl +/-/0 zoom; '/' or the platform Find key opens search; Esc hides
    search; Left/Right flip pages (wrapping); Up/Down scroll the view.
    Does nothing when no document is loaded.
    """
    if not self.parent.Document:
        return
    key = event.key()
    if event.modifiers() == QtCore.Qt.ControlModifier:
        if key in (QtCore.Qt.Key_Plus, QtCore.Qt.Key_Equal):
            self.change_scale_trigger.emit(self.parent.scale + 0.25)
        elif key == QtCore.Qt.Key_Minus and self.parent.scale > 0.25:
            self.change_scale_trigger.emit(self.parent.scale - 0.25)
        elif key == QtCore.Qt.Key_0:
            self.change_scale_trigger.emit(1.0)
        return
    if event.matches(QtGui.QKeySequence.Find) or key == QtCore.Qt.Key_Slash:
        self.show_search_trigger.emit()
    elif key == QtCore.Qt.Key_Escape:
        self.hide_search_trigger.emit()
    elif key == QtCore.Qt.Key_Left:
        # Previous page, wrapping around (original computed page + 1 - 1 + offset).
        target = self.parent.page + self.parent.offset
        self.change_page_trigger.emit(target % self.parent.num_pages)
    elif key == QtCore.Qt.Key_Right:
        # Next page, wrapping around (original computed page + 1 + 1 + offset).
        target = self.parent.page + 2 + self.parent.offset
        self.change_page_trigger.emit(target % self.parent.num_pages)
    elif key == QtCore.Qt.Key_Down:
        scrollbar = self.parent.parent.verticalScrollBar()
        scrollbar.setValue(scrollbar.value() + 120)
    elif key == QtCore.Qt.Key_Up:
        scrollbar = self.parent.parent.verticalScrollBar()
        scrollbar.setValue(scrollbar.value() - 120)
def mouseMoveEvent(self, event):
"""
Event handling mouse movement.
"""
if self.parent.Document is None:
return
try:
note = self.notes[self.closest_id]
has_note = True
except KeyError:
has_note = False
width = self.pt2px(self.parent.CurrentPage.pageSizeF())[0]
x_offset = (self.rect().width() - width)/2.0
if has_note and self.drag:
#print 'Drag note %d' %note.uid
note.pos = self.px2pt(event.x() - x_offset, event.y())
self.parent.update_image()
return
#if has_note and self.find_closest(event.x(), event.y()):
#note.update()
#img_path = note.filename.rstrip('tex') + 'border.png'
#QtGui.QToolTip.showText(event.globalPos(),
#'Note %d: <br /> <img src="%s">'
#% (note.uid, img_path), self)
if (event.x() >= x_offset) and (event.x() <= width + x_offset):
try:
x1 = self.drag_position.x()
y1 = self.drag_position.y()
x2 = event.x()
y2 = event.y()
if x1 > x2:
x1, x2 = x2, x1
if y1 > y2:
y1, y2 = y2, y1
#print QtCore.QRect(QtCore.QPoint(x1, y1), QtCore.QPoint(x2, y2))
self.rubber_band.setGeometry(QtCore.QRect(QtCore.QPoint(x1, |
ibc/MediaSoup | worker/deps/catch/.conan/build.py | Python | isc | 3,044 | 0.001643 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import re
from cpt.packager import ConanMultiPackager
from cpt.ci_manager import CIManager
from cpt.printer import Printer
class BuilderSettings(object):
@property
def username(self):
""" Set catchorg as package's owner
"""
return os.getenv("CONAN_USERNAME", "catchorg")
@property
def login_username(self):
""" Set Bintray login username
"""
return os.getenv("CONAN_LOGIN_USERNAME", "horenmar")
@property
def upload(self):
""" Set Catch2 repository to be used on upload.
The upload server address could be customized by env var
CONAN_UPLOAD. If not defined, the method will check the branch name.
Only master or CONAN_STABLE_BRANCH_PATTERN will be accepted.
The master branch will be pushed to testing channel, because it does
not match the stable pattern. Otherwise it will upload to stable
channel.
"""
return os.getenv("CONAN_UPLOAD", "https://api.bintray.com/conan/catchorg/catch2")
@property
def upload_only_when_stable(self):
""" Force to upload when running over tag branch
"""
return os.getenv("CONAN_UPLOAD_ONLY_WHEN_STABLE", "True").lower() in ["true", "1", "yes"]
@property
def stable_branch_pattern(self):
""" Only upload the package the branch name is like a tag
"""
return os.getenv("CONAN_STABLE_BRANCH_PATTERN", r"v\d+\.\d+\.\d+")
@property
def reference(self):
""" Read project version from branch create Conan reference
"""
return os.getenv("CONAN_REFERENCE", "Catch2/{}".format(self._version))
@property
def channel(self):
""" Default Conan package channel when not stable
"""
return os.getenv("CONAN_CHANNEL", "testing")
@property
def _version(self):
""" Get version name from cmake file
"""
pattern = re.compile(r"project\(Catch2 LANGUAGES CXX VERSION (\d+\.\d+\.\d+)\)")
version = "latest"
with open("CMakeLists.txt") as file:
for line in fi | le:
result = pattern.search(line)
if result:
version = result.group(1)
return version
@property
d | ef _branch(self):
""" Get branch name from CI manager
"""
printer = Printer(None)
ci_manager = CIManager(printer)
return ci_manager.get_branch()
if __name__ == "__main__":
settings = BuilderSettings()
builder = ConanMultiPackager(
reference=settings.reference,
channel=settings.channel,
upload=settings.upload,
upload_only_when_stable=settings.upload_only_when_stable,
stable_branch_pattern=settings.stable_branch_pattern,
login_username=settings.login_username,
username=settings.username,
test_folder=os.path.join(".conan", "test_package"))
builder.add()
builder.run()
|
d-e-e-p/generate_nametags_with_barcodes | generate_nametags_with_barcodes.py | Python | gpl-3.0 | 22,514 | 0.014302 | #!/usr/bin/python
#
# generate_nametags_with_barcodes.py
# Copyright (C) 2016 Sandeep M
#
# every year an elementary school in california runs a festival where families
# sign up for parties and events, as well as bid for auctions and donations.
# each family is issued some stickers with unique barcode to make it easier
# to sign up.
#
# i couldn't figure out how to get avery on-line mailmerge to do all i wanted
# (scale fonts to fit, conditionally print parent's names, repeat labels etc)
# so here we are.
#
# uses:
# pylabels, a Python library to create PDFs for printing labels.
# Copyright (C) 2012, 2013, 2014 Blair Bonnett
#
# ReportLab open-source PDF Toolkit
# (C) Copyright ReportLab Europe Ltd. 2000-2015
#
# openpyxl, a Python library to read/write Excel 2010 xlsx/xlsm/xltx/xltm files.
#
# generate_nametags_with_barcodes.py is free software:
# you can redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software Foundation,
# either version 3 of the License, or (at your option) any later version.
#
# generate_nametags_with_barcodes.py is distributed in the hope that it
# will be useful, but WITHOUT ANY # WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
# ok, here we go:
from reportlab.graphics import renderPDF
from reportlab.graphics import shapes
from reportlab.graphics.barcode import code39, code128, code93
from reportlab.graphics.barcode import eanbc, qr, usps
from reportlab.graphics.shapes import Drawing
from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.lib.units import mm, inch
from reportlab.pdfbase.pdfmetrics import registerFont, stringWidth
from reportlab.pdfbase.ttfonts import TTFont
from reportlab.pdfgen import canvas
from reportlab.graphics.barcode import getCodes, getCodeNames, createBarcodeDrawing
import labels
import os.path
import random
random.seed(187459)
# for excel reading
from openpyxl import load_workbook
from pprint import pprint
# for utils
from collections import OrderedDict
import re
#----------------------------------------------------------------------
# Create a page based on Avery 5160:
# portrait (8.5" X 11") sheets with 3 columns and 10 rows of labels.
#
#----------------------------------------------------------------------
def createAvery5160Spec():
    """Build a pylabels Specification matching Avery 5160 sheets.

    Portrait letter (8.5" x 11") sheets with 3 columns and 10 rows of
    2.63" x 1.00" labels.  All dimensions are converted from inches to
    millimetres, which labels.Specification expects.

    Returns:
        labels.Specification for the sheet layout.
    """
    f = 25.4 # conversion factor from inch to mm
    # Compulsory arguments.
    sheet_width = 8.5 * f
    sheet_height = 11.0 * f
    columns = 3
    rows = 10
    label_width = 2.63 * f
    label_height = 1.00 * f
    # Optional arguments; missing ones will be computed later.
    left_margin = 0.19 * f
    column_gap = 0.12 * f
    right_margin = 0
    top_margin = 0.50 * f
    row_gap = 0
    bottom_margin = 0
    # Optional arguments with default values.
    left_padding = 1
    right_padding = 1
    top_padding = 1
    bottom_padding = 1
    corner_radius = 2
    padding_radius = 0
    background_filename="bg.png"
    # NOTE(review): right_margin, bottom_margin and background_filename are
    # currently unused -- the matching keyword args below are commented out
    # so pylabels computes/defaults them instead.
    #specs = labels.Specification(210, 297, 3, 8, 65, 25, corner_radius=2)
    # units = mm !
    specs = labels.Specification(
        sheet_width, sheet_height,
        columns, rows,
        label_width, label_height,
        left_margin = left_margin ,
        column_gap = column_gap ,
        # right_margin = right_margin ,
        top_margin = top_margin ,
        row_gap = row_gap ,
        # bottom_margin = bottom_margin ,
        left_padding = left_padding ,
        right_padding = right_padding ,
        top_padding = top_padding ,
        bottom_padding = bottom_padding ,
        corner_radius = corner_radius ,
        padding_radius = padding_radius ,
        #background_filename=background_filename,
        )
    return specs
#----------------------------------------------------------------------
# adjust fontsize down until it fits a width/height limit
# should really range for value instead of timidly crepping towards target
#----------------------------------------------------------------------
def fit_text_in_area(the_text, font_name, text_width_limit, text_height_limit):
    """Return a reportlab shapes.String sized to fit a bounding box.

    The font size starts at text_height_limit; if the rendered width exceeds
    text_width_limit it is scaled down in a single step.  Because rendered
    string width is proportional to font size, this solves for the exact
    fitting size, replacing the original loop that shrank by 5% per
    iteration (which the author's own comment flagged as "timidly creeping
    towards target").

    Parameters:
        the_text: string to render.
        font_name: registered font name for stringWidth/String.
        text_width_limit: maximum rendered width (points).
        text_height_limit: maximum font size / height (points).

    Returns:
        shapes.String anchored at (0, 0) with textAnchor="start".
    """
    font_size = text_height_limit
    text_width = stringWidth(the_text, font_name, font_size)
    if text_width > text_width_limit and text_width > 0:
        # width scales linearly with font size -> one-step exact fit
        font_size *= text_width_limit / float(text_width)
    return shapes.String(0, 0, the_text, fontName=font_name,
                         fontSize=font_size, textAnchor="start")
#----------------------------------------------------------------------
# generate strings of family name from line data
#----------------------------------------------------------------------
def get_labels_from_data (data):
    """Extract the five label fields for one family's sticker.

    Returns a tuple (barcode_id, family_name, parent_names, child_names,
    label_index).  Any text field containing the placeholder pattern
    "Zzzzzzz" is rendered as a single blank space.
    """
    placeholder = "Zzzzzzz"

    def blank_if_placeholder(text):
        # entries carrying the sentinel pattern are printed blank
        return " " if placeholder in text else text

    # section1: the actual barcode number
    barcode_id = data['parent_id_for_sticker'][0]
    # section2: family (child's last) name
    family_name = blank_if_placeholder(data['child_last_name'][0])
    # section3: parent names joined with '&'
    parent_names = blank_if_placeholder(conjunction(data['parent_first_name']))
    # section4: children's names joined with '&'
    child_names = blank_if_placeholder(conjunction(data['child_first_name']))
    # section5: label index (currently always blank)
    label_index = " "
    return (barcode_id, family_name, parent_names, child_names, label_index)
#----------------------------------------------------------------------
# http://stackoverflow.com/questions/21217846/python-join-list-of-strings-with-comma-but-with-some-conditions-code-refractor
#----------------------------------------------------------------------
def conjunction(l, threshold = 5):
    """Join a list of names into a human-friendly string.

    'a & b' for two, 'a, b & c' below the threshold, 'a, b, c, d & 1 other'
    at the threshold, and 'a, b, c, d & +N others' above it.

    Bug fixes vs the original:
      - the final branch referenced an undefined name ``t`` (NameError for
        lists longer than ``threshold``); it now uses ``threshold``.
      - ``map`` is materialized into a list so slicing also works on Python 3.
    """
    length = len(l)
    l = [str(item) for item in l]
    if length <= 2:
        return " & ".join(l)
    elif length < threshold:
        return ", ".join(l[:-1]) + " & " + l[-1]
    elif length == threshold:
        return ", ".join(l[:-1]) + " & 1 other"
    else:
        return ", ".join(l[:threshold - 1]) + " & +{} others".format(length - (threshold - 1))
#----------------------------------------------------------------------
# adjust str height if there are any low-hanging letters (ie decenders)
#----------------------------------------------------------------------
def get_font_height(size, str):
    """Grow the font height by 10% when the text contains descenders.

    Descenders (g, j, p, q, y) hang below the baseline, so such strings
    need a little extra vertical room.
    """
    if re.search(r'[gjpqy]', str):
        size *= 1.1
    return size
#----------------------------------------------------------------------
# Create a callback function to draw each label.
# This will be given the ReportLab drawing object to draw on,
# the dimensions in points, and the data to put on the nametag
#----------------------------------------------------------------------
def write_data(label, width, height, data):
(num1, str1, str2, str3, str4) = get_labels_from_data(data)
pad = 10;
# section 1 : barcode
D = Drawing(width,height)
d = createBarcodeDrawing('Code128', value=num1, barHeight=0.4*inch, humanReadable=True, quiet=False)
#d = createBarcodeDrawing('I2of5', value=the_num, barHeight=10*mm, humanReadable=True)
barcode_width = d.width
barcode_height = d.height
#d.rotate(-90)
#d.translate( - barcode_height ,pad) # translate
d.translate( width-barcode_width-pad/2.0 ,0) # translate
#pprint(d.dumpProperties())
#D.add(d)
#label.add(D)
label.add(d)
rect = shapes.Rect(0, pad, barcode_width + pad, barcode_height+pad)
rect.fillColor = None
rect.strokeColor = random.choice((colors.blue, colors.red, colors.green))
#rect.strokeWidth = d.borderStrokeWidth
#label.add(rect)
# section 2 : room number
#the_text = "gr" + str(data[' |
RiccardoRossi/pyKratos | stokes_ex/square_cavity.py | Python | bsd-2-clause | 4,711 | 0.011675 | from __future__ import print_function, absolute_import, division
import sys
sys.path.append("..")
print(sys.path)
from numpy import *
from pyKratos import *
#example = "cavity"
#example = "gravity"
#example = "shear_x"
example = "inlet"
# add variables to be allocated from the list in variables.py
solution_step_variables = [
VELOCITY_X,
VELOCITY_Y,
PRESSURE,
IS_LAGRANGIAN,
EXTERNAL_FORCE_X,
EXTERNAL_FORCE_Y
]
if example == "gravity":
property_list = {
0: {VISCOSITY: 1.0,
DENSITY: 1.0,
BODY_FORCE_X: 0.0,
BODY_FORCE_Y: -10.0,
}
}
else:
property_list = {
0: {VISCOSITY: 1.0,
DENSITY: 1.0,
BODY_FORCE_X: 0.0,
BODY_FORCE_Y: 0.0,
}
}
#defining a 1 by 1 square
nx = 11
dx = 1.0/(nx-1)
ny = 11
dy = 1.0/(ny-1)
import generate_square
node_list, element_connectivities,face_connectivities = generate_square.GenerateSquare(nx, dx, ny, dy)
#import py_kratos
buffer_size = 3 # store current step and 2 in the past
model_part = ModelPart(buffer_size, solution_step_variables)
model_part.AddNodes(node_list)
model_part.AddProperties(property_list)
model_part.AddElements("stokes_element_2d", element_connectivities)
model_part.AddConditions("neumann_face_condition_2d", face_connectivities)
gid_io = GidIO("cavity.mdpa", "cavity")
bottom_nodes = []
left_nodes = []
right_nodes = []
top_nodes = []
for node in model_part.NodeIterators():
if(node.coordinates[0] <= 0.000001):
left_nodes.append(node)
elif node.coordinates[0] >(nx-1)*dx-0.00001:
right_nodes.append(node)
elif(node.coordinates[1] < 0.00001):
bottom_nodes.append(node)
elif(node.coordinates[1] >(ny-1)*dy-0.0001):
top_nodes.append(node)
if example == "cavity":
for node in bottom_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
for node in left_nodes:
node.Fix(VELOCITY_X | )
node.Fix(VELOCITY_Y)
for node in right_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
for node in top_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
node.SetSolutionStepValue(VELOCITY_X,0,1.0)
#fixing the node at the ce | nter of the bottom face
model_part.Nodes[int(nx/2)+1].Fix(PRESSURE)
elif example=="gravity":
for node in bottom_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
for node in left_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
for node in right_nodes:
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
for node in top_nodes:
pass
elif example == "shear_x":
for node in model_part.NodeIterators():
if(node.coordinates[1] == 0.0):
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
elif(node.coordinates[1] == 1.0):
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
node.SetSolutionStepValue(VELOCITY_X,0,1.0)
elif example == "inlet":
for node in model_part.NodeIterators():
if(node.coordinates[0] == 0.0):
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
node.SetSolutionStepValue(VELOCITY_X,0,1.0)
if(node.coordinates[1] == (ny-1)*dy or node.coordinates[1] == 0.0):
node.Fix(VELOCITY_X)
node.Fix(VELOCITY_Y)
node.SetSolutionStepValue(VELOCITY_X,0,0.0) ##this overwrites the one before on corner nodes
for node in right_nodes:
node.SetSolutionStepValue(EXTERNAL_FORCE_X,0,-100.0)
node.SetSolutionStepValue(EXTERNAL_FORCE_Y,0,0.0)
import gear_scheme
time_scheme = gear_scheme.GearScheme(model_part)
builder_and_solver = builder_and_solver.BuilderAndSolver(
model_part, time_scheme)
strategy = solving_strategy.SolvingStrategy(
model_part, time_scheme, builder_and_solver)
strategy.Initialize()
mesh_name = "test"
gid_io.WriteMesh(model_part, mesh_name)
dt = 0.1
nsteps = 10
for i in range(1,nsteps):
time = i*dt
model_part.CloneTimeStep(time)
print("time = ", time)
strategy.Solve()
gid_io.WriteNodalResults(PRESSURE, model_part.NodeIterators(), time)
gid_io.WriteNodalResults(VELOCITY, model_part.NodeIterators(), time)
#plot_contour.PlotContour(model_part.NodeIterators(), VELOCITY_Y, outname )
#for node in model_part.NodeIterators():
#if(node.coordinates[1] < 0.00001):
#print(node.GetSolutionStepValue(TEMPERATURE,0))
# strategy.SpyMatrix()
#import plot_contour
#plot_contour.PlotContour(model_part.NodeIterators(), TEMPERATURE) |
bomjacob/htxaarhuslan | main/migrations/0022_auto_20161208_2216.py | Python | mit | 679 | 0.001473 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.2 on 2016-12-08 21:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('main', '0021_auto_20161208_1214'),
]
operations = [
migrations.AlterField(
model_name='tournam | entteam',
name='name',
field=models.CharField(max_length=255, verbose_name='holdnavn'),
),
migrations.AlterField(
model_name='tournamentteam',
name='profiles',
field=models.ManyToManyField(to='main.Profile', verbose_name='medlemmer | '),
),
]
|
windyuuy/opera | chromium/src/chrome/common/extensions/docs/server2/chained_compiled_file_system.py | Python | bsd-3-clause | 3,233 | 0.007733 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from compiled_file_system import CompiledFileSystem
from file_system import FileNotFoundError
class ChainedCompiledFileSystem(object):
  '''A CompiledFileSystem implementation that fetches data from a chain of
  CompiledFileSystems that have different file systems and separate cache
  namespaces.

  The rules for the compiled file system chain are:
    - Versions are fetched from the first compiled file system's underlying
      file system.
    - Each compiled file system is read in the reverse order (the last one is
      read first). If the version matches, return the data. Otherwise, read
      from the previous compiled file system until the first one is read.

  It is used to chain compiled file systems whose underlying file systems are
  slightly different. This makes it possible to reuse cached compiled data in
  one of them without recompiling everything that is shared by them.
  '''
  class Factory(CompiledFileSystem.Factory):
    '''Creates ChainedCompiledFileSystems from a chain of
    (CompiledFileSystem.Factory, FileSystem) pairs.
    '''
    def __init__(self,
                 factory_and_fs_chain):
      self._factory_and_fs_chain = factory_and_fs_chain

    def Create(self, populate_function, cls, category=None):
      # Create one compiled file system per (factory, fs) pair and wrap the
      # resulting (compiled_fs, fs) chain.
      return ChainedCompiledFileSystem(
          [(factory.Create(populate_function, cls, category), fs)
           for factory, fs in self._factory_and_fs_chain])

  def __init__(self, compiled_fs_chain):
    # compiled_fs_chain: non-empty list of (CompiledFileSystem, FileSystem).
    assert len(compiled_fs_chain) > 0
    self._compiled_fs_chain = compiled_fs_chain

  def GetFromFile(self, path, binary=False):
    '''Returns the compiled data for |path|, preferring any chained compiled
    file system whose cached version matches the first file system's version.
    '''
    # It's possible that a new file is added in the first compiled file system
    # and it doesn't exist in other compiled file systems.
    try:
      first_compiled_fs, first_file_system = self._compiled_fs_chain[0]
      # The first file system contains both files of a newer version and files
      # shared with other compiled file systems. We are going to try each
      # compiled file system in the reverse order and return the data when
      # version matches. Data cached in other compiled file system will be
      # reused whenever possible so that we don't need to recompile things that
      # are not changed across these file systems.
      version = first_file_system.Stat(path).version
      for compiled_fs, _ in reversed(self._compiled_fs_chain):
        if compiled_fs.StatFile(path) == version:
          return compiled_fs.GetFromFile(path, binary)
    except FileNotFoundError:
      pass
    # Try first operation again to generate the correct stack trace
    return first_compiled_fs.GetFromFile(path, binary)

  def GetFromFileListing(self, path):
    '''Same strategy as GetFromFile, but for compiled directory listings.
    |path| is normalized to end with '/'.
    '''
    if not path.endswith('/'):
      path += '/'
    try:
      first_compiled_fs, first_file_system = self._compiled_fs_chain[0]
      version = first_file_system.Stat(path).version
      for compiled_fs, _ in reversed(self._compiled_fs_chain):
        if compiled_fs.StatFileListing(path) == version:
          return compiled_fs.GetFromFileListing(path)
    except FileNotFoundError:
      pass
    # Try first operation again to generate the correct stack trace
    return first_compiled_fs.GetFromFileListing(path)
|
breuleux/bugland | bugland/premade.py | Python | bsd-3-clause | 796 | 0.028894 |
from gen import *
from dataset import *
# TETROMINO
tetromino_gen = lambda w, h: TwoGroups("tetrisi/tetriso/tetrist/tetrisl/tetrisj/tetriss/tetrisz",
| 1010, w, h,
n1 = 1, n2 = 2, rot = | True, task = 1)
tetromino = lambda w, h: BugPlacer(tetromino_gen(w, h), True)
tetromino10x10 = tetromino(10, 10)
tetromino16x16 = tetromino(16, 16)
# PENTOMINO
pentomino_gen = lambda w, h: TwoGroups("pentl/pentn/pentp/pentf/penty/pentj/pentn2/pentq/pentf2/penty2",
2020, w, h,
n1 = 1, n2 = 2, rot = True, task = 1)
pentomino = lambda w, h: BugPlacer(pentomino_gen(w, h), True)
pentomino10x10 = pentomino(10, 10)
pentomino16x16 = pentomino(16, 16)
|
jiayuzhou/pyProxSolver | org/jiayu/optimization/smooth.py | Python | gpl-2.0 | 563 | 0.039076 | '''
A set of (smooth) loss functions.
Created on Oct 2, 2014
@author: jiayu.zhou
'''
import numpy as np;
def least_squa | res(w, X, y):
'''
least squares loss.
MATLAB verified function.
f(x) = 1/2 * ||X * w - y||_F^2.
Parameters
----------
w: np.matrix
X: np.matrix
y: np.matrix
Returns
----------
'''
Xw_y = np.dot(X, w) - y;
f = 0.5 * np.linalg.norm(Xw_y, 'fro')**2;
g | = np.dot(X.T, Xw_y);
g = g.reshape(g.shape[0] * g.shape[1] , 1, order = 'F');
return [f, g]; |
Brocade-OpenSource/OpenStack-DNRM-Neutron | neutron/db/migration/alembic_migrations/versions/3b54bf9e29f7_nec_plugin_sharednet.py | Python | apache-2.0 | 2,645 | 0.001134 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""NEC plugin sharednet
Revision ID: 3b54bf9e29f7
Revises: 511471cc46b
Create Date: 2013-02-17 09:21:48.287134
"""
# revision identifiers, used by Alembic.
revision = '3b54bf9e29f7'
down_revision = '511471cc46b'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nec.nec_plugin.NECPluginV2'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugin=None, options=None):
if not migration.should_run(active_plugin, migration_for_plugins):
return
op.create_table(
'ofctenantmappings',
sa.Column('ofc_id', sa.String(length=255), nullable=False),
sa.Column('quantum_id', sa.String(length=36), nullable=False),
sa.PrimaryKeyConstraint('quantum_id'),
sa.UniqueConstraint('ofc_id')
)
op.create_table(
'ofcnetworkmappings',
sa.Column('ofc_id', sa.String(length=255), nullable=False),
sa.Column('quantum_id', sa.String(length=36), nullable=False),
sa.PrimaryKeyConstraint('quantum_id'),
sa.UniqueConstraint('ofc_id')
)
op.create_table(
'ofcportmappings',
sa.Column('ofc_id', sa.String(length=255), nullable=False),
sa.Column('quantum_id', sa.String(length=36), n | ullable=False),
sa.PrimaryKeyConstraint('quantum_id'),
sa.UniqueConstraint('ofc_id')
)
op.create_table(
'ofcfiltermappings',
sa.Column('ofc_id', sa.String(length=255), nullable=False),
sa.Column('quantum_id', sa.String(length=36), nullable=False),
| sa.PrimaryKeyConstraint('quantum_id'),
sa.UniqueConstraint('ofc_id')
)
def downgrade(active_plugin=None, options=None):
    """Drop the NEC plugin OFC mapping tables (reverse of upgrade)."""
    if not migration.should_run(active_plugin, migration_for_plugins):
        return
    # Drop in the reverse order of creation.
    for table_name in ('ofcfiltermappings',
                       'ofcportmappings',
                       'ofcnetworkmappings',
                       'ofctenantmappings'):
        op.drop_table(table_name)
|
cokelaer/spectrum | test/test_correlog.py | Python | bsd-3-clause | 1,904 | 0.006828 | from spectrum import CORRELOGRAMPSD, CORRELATION, pcorrelogram, marple_data
from spectrum import data_two_freqs
from pylab import log10, plot, savefig, linspace
from numpy.testing import assert_array_almost_equal, assert_almost_equal
def test_correlog():
    """Check the auto-PSD of the Marple data (lag=15) against known-good
    reference values at a handful of frequency bins."""
    psd = CORRELOGRAMPSD(marple_data, marple_data, lag=15)
    assert_almost_equal(psd[0], 0.138216970)
    assert_almost_equal(psd[1000-1], 7.900110787)
    assert_almost_equal(psd[2000-1], 0.110103858)
    assert_almost_equal(psd[3000-1], 0.222184134)
    assert_almost_equal(psd[4000-1], -0.036255277)
    assert_almost_equal(psd[4096-1], 0.1391839711)
    # Returned so create_figure() can reuse the computed PSD.
    return psd
def test_correlog_auto_cross():
    """Same as test_correlog but x and y provided"""
    # The auto-spectrum (x alone) must equal the cross-spectrum of x with
    # itself, for both the default and the 'CORRELATION' estimator.
    psd1 = CORRELOGRAMPSD(marple_data, lag=16)
    psd2 = CORRELOGRAMPSD(marple_data, marple_data, lag=16)
    assert_array_almost_equal(psd1, psd2)

    psd1 = CORRELOGRAMPSD(marple_data, lag=16, correlation_method='CORRELATION')
    psd2 = CORRELOGRAMPSD(marple_data, marple_data, lag=16, correlation_method='CORRELATION')
    assert_array_almost_equal(psd1, psd2)
def test_correlog_correlation_method():
"""test correlogramPSD playing with method argument"""
psd1 = CORRELOGRAMPSD(marple_data, lag=16, correlation_method='CORRELATION')
psd2 = CORRELOGRAMPSD(marple_data, marple_data, lag=16, correlation_metho | d='xcorr')
assert_array_almost_equal(psd1, psd2)
def test_pcorrelogram_class():
    """Smoke-test the pcorrelogram class API: call, repr and plot."""
    p = pcorrelogram(marple_data, lag=16)
    p()
    print(p)
    p = pcorrelogram(data_two_freqs(), lag=16)
    p.plot()
    print(p)
def test_CORRELOGRAMPSD_others():
    # Exercise the NFFT=None code path.
    p = CORRELOGRAMPSD(marple_data, marple_data, lag=16, NFFT=None)
def create_figure():
psd = test_correlog()
f = linspace(-0.5, 0.5, len(psd))
psd = cshift(psd, len(psd)/2)
plot(f, 10*log10(psd/max(psd)) | )
savefig('psd_corr.png')
if __name__ == "__main__":
create_figure()
|
gale320/newfies-dialer | newfies/appointment/templatetags/appointment_tags.py | Python | mpl-2.0 | 1,283 | 0.002338 | #
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <info@star2billing.com>
#
from django.template.defaultfilters import register
from appointment.constants import EVENT_STATUS, ALARM_STATUS, ALARM_METHOD
@register.filter(name='event_status')
def event_status(value):
    """Event Status Templatetag.

    Maps an event status code to its UTF-8 encoded label; returns '' for
    falsy or unknown values.
    """
    if not value:
        return ''
    STATUS = dict(EVENT_STATUS)
    try:
        # NOTE(review): .encode('utf-8') yields bytes on Python 3; this code
        # targets Python 2 era Django where it produces a str.
        return STATUS[value].encode('utf-8')
    except Exception:
        # was a bare "except:", which also trapped SystemExit/KeyboardInterrupt
        return ''
@register.filter(name='ala | rm_status')
def alarm_status(value):
"""Alarm Status Templatetag"""
if not value:
return ''
STATUS = dict(ALARM_STATUS)
try:
return ST | ATUS[value].encode('utf-8')
except:
return ''
@register.filter(name='alarm_method')
def alarm_method(value):
    """Alarm Method Templatetag.

    Maps an alarm method code to its UTF-8 encoded label; returns '' for
    falsy or unknown values.
    """
    if not value:
        return ''
    METHOD = dict(ALARM_METHOD)
    try:
        # NOTE(review): .encode('utf-8') yields bytes on Python 3; this code
        # targets Python 2 era Django where it produces a str.
        return METHOD[value].encode('utf-8')
    except Exception:
        # was a bare "except:", which also trapped SystemExit/KeyboardInterrupt
        return ''
|
clchiou/garage | py/g1/bases/g1/bases/times.py | Python | mit | 364 | 0 | __all__ = [
'Units',
'convert',
]
import enum
class Units(enum.Enum):
    """Time units; each member's value is its decimal exponent relative to
    seconds (used by convert() to compute the scale factor)."""
    SECONDS = 0
    MILLISECONDS = -3
    MICROSECONDS = -6
    NANOSECONDS = -9
def convert(source_unit, target_unit, time):
    """Convert *time* from *source_unit* to *target_unit*.

    When the units are identical the value is returned unchanged (preserving
    its type); otherwise it is scaled by the difference of the units'
    decimal exponents.
    """
    if source_unit is not target_unit:
        time = time * 10**(source_unit.value - target_unit.value)
    return time
|
alejandro-mc/BDM-DDD | value_noisecomplaints/getLoudMusicComp.py | Python | mit | 2,652 | 0.019985 | import pyspark
| import operator
import sys
#311 call 2010 to present csv
#0 Unique Key,Created Date,Closed Date,Agency,Agency Name,
#5 Compla | int Type,Descriptor,Location Type,Incident Zip,Incident Address,
#10 Street Name,Cross Street 1,Cross Street 2,Intersection Street 1,
#14 Intersection Street 2,Address Type,City,Landmark,Facility Type,Status,
#20 Due Date,Resolution Description,Resolution Action Updated Date,
#23 Community Board,Borough,X Coordinate (State Plane),Y Coordinate (State Plane),
#27 Park Facility Name,Park Borough,School Name,School Number,School Region,
#32 School Code,School Phone Number,School Address,School City,School State,
#37 School Zip,School Not Found,School or Citywide Complaint,Vehicle Type,
#41 Taxi Company Borough,Taxi Pick Up Location,Bridge Highway Name,
#44 Bridge Highway Direction,Road Ramp,Bridge Highway Segment,Garage Lot Name,
#48 Ferry Direction,Ferry Terminal Name,Latitude,Longitude,Location
def mapToLots(records):
    """Spark mapPartitions function: map 311 noise-complaint CSV rows to the
    tax lots whose bounding boxes contain the complaint location, yielding
    (lot, 1) pairs for counting.
    """
    # Imported inside the function so each Spark worker resolves them locally.
    import rtree
    import csv
    import os
    # NOTE(review): 0777 is Python-2 octal syntax; the rtree index files must
    # be writable because Rtree opens them read/write.
    os.chmod('plutoindex.idx',0777)
    os.chmod('plutoindex.dat',0777)
    file_index = rtree.index.Rtree('plutoindex')
    for record in records:
        list_record=[]
        # csv.reader handles quoted commas inside the single CSV line.
        for line in csv.reader([record.encode('utf-8')]):
            list_record = line
        if len(list_record) < 27:
            continue  # short/malformed row: coordinate columns missing
        compType = list_record[5].upper()   # Complaint Type
        descriptor = list_record[6].upper() # Descriptor
        # Keep only "Noise ... Loud Music/Party" complaints.
        if compType.count('NOISE') < 1 or descriptor.count('LOUD MUSIC/PARTY') < 1:
            continue
        xcoord = list_record[25].strip()  # X Coordinate (State Plane)
        ycoord = list_record[26].strip()  # Y Coordinate (State Plane)
        if all((xcoord,ycoord)):
            #check intersection
            xcoord = int(xcoord)
            ycoord = int(ycoord)
            # Point query: yield every lot whose bbox contains the complaint.
            for lot in file_index.intersection((xcoord,ycoord),objects = True):
                yield (lot.object,1)
def mapResUnits(pairs):
    """Spark mapPartitions function: for each (lot, complaint_count) pair,
    look the lot up in the pickled PLUTO dictionary and yield
    (property_value, complaints_per_residential_unit).
    """
    import pickle
    with open('plutodict','rb') as fi:
        pluto_dict = pickle.load(fi)
    for pair in pairs:
        dict_entry = pluto_dict[pair[0]]
        # presumably dict_entry is (residential_units, property_value) -- TODO confirm
        property_value = dict_entry[1]
        res_units = dict_entry[0]
        if res_units < 1:
            continue  # skip lots with no residential units (avoid divide-by-zero)
        yield (property_value, pair[1] / float(res_units))#pair[1] = number of noise complaints
if __name__=='__main__':
    # Usage: spark-submit getLoudMusicComp.py <311 csv input> <output dir>
    if len(sys.argv)<3:
        print "Usage: <input files> <output path>"
        sys.exit(-1)
    sc = pyspark.SparkContext()
    calls311 = sc.textFile(sys.argv[1])
    # Count complaints per lot, then normalize by residential units per lot.
    output = calls311.mapPartitions(mapToLots).reduceByKey(operator.add).\
        mapPartitions(mapResUnits)
    output.saveAsTextFile(sys.argv[-1])
|
JeffHoogland/bodhi3packages | python3-efl-i386/usr/lib/python3.4/dist-packages/efl/__init__.py | Python | bsd-3-clause | 56 | 0.035714 |
_ | _version__ = "1.12.0"
__version_inf | o__ = ( 1, 12, 0 )
|
aerospike/aerospike-admin | test/e2e/util.py | Python | apache-2.0 | 6,156 | 0.000975 | # Copyright 2013-2021 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import re
from lib.utils import util
def parse_record(parent_field, record):
    """Flatten a nested asadm record into parallel name/value lists.

    Each leaf is a dict carrying a 'converted' (preferred) or 'raw' value;
    its name is the space-joined path of keys from *parent_field* down.
    Raises Exception for any value that is not a dict.

    Returns:
        (field_names, field_values) as two parallel lists.
    """
    names = []
    values = []
    for key, entry in record.items():
        if not isinstance(entry, dict):
            raise Exception("Unhandled parsing")
        path = parent_field + [key]
        if "converted" in entry:
            names.append(" ".join(path))
            values.append(entry["converted"])
        elif "raw" in entry:
            names.append(" ".join(path))
            values.append(entry["raw"])
        else:
            # No leaf value here: must be a subgroup, recurse into it.
            sub_names, sub_values = parse_record(path, entry)
            names.extend(sub_names)
            values.extend(sub_values)
    return names, values
def parse_output(actual_out={}, horizontal=False, header_len=2, merge_header=True):
    """
    Common parser for all show commands. Returns a tuple of:
    @param title: title line of the output
    @param description: optional description line
    @param data_names: list of flattened field names (taken from the first
           record; every record is assumed to share the same fields)
    @param data_values: list of per-record value lists
    @param num_records: number of records parsed

    NOTE: horizontal, header_len and merge_header are accepted for call-site
    compatibility but are currently unused.  The actual_out={} default is
    only ever read, never mutated.
    """
    title = actual_out["title"]
    description = actual_out.get("description", "")
    # Fixed: was initialised as {} although it only ever holds a list.
    data_names = []
    data_values = []
    num_records = 0
    for group in actual_out["groups"]:
        for record in group["records"]:
            temp_names, temp_values = parse_record([], record)
            # Capture the field names once, from the first record.
            if not data_names:
                data_names = temp_names
            data_values.append(temp_values)
            num_records += 1
    return title, description, data_names, data_values, num_records
def get_separate_output(in_str=""):
_regex = re.compile(r"((?<=^{).*?(?=^}))", re.MULTILINE | re.DOTALL)
out = re.findall(_regex, in_str)
ls = []
for item in out:
item = remove_escape_sequence(item)
item = "{" + item + "}"
ls.append(json.loads(item))
return ls
def capture_separate_and_parse_output(rc, commands):
actual_stdout = util.capture_stdout(rc.execute, commands)
| separated_stdout = get_separate_output(actual_stdout)
result = parse_output(separated_stdout[0])
return result
def get_merged_header(*lines):
h = [[_f for _f in _h.split(" ") if _f] for _h in lines]
header = []
if len(h) == 0 or any(len(h[i]) != len(h[i + 1]) for i in range(len(h) - 1)):
return header
for idx in range(len(h[0])):
header_i = h[0][idx]
for jdx in range(len(h) - 1):
if | h[jdx + 1][idx] == ".":
break
header_i += " " + h[jdx + 1][idx]
header.append(header_i)
return header
def check_for_subset(actual_list, expected_sub_list):
if not expected_sub_list:
return True
if not actual_list:
return False
for i in expected_sub_list:
if isinstance(i, tuple):
found = False
for s_i in i:
if s_i is None:
found = True
break
if s_i in actual_list:
found = True
break
if not found:
print(i, actual_list)
return False
else:
if i not in actual_list:
print(i)
return False
return True
# Checks that a single expected list has a subset equal to actual_list.
def check_for_subset_in_list_of_lists(actual_list, list_of_expected_sub_lists):
for expected_list in list_of_expected_sub_lists:
if check_for_subset(actual_list, expected_list):
return True
return False
def remove_escape_sequence(line):
ansi_escape = re.compile(r"(\x9b|\x1b\[)[0-?]*[ -\/]*[@-~]")
return ansi_escape.sub("", line)
def check_for_types(actual_lists, expected_types):
def is_float(x):
try:
float(x)
if "." in x:
return True
return False
except ValueError:
return False
def is_int(x):
try:
int(x)
if "." in x:
return False
return True
except ValueError:
return False
def is_bool(x):
if x in ("True", "true", "False", "false"):
return True
return False
def check_list_against_types(a_list):
if a_list is None or expected_types is None:
return False
if len(a_list) == len(expected_types):
for idx in range(len(a_list)):
typ = expected_types[idx]
val = a_list[idx]
if typ == int:
if not is_int(val):
return False
elif typ == float:
if not is_float(val):
return False
elif typ == bool:
if not is_bool(val):
return False
elif typ == str:
if any([is_bool(val), is_int(val), is_float(val)]):
return False
else:
raise Exception("Type is not yet handles in test_util.py", typ)
return True
return False
for actual_list in actual_lists:
if not check_list_against_types(actual_list):
return False
return True
|
ocelot-collab/ocelot | ocelot/cpbd/coord_transform.py | Python | gpl-3.0 | 3,359 | 0.002679 | """
S.Tomin and I.Zagorodnov, 2017, DESY/XFEL
"""
from ocelot.common.globals import *
import logging
logger = logging.getLogger(__name__)
try:
import numexpr as ne
ne_flag = True
except:
logger.debug("coord_transform.py: module NUMEXPR | is not installed. Install it to speed up calculation")
ne_flag = False
def xp_2_xxstg_mad(xp, xxstg, gamref):
# to mad format
N = xp.shape[1]
pref = m_e_eV * np.sqrt(gamref ** 2 - 1)
betaref = np.sqrt(1 - gamref ** -2)
u = np.c_[xp[3], xp[4], xp[5]]
if ne_flag:
sum_u2 = ne.evaluate('sum(u * u, 1)')
gamma = ne.evaluate('sqrt(1 + sum_u2 / m_e_eV ** 2)')
beta = ne.evaluate('sqrt(1 - gamma ** -2)')
else:
gamma = np. | sqrt(1 + np.sum(u * u, 1) / m_e_eV ** 2)
beta = np.sqrt(1 - gamma ** -2)
if np.__version__ > "1.8":
p0 = np.linalg.norm(u, 2, 1).reshape((N, 1))
else:
p0 = np.sqrt(u[:, 0] ** 2 + u[:, 1] ** 2 + u[:, 2] ** 2).reshape((N, 1))
u = u / p0
u0 = u[:, 0]
u1 = u[:, 1]
u2 = u[:, 2]
if ne_flag:
xp0 = xp[0]
xp1 = xp[1]
xp2 = xp[2]
cdt = ne.evaluate('-xp2 / (beta * u2)')
xxstg[0] = ne.evaluate('xp0 + beta * u0 * cdt')
xxstg[2] = ne.evaluate('xp1 + beta * u1 * cdt')
xxstg[5] = ne.evaluate('(gamma / gamref - 1) / betaref')
else:
cdt = -xp[2] / (beta * u2)
xxstg[0] = xp[0] + beta * u0 * cdt
xxstg[2] = xp[1] + beta * u1 * cdt
xxstg[5] = (gamma / gamref - 1) / betaref
xxstg[4] = cdt
xxstg[1] = xp[3] / pref
xxstg[3] = xp[4] / pref
return xxstg
def xxstg_2_xp_mad(xxstg, xp, gamref):
# from mad format
N = xxstg.shape[1]
#pref = m_e_eV * np.sqrt(gamref ** 2 - 1)
betaref = np.sqrt(1 - gamref ** -2)
if ne_flag:
xxstg1 = xxstg[1]
xxstg3 = xxstg[3]
xxstg5 = xxstg[5]
gamma = ne.evaluate('(betaref * xxstg5 + 1) * gamref')
beta = ne.evaluate('sqrt(1 - gamma ** -2)')
pz2pref = ne.evaluate('sqrt(((gamma * beta) / (gamref * betaref)) ** 2 - xxstg1 ** 2 - xxstg3 ** 2)')
else:
gamma = (betaref * xxstg[5] + 1) * gamref
beta = np.sqrt(1 - gamma ** -2)
pz2pref = np.sqrt(((gamma * beta) / (gamref * betaref)) ** 2 - xxstg[1] ** 2 - xxstg[3] ** 2)
u = np.c_[xxstg[1] / pz2pref, xxstg[3] / pz2pref, np.ones(N)]
if np.__version__ > "1.8":
norm = np.linalg.norm(u, 2, 1).reshape((N, 1))
else:
norm = np.sqrt(u[:, 0] ** 2 + u[:, 1] ** 2 + u[:, 2] ** 2).reshape((N, 1))
u = u / norm
u0 = u[:, 0]
u1 = u[:, 1]
u2 = u[:, 2]
if ne_flag:
xxstg0 = xxstg[0]
xxstg2 = xxstg[2]
xxstg4 = xxstg[4]
xp[0] = ne.evaluate('xxstg0 - u0 * beta * xxstg4')
xp[1] = ne.evaluate('xxstg2 - u1 * beta * xxstg4')
xp[2] = ne.evaluate('-u2 * beta * xxstg4')
xp[3] = ne.evaluate('u0 * gamma * beta * m_e_eV')
xp[4] = ne.evaluate('u1 * gamma * beta * m_e_eV')
xp[5] = ne.evaluate('u2 * gamma * beta * m_e_eV')
else:
xp[0] = xxstg[0] - u0 * beta * xxstg[4]
xp[1] = xxstg[2] - u1 * beta * xxstg[4]
xp[2] = -u2 * beta * xxstg[4]
xp[3] = u0 * gamma * beta * m_e_eV
xp[4] = u1 * gamma * beta * m_e_eV
xp[5] = u2 * gamma * beta * m_e_eV
return xp
|
rizkidoank/awsu | awsu/config.py | Python | gpl-3.0 | 11,653 | 0.000944 | """ configuration module for awsu, contains two objects """
import boto3
import sqlite3
import logging
import getpass
import datetime
import configparser
import uuid
import requests
import json
from dateutil.tz import tzutc
from urllib.parse import urlencode, quote_plus
from os import environ
from bs4 import BeautifulSoup
import base64
from lxml import etree
class Credential(object):
""" credential class """
def __init__(self):
self.conn = sqlite3.connect(environ.get('HOME') + '/.aws/config.db')
self.initialize_database('credentials')
def initialize_database(self, table):
cur = self.conn.cursor()
tables = cur.execute(
"SELECT name FROM sqlite_master WHERE type='table'").fetchall()
if not table in tables[0]:
stmt = '''CREATE TABLE %s(
profile text,
access_key text,
secret_key text,
session_token text,
expiration text)
''' % table[0]
cur.execute(stmt)
self.conn.commit()
def get_session(self, profile="default"):
if profile is None:
profile = "default"
cur = self.conn.cursor()
self.session = cur.execute(
"SELECT * FROM credentials WHERE profile=? LIMIT 1", (profile,))
self.session = self.session.fetchone()
if self.session is None or self.is_expired():
if self.is_expired():
cur.execute("DELETE FROM credentials WHERE profile=?", (profile,))
self.conn.commit()
creds = self.get_credentials(profile)
cur.execute("INSERT INTO credentials VALUES(?,?,?,?,?)", creds)
self.conn.commit()
return {
'AWS_ACCESS_KEY_ID': creds[1],
'AWS_SECRET_ACCESS_KEY': creds[2],
'AWS_SESSION_TOKEN': creds[3],
'AWS_SECURITY_TOKEN': creds[3]
}
else:
return {
'AWS_ACCESS_KEY_ID': self.session[1],
'AWS_SECRET_ACCESS_KEY': self.session[2],
'AWS_SESSION_TOKEN': self.session[3],
'AWS_SECURITY_TOKEN': self.session[3]
}
def get_credentials(self, profile="default"):
""" return aws profile environment variables """
if profile is None:
profile = 'default'
# get session token
if profile != 'saml':
session = boto3.Session(profile_name=profile)
sts = boto3.client('sts')
user = User()
token = getpass.getpass("Enter MFA Code : ")
if profile == "default":
res = sts.get_session_token(
DurationSeconds=3600,
SerialNumber=user.mfa,
TokenCode=token
)
elif profile == "saml":
config_file = configparser.RawConfigParser()
config_file.read(environ.get('HOME') + '/.aws/config')
if not config_file.has_section(profile):
config_file.add_section(profile)
username = str(input("Google Email : "))
idp_id = str(input('IDP ID : '))
sp_id = str(input('SP ID : '))
else:
username = config_file.get(profile, 'username')
idp_id = config_file.get(profile, 'idpid')
sp_id = config_file.get(profile, 'spid')
passwd = getpass.getpass('Password : ')
google = GoogleSAML(username, passwd, idp_id, sp_id)
google.auth()
saml_res = google.get_saml_response()
doc = etree.fromstring(base64.b64decode(saml_res))
roles = google.parse_roles(doc)
role_arn, provider = google.pick_one(roles)
config_file.set(profile, 'username', google.username)
config_file.set(profile, 'idpid', google.idp_id)
config_file.set(profile, 'spid', google.sp_id)
config_file.set(profile, 'role_arn', role_arn)
config_file.set( | profile, 'provider', provider)
config_file.set(profile, 'durations', google.duration_seconds)
with open(environ.get('HOME') + '/.aws/config', 'w+') as f:
try:
| config_file.write(f)
finally:
f.close()
print("Assuming " + config_file.get(profile, 'role_arn'))
sts = boto3.client('sts')
res = sts.assume_role_with_saml(
RoleArn=config_file.get(profile, 'role_arn'),
PrincipalArn=config_file.get(profile, 'provider'),
SAMLAssertion=saml_res,
DurationSeconds=config_file.get(profile, 'durations'))
else:
config_file = configparser.RawConfigParser()
config_file.read(environ.get('HOME') + '/.aws/credentials')
role_arn = config_file.get(profile, 'role_arn')
role_name = role_arn.split('/')[-1]
random_identifier = str(uuid.uuid4())[4:]
role_session = ''.join(
[user.username, role_name, random_identifier])
res = sts.assume_role(
RoleArn=role_arn,
RoleSessionName=role_session,
DurationSeconds=3600,
SerialNumber=user.mfa,
TokenCode=token
)
return (
profile,
res['Credentials']['AccessKeyId'],
res['Credentials']['SecretAccessKey'],
res['Credentials']['SessionToken'],
res['Credentials']['Expiration']
)
def clean_environment(self):
""" remove aws environment variables """
for var in list(environ.keys()):
if var.startswith('AWS_'):
del environ[var]
def is_expired(self):
try:
stored_date = self.session[4]
except:
return False
now = datetime.datetime.utcnow()
session_time = datetime.datetime.strptime(
stored_date,
'%Y-%m-%d %H:%M:%S+00:00')
return now > session_time
class User(object):
def __init__(self):
sts = boto3.client('sts')
caller = sts.get_caller_identity()
self.arn = caller['Arn']
self.account_id = caller['Account']
self.username = self.get_username()
self.mfa = self.get_mfa()
def get_username(self):
username = str(self.arn).split('/')[-1]
return username
def get_mfa(self):
mfa = "arn:aws:iam::" + self.account_id + ":mfa/" + self.username
return mfa
class GoogleSAML(object):
def __init__(self, username, passwd, idp_id, sp_id):
""" method for google saml auth init"""
self.username = username
self.password = passwd
self.idp_id = idp_id
self.sp_id = sp_id
self.duration_seconds = 3600
payload = {
'idpid': str(self.idp_id),
'spid': str(self.sp_id),
'forceauthn': 'false'
}
params = urlencode(payload, quote_via=quote_plus)
self.url = "https://accounts.google.com/o/saml2/initsso?" + params
def auth(self):
self.request = requests.Session()
res = self.request.get(self.url)
res.raise_for_status()
page = BeautifulSoup(res.text, 'html.parser')
gaia_loginform = page.find(
'form', {'id': 'gaia_loginform'}).get('action')
payload = {}
payload['gxf'] = page.find('input', {'name': 'gxf'}).get('value')
payload['continue'] = page.find(
'input', {'name': 'continue'}).get('value')
payload['ltmpl'] = page.find('input', {'name': 'ltmpl'}).get('value')
payload['sarp'] = 1
payload['scc'] = 1
payload['oauth'] = page.find('input', {'name': 'oauth'}).get('value')
payload['_utf8'] = page.find('input', {'name': '_utf8'}).get('value')
payload['bgresponse'] = page.find(
'input', {'name': 'bgresponse'}).get('value')
payload['Email'] = self.username
payload['Passwd'] = self.password
|
SEL-Columbia/commcare-hq | corehq/util/zip_utils.py | Python | bsd-3-clause | 1,577 | 0 | import os
import tempfile
from wsgiref.util import FileWrapper
import zipfile
from django.http import HttpResponse
from django.views.generic import View
from corehq.util.view_utils import set_file_download
def make_zip_tempfile(files, compress=True):
compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
fd, fpath = tempfile.mkstemp()
with os.fdopen(fd, 'w') as tmp:
with zipfile.ZipFile(tmp, "w", compression) as z:
for path, data in files:
z.writestr(path, data)
return fpath
class DownloadZip(View):
compress_zip = None
zip_name = None
@property
def zip_mimetype(self):
if self.compress_zip:
return 'application/zip'
else:
return 'application/x-zip-compressed'
def log_errors(self, errors):
raise NotImplementedError()
def iter_files(s | elf):
raise NotImplementedError()
def check_before_zipping(self):
raise NotImplementedError()
def get(self, request, *args, **kwargs):
error_response = sel | f.check_before_zipping()
if error_response:
return error_response
files, errors = self.iter_files()
fpath = make_zip_tempfile(files, compress=self.compress_zip)
if errors:
self.log_errors(errors)
wrapper = FileWrapper(open(fpath))
response = HttpResponse(wrapper, mimetype=self.zip_mimetype)
response['Content-Length'] = os.path.getsize(fpath)
set_file_download(response, self.zip_name)
return response
|
HaroldMills/Vesper | scripts/old_bird_detector_eval/annotate_old_bird_calls.py | Python | mit | 6,952 | 0.005898 | """
Annotates Old Bird call detections in the BirdVox-70k archive.
The annotations classify clips detected by the Old Bird Tseep and Thrush
detectors according to the archive's ground truth call clips.
This script must be run from the archive directory.
"""
from django.db.models import F
from django.db.utils import IntegrityError
import pandas as pd
# Set up Django. This must happen before any use of Django, including
# ORM class imports.
import vesper.util.django_utils as django_utils
django_utils.set_up_django()
from vesper.django.app.models import (
AnnotationInfo, Clip, Processor, Recording, StringAnnotation, User)
import vesper.django.app.model_utils as model_utils
import scripts.old_bird_detector_eval.utils as utils
# Set this `True` to skip actually annotating the Old Bird detections.
# The script will still compute the classifications and print precision,
# recall, and F1 statistics. This is useful for testing purposes, since
# the script runs considerably faster when it doesn't annotate.
ANNOTATE = True
GROUND_TRUTH_DETECTOR_NAME = 'BirdVox-70k'
# The elements of the pairs of numbers are (0) the approximate start offset
# of a call within an Old Bird detector clip, and (1) the approximate
# maximum duration of a call. The units of both numbers are seconds.
DETECTOR_DATA = (
('Old Bird Tseep Detector Redux 1.1', 'Call.High'),
('Old Bird Thrush Detector Redux 1.1', 'Call.Low'),
)
CLASSIFICATION_ANNOTATION_NAME = 'Classification'
CENTER_INDEX_ANNOTATION_NAME = 'Call Center Index'
CENTER_FREQ_ANNOTATION_NAME = 'Call Center Freq'
SAMPLE_RATE = 24000
def main():
rows = annotate_old_bird_calls()
raw_df = create_raw_df(rows)
aggregate_df = create_aggregate_df(raw_df)
add_precision_recall_f1(raw_df)
add_precision_recall_f1(aggregate_df)
print(raw_df.to_csv())
print(aggregate_df.to_csv())
def annotate_old_bird_calls():
center_index_annotation_info = \
AnnotationInfo.objects.get(name=CENTER_INDEX_ANNOTATION_NAME)
center_freq_annotation_info = \
AnnotationInfo.objects.get(name=CENTER_FREQ_ANNOTATION_NAME)
classification_annotation_info = \
AnnotationInfo.objects.get(name=CLASSIFICATION_ANNOTATION_NAME)
user = User.objects.get(username='Vesper')
sm_pairs = model_utils.get_station_mic_output_pairs_list()
ground_truth_detector = Processor.objects.get(
name=GROUND_TRUTH_DETECTOR_NAME)
rows = []
for detector_name, annotation_value in DETECTOR_DATA:
short_detector_name = detector_name.split()[2]
old_bird_detector = Processor.objects.get(name=detector_name)
window = utils.OLD_BIRD_CLIP_CALL_CENTER_WINDOWS[short_detector_name]
for station, mic_output in sm_pairs:
station_num = int(station.name.split()[1])
print('{} {}...'.format(short_detector_name, station_num))
ground_truth_clips = list(model_utils.get_clips(
station=station,
mic_output=mic_output,
detector=ground_truth_detector,
annotation_name=CLASSIFICATION_ANNOTATION_NAME,
annotation_value=annotation_value))
ground_truth_call_center_indices = \
[c.start_index + c.length // 2 for c in ground_truth_clips]
ground_truth_call_count = len(ground_truth_clips)
old_bird_clips = list(model_utils.get_clips(
station=station,
mic_output=mic_output,
detector=old_bird_detector))
old_bird_clip_count = len(old_bird_clips)
clips = [(c.start_index, c.length) for c in old_bird_clips]
matches = utils.match_clips_with_calls(
clips, ground_truth_call_center_indices, window)
old_bird_call_count = len(matches)
rows.append([
short_detector_name, station_num, ground_truth_call_count,
old_bird_call_count, old_bird_clip_count])
if ANNOTATE:
# Clear any existing annotations.
for clip in old_bird_clips:
model_utils.unannotate_clip(
clip, classification_annotation_info,
creating_user=user)
# Create new annotations.
for i, j in matches:
old_bird_clip = old_bird_clips[i]
call_center_index = ground_truth_call_center_indices[j]
ground_truth_clip = ground_truth_clips[j]
| # Annotate Old Bird clip call center index.
model_utils.annotate_clip(
old_bird_clip, center_index_annotation_info,
str(call_center_index), creating_user=user)
# Get ground truth clip call center frequency.
annotations = \
model_utils.get_clip_annotations(ground_truth_clip)
call_center_freq = ann | otations[CENTER_FREQ_ANNOTATION_NAME]
# Annotate Old Bird clip call center frequency.
model_utils.annotate_clip(
old_bird_clip, center_freq_annotation_info,
call_center_freq, creating_user=user)
model_utils.annotate_clip(
old_bird_clip, classification_annotation_info,
annotation_value, creating_user=user)
return rows
def create_raw_df(rows):
columns = [
'Detector', 'Station', 'Ground Truth Calls', 'Old Bird Calls',
'Old Bird Clips']
return pd.DataFrame(rows, columns=columns)
def create_aggregate_df(df):
data = [
sum_counts(df, 'Tseep'),
sum_counts(df, 'Thrush'),
sum_counts(df, 'All')
]
columns = [
'Detector', 'Ground Truth Calls', 'Old Bird Calls', 'Old Bird Clips']
return pd.DataFrame(data, columns=columns)
def sum_counts(df, detector):
if detector != 'All':
df = df.loc[df['Detector'] == detector]
return [
detector,
df['Ground Truth Calls'].sum(),
df['Old Bird Calls'].sum(),
df['Old Bird Clips'].sum()]
def add_precision_recall_f1(df):
p = df['Old Bird Calls'] / df['Old Bird Clips']
r = df['Old Bird Calls'] / df['Ground Truth Calls']
df['Precision'] = to_percent(p)
df['Recall'] = to_percent(r)
df['F1'] = to_percent(2 * p * r / (p + r))
def to_percent(x):
return round(1000 * x) / 10
if __name__ == '__main__':
main()
|
osroom/osroom | apps/modules/category/apis/theme_category.py | Python | bsd-2-clause | 2,262 | 0.000982 | #!/usr/bin/env python
# -*-coding:utf-8-*-
# @Time : 2017/11/1 ~ 2019/9/1
# @Author : Allen Woo
from flask import request
from apps.core.flask.login_manager import osr_login_required
from apps.configs.sys_config import METHOD_WARNING
from apps.core.blueprint import api
from apps.core.flask.permission import permission_required
from apps.core.flask.response import response_format
from apps.modules.category.process.theme_setting_category import categorys, category_add, category_e | dit, \
category_delete, get_category_type
@api.route('/admin/content/theme-category', methods=['GET', 'POST', 'PUT', 'DELETE'])
@osr_login_required
@permission_required(use_default=False)
def api_theme_category():
"""
GET:
action:<str>, 可以为get_category, get_category_type, 默认get_category
1.获取当前用户指定的type的所有category
| action:<str>, 为get_category
type:<str>, 你设置的那几个类别中的类别,在config.py文件中category, 可在网站管理端设置的
theme_name:<str>
2. 获取所有的type: config.py文件中category的所有CATEGORY TYPE
action:<str>, 为get_category_type
theme_name:<str>
解释:
在分类中(category)又分为几种类型(type)
如: type为post有几个category
POST:
添加文集
name:<str>
type:<str>, 只能是你设置的那几个类别,在config.py文件中category, 或者网站管理设置
theme_name:<str>
PUT:
修改文集
id:<str>, post category id
name:<str>
DELETE:
删除文集名称
ids:<array>, post category ids
"""
if request.c_method == "GET":
if not request.argget.all("action") == "get_category_type":
data = categorys(user_id=0)
else:
data = get_category_type()
elif request.c_method == "POST":
data = category_add(user_id=0)
elif request.c_method == "PUT":
data = category_edit(user_id=0)
elif request.c_method == "DELETE":
data = category_delete(user_id=0)
else:
data = {"msg_type": "w", "msg": METHOD_WARNING, "custom_status": 405}
return response_format(data)
|
shogun-toolbox/shogun | examples/undocumented/python/kernel_wave.py | Python | bsd-3-clause | 846 | 0.030733 | #!/usr/bin/env python
from tools.load import LoadMatrix
from numpy import where
import shogun as sg
lm=LoadMatrix()
traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
parameter_list=[[traindat,testdat, 1.0],[traindat,testdat, 10.0]]
def kernel_wave (fm_train_real=traindat,fm_test_real=testdat, theta=1.0):
feats_train=sg.create_features(fm_train_real)
feats_test=sg.creat | e_features(fm_test_real)
distance = sg.create_distance('EuclideanDistance')
kernel = sg.create_kernel('WaveKernel', theta=theta, distance=distance)
kernel.init(feats_train, feats_train)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
if | __name__=='__main__':
print('Wave')
kernel_wave(*parameter_list[0])
|
spulec/moto | moto/organizations/exceptions.py | Python | apache-2.0 | 2,748 | 0.00182 | from moto.core.exceptions import JsonRESTError
class AccountAlreadyRegisteredException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"AccountAlreadyRegisteredException",
"The provided account is already a delegated administrator for your organization.",
)
class AccountNotRegisteredException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"AccountNotRegisteredException",
"The provided account is not a registered delegated administrator for your organization.",
)
class AccountNotFoundException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"AccountNotFoundException", "You specified an account that doesn't exist."
)
class AWSOrganizationsNotInUseException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"AWSOrganizationsNotInUseException",
"Your account is not a member of an organization.",
)
class ConstraintViolationException(JsonRESTError):
code = 400
def __init__(self, message):
super().__init__("ConstraintViolationException", message)
class InvalidInputException(JsonRESTError):
code = 400
def __init__(self, message):
super().__init__("InvalidInputException", message)
class DuplicateOrganizationalUnitException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"DuplicateOrganizationalUnitException",
"An OU with the same name already exists.",
)
class DuplicatePolicyException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"DuplicatePolicyException", "A policy with the same name already exists."
)
class PolicyTypeAlreadyEnabledException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"PolicyTypeAlreadyEnabledException",
"The specified policy type is already enabled.",
)
class PolicyTypeNotEnabledException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"PolicyTypeNotEnabledException",
"This operation can be performed only for enabled policy types.",
)
class RootNotFoundExcept | ion(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"RootNotFoundException", "You specified a root that doesn't exist."
)
class TargetNotFoundException(JsonRESTError):
code = 400
def __init__(self):
super().__init__(
"TargetNotFou | ndException", "You specified a target that doesn't exist."
)
|
mikeing2001/LoopDetection | pox/web/webcore.py | Python | gpl-3.0 | 15,662 | 0.01443 | # Copyright 2011,2012 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
Webcore is a basic web server framework based on the SocketServer-based
BaseHTTPServer that comes with Python. The big difference is that this
one can carve up URL-space by prefix, such that "/foo/*" gets handled by
a different request handler than "/bar/*". I refer to this as "splitting".
You should also be able to make a request handler written without splitting
run under Webcore. This may not work for all request handlers, but it
definitely works for some. :) The easiest way to do this is with the
wrapRequestHandler() function, like so:
from CGIHTTPServer import CGIHTTPRequestHandler as CHRH
core.WebServer.set_handler("/foo", wrapRequestHandler(CHRH))
.. now URLs under the /foo/ directory will let you browse through the
filesystem next to pox.py. If you create a cgi-bin directory next to
pox.py, you'll be able to run executables in it.
For this specific purpose, there's actually a SplitCGIRequestHandler
which demonstrates wrapping a normal request handler while also
customizing it a bit -- SplitCGIRequestHandler shoehorns in functionality
to use arbitrary base paths.
BaseHTTPServer is not very fast and needs to run on its own thread.
It'd actually be great to have a version of this written against, say,
CherryPy, but I did want to include a simple, dependency-free web solution.
"""
from SocketServer import ThreadingMixIn
from BaseHTTPServer import *
from time import sleep
import select
import threading
import random
import hashlib
import base64
from pox.core import core
import os
import posixpath
import urllib
import cgi
import errno
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
log = core.getLogger()
weblog = log.getChild("server")
def _setAttribs (parent, child):
attrs = ['command', 'request_version', 'close_connection',
'raw_requestline', 'requestline', 'path', 'headers', 'wfile',
'rfile', 'server', 'client_address']
for a in attrs:
setattr(child, a, getattr(parent, a))
setattr(child, 'parent', parent)
import SimpleHTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
class SplitRequestHandler (BaseHTTPRequestHandler):
"""
To write HTTP handlers for POX, inherit from this class instead of
BaseHTTPRequestHandler. The interface should be the same -- the same
variables should be set, and the same do_GET(), etc. methods should
be called.
In addition, there will be a self.args which can be specified
when you set_handler() on the server.
"""
# Also a StreamRequestHandler
def __init__ (self, parent, prefix, args):
_setAttribs(parent, self)
self.parent = parent
self.args = args
self.prefix = prefix
self._init()
def _init (self):
"""
This is called by __init__ during initialization. You can
override it to, for example, parse .args.
"""
pass
def handle_one_request (self):
raise RuntimeError("Not supported")
def handle(self):
raise RuntimeError("Not supported")
def _split_dispatch (self, command, handler = None):
if handler is None: handler = self
mname = 'do_' + self.command
if not hasattr(handler, mname):
self.send_error(501, "Unsupported method (%r)" % self.command)
return
me | thod = getattr(handler, mname)
return method()
def log_request (self, code = '-', size = '-'):
weblog.debug(self | .prefix + (':"%s" %s %s' %
(self.requestline, str(code), str(size))))
def log_error (self, fmt, *args):
weblog.error(self.prefix + ':' + (fmt % args))
def log_message (self, fmt, *args):
weblog.info(self.prefix + ':' + (fmt % args))
_favicon = ("47494638396110001000c206006a5797927bc18f83ada9a1bfb49ceabda"
+ "4f4ffffffffffff21f904010a0007002c000000001000100000034578badcfe30b20"
+ "1c038d4e27a0f2004e081e2172a4051942abba260309ea6b805ab501581ae3129d90"
+ "1275c6404b80a72f5abcd4a2454cb334dbd9e58e74693b97425e07002003b")
_favicon = ''.join([chr(int(_favicon[n:n+2],16))
for n in xrange(0,len(_favicon),2)])
class CoreHandler (SplitRequestHandler):
"""
A default page to say hi from POX.
"""
def do_GET (self):
"""Serve a GET request."""
self.do_content(True)
def do_HEAD (self):
"""Serve a HEAD request."""
self.do_content(False)
def do_content (self, is_get):
if self.path == "/":
self.send_info(is_get)
elif self.path.startswith("/favicon."):
self.send_favicon(is_get)
else:
self.send_error(404, "File not found on CoreHandler")
def send_favicon (self, is_get = False):
self.send_response(200)
self.send_header("Content-type", "image/gif")
self.send_header("Content-Length", str(len(_favicon)))
self.end_headers()
if is_get:
self.wfile.write(_favicon)
def send_info (self, is_get = False):
r = "<html><head><title>POX</title></head>\n"
r += "<body>\n<h1>POX Webserver</h1>\n<h2>Components</h2>\n"
r += "<ul>"
for k in sorted(core.components):
v = core.components[k]
r += "<li>%s - %s</li>\n" % (cgi.escape(str(k)), cgi.escape(str(v)))
r += "</ul>\n\n<h2>Web Prefixes</h2>"
r += "<ul>"
m = [map(cgi.escape, map(str, [x[0],x[1],x[3]]))
for x in self.args.matches]
m.sort()
for v in m:
r += "<li><a href='{0}'>{0}</a> - {1} {2}</li>\n".format(*v)
r += "</ul></body></html>\n"
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-Length", str(len(r)))
self.end_headers()
if is_get:
self.wfile.write(r)
class StaticContentHandler (SplitRequestHandler, SimpleHTTPRequestHandler):
# We slightly modify SimpleHTTPRequestHandler to serve from given
# directories and inherit from from Python, but
# modified to serve from given directories and to inherit from
# SplitRequestHandler.
"""
A SplitRequestHandler for serving static content
This is largely the same as the Python SimpleHTTPRequestHandler, but
we modify it to serve from arbitrary directories at arbitrary
positions in the URL space.
"""
server_version = "StaticContentHandler/1.0"
def send_head (self):
# We override this and handle the directory redirection case because
# we want to include the per-split prefix.
path = self.translate_path(self.path)
if os.path.isdir(path):
if not self.path.endswith('/'):
self.send_response(301)
self.send_header("Location", self.prefix + self.path + "/")
self.end_headers()
return None
return SimpleHTTPRequestHandler.send_head(self)
def list_directory (self, dirpath):
# dirpath is an OS path
try:
d = os.listdir(dirpath)
except OSError as e:
if e.errno == errno.EACCES:
self.send_error(403, "This directory is not listable")
elif e.errno == errno.ENOENT:
self.send_error(404, "This directory does not exist")
else:
self.send_error(400, "Unknown error")
return None
d.sort(key=str.lower)
r = StringIO()
r.write("<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 3.2 Final//EN\">\n")
path = posixpath.join(self.prefix, cgi.escape(self.path).lstrip("/"))
r.write("<html><head><title>" + path + "</title></head>\n")
r.write("<body><pre>")
parts = path.rstrip("/").split("/")
r.write('<a href="/">/</a>')
for i,part in enumerate(parts):
link = urllib.quote("/".join(parts[:i+1]))
if i > 0: part += "/"
r.write('<a href="%s">%s</a>' % (link, cg |
zooko/egtp | common/mencode_unittests.py | Python | agpl-3.0 | 13,498 | 0.004075 | #!/usr/bin/env python
#
# Copyright (c) 2001 Autonomous Zone Industries
# Copyright (c) 2002 Bryce "Zooko" Wilcox-O'Hearn
# This file is licensed under the
# GNU Lesser General Public License v2.1.
# See the file COPYING or visit http://www.gnu.org/ for details.
#
__cvsid = '$Id: mencode_unittests.py,v 1.1 2002/06/25 03:54:57 zooko Exp $'
# Python standard library modules
import operator
import random
import traceback
try:
    import unittest
except:
    # Fallback shim so the module still imports on stripped-down Python
    # installs without the unittest package: provides an empty TestCase
    # base class with the same nesting the code below expects.
    # NOTE(review): bare `except:` also swallows KeyboardInterrupt etc.
    class unittest:
        class TestCase:
            pass
        pass
    pass
# pyutil modules
import humanreadable
import memutil
# Mnet modules
from mencode import *
class Testy(unittest.TestCase):
    def setUp(self):
        # No shared fixtures; every test builds its own data.
        pass
    def tearDown(self):
        # Nothing to clean up.
        pass
    def test_decode_random_illformed_junk(self):
        # 20 random non-':' bytes cannot form a valid mencoding and must
        # be rejected with MencodeError.
        try:
            mdecode(string.join(filter(lambda x: x != ':', map(chr, map(random.randrange, [0]*20, [256]*20))), ''))
            # NOTE(review): raising a string literal is Python-2-only
            # behaviour; under Python 3 this line itself raises TypeError.
            raise "This shouldn't have decoded without an exception."
        except MencodeError:
            # Good. That was definitely ill-formed.
            pass
    def test_decode_other_random_illformed_junk(self):
        # Length prefix claims l bytes but only l-1 follow: truncated
        # payloads must be rejected.
        l = random.randrange(0, 200)
        s = str(l) + ':' + "x" * (l-1) # too short. Heh heh.
        try:
            mdecode(s)
            raise "This shouldn't have decoded without an exception."
        except MencodeError:
            # Good. That was definitely ill-formed.
            pass
    def test_decode_unknown(self):
        # Malformed unknown-typed input must raise one of the accepted
        # exception types.
        # NOTE(review): the `return 0` failure markers in this method are
        # ignored by unittest, so reaching them silently PASSES the test;
        # an explicit assert/fail would be needed to make them fail.
        try:
            decode_unknown('(())', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        try:
            decode_unknown('((111))', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        # Well-formed unknown values decode to the UNKNOWN_TYPE sentinel
        # plus the index just past the consumed span.
        assert decode_unknown('((0:))', 0) == (UNKNOWN_TYPE, 5)
        assert decode_unknown(')', 0) == (UNKNOWN_TYPE, 0)
        assert decode_unknown('1:a2:ab)', 0) == (UNKNOWN_TYPE, 7)
def test_encode_and_decode_string_with_nulls(self):
strwn = "\000\001\000"
    def test_encode_and_decode_none(self):
        # None round-trips to None.
        assert mdecode(mencode(None)) == None
    def test_encode_and_decode_long(self):
        # Arbitrary-precision negative integer round trip (Py2 long literal).
        assert mdecode(mencode(-23452422452342L)) == -23452422452342L
    def test_encode_and_decode_int(self):
        # Small positive integer round trip.
        assert mdecode(mencode(2)) == 2
def test_dict_enforces_order(self):
mdecode('(4:dict(3:int1:0)(4:null)(3:int1:1)(4:null))')
try:
mdecode('(4:dict(3:int1:1)(4:null)(3:int1:0)(4:null))')
except MencodeError:
pass
def test_dict_forbids_key_repeat(self):
try:
mdecode('(4:dict(3:int1:1)(4:null)(3:int1:1)(4:null))')
except MencodeError:
pass
def test_decode_unknown_type_not_in_dict(self):
try:
mdecode('(7:garbage)')
return false
except UnknownTypeError:
pass
def test_decode_unknown_type_in_dict(self):
# I strongly disagree with this feature. It violates canonicity (which, as we all know, open up security holes), as well as being potentially confusing to debuggers and to mencode maintainers, and it is currently not needed. --Zooko 2001-06-03
assert mdecode('(4:dict(7:garbage)(3:int1:4)(4:null)(3:int1:5))') == {None: 5}
assert mdecode('(4:dict(4:null)(3:int1:5)(3:int1:4)(7:garbage))') == {None: 5}
def test_MencodeError_in_decode_unknown(self):
try:
mdecode('(4:dict(7:garbage)(2:int1:4)(4:null)(3:int1:5))')
return 0
except MencodeError:
pass
    def test_decode_raw_string(self):
        # Positive cases: decode_raw_string(s, i) returns (payload, next_index).
        assert decode_raw_string('1:a', 0) == ('a', 3)
        assert decode_raw_string('0:', 0) == ('', 2)
        assert decode_raw_string('10:aaaaaaaaaaaaaaaaaaaaaaaaa', 0) == ('aaaaaaaaaa', 13)
        assert decode_raw_string('10:', 1) == ('', 3)
        # Negative cases: truncated payload, non-canonical leading zero,
        # missing colon, and non-numeric prefixes must all raise.
        # NOTE(review): the `return 0` markers are ignored by unittest, so
        # reaching them silently passes the test.
        try:
            decode_raw_string('11:', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        try:
            decode_raw_string('01:a', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        try:
            decode_raw_string('11', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        try:
            decode_raw_string('h', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
        try:
            decode_raw_string('h:', 0)
            return 0
        except IndexError:
            pass
        except ValueError:
            pass
        except MencodeError:
            pass
def test_decode_noncanonical_int(self):
try:
mdecode('(3:int2:03)')
assert false, "non canonical integer allowed '03'"
except MencodeError:
pass
try:
mdecode('(3:int2:3 )')
assert false, "non canonical integer allowed '3 '"
except MencodeError:
pass
try:
mdecode('(3:int2: 3)')
assert false, "non canonical integer allowed ' 3'"
except MencodeError:
pass
try:
mdecode('(3:int2:-0)')
assert false, "non canonical integer allowed '-0'"
except MencodeError:
pass
    def test_encode_and_decode_hash_key(self):
        # Integer and string keys are distinct and must round-trip as such.
        x = {42: 3}
        y = {'42': 3}
        assert mdecode(mencode(x)) == x
        assert mdecode(mencode(y)) == y
    def test_encode_and_decode_list(self):
        assert mdecode(mencode([])) == []
    def test_encode_and_decode_tuple(self):
        # Tuples are serialized as sequences, so they decode as lists.
        assert mdecode(mencode(())) == []
    def test_encode_and_decode_dict(self):
        assert mdecode(mencode({})) == {}
    def test_encode_and_decode_complex_object(self):
        # Nested mix of every supported type (Py2 long literal included);
        # both the re-encoding and the decoded value must be stable.
        spam = [[], 0, -3, -345234523543245234523L, {}, 'spam', None, {'a': 3}, {69: []}]
        assert mencode(mdecode(mencode(spam))) == mencode(spam)
        assert mdecode(mencode(spam)) == spam
    def test_preencoded_thing(self):
        # PreEncodedThing must be byte-for-byte interchangeable with
        # encoding the underlying object directly.
        thing = {"dirty limmerk": ["there once was a man from peru", "who set out to sail a canoe"]}
        pthing = PreEncodedThing(thing)
        assert len(mencode(thing)) == len(pthing)
        assert mencode(pthing) == mencode(thing)
        assert mdecode(mencode(thing)) == mdecode(mencode(pthing))
def test_dict_as_key(self):
try:
mdecode('(4:dict(4:dict)(4:null))')
assert false, "dict cannot be a key but it was allowed by md | ecode"
except MencodeError:
return
|
    def test_rej_dict_with_float(self):
        # Floats have no canonical serialization, so encoding one (even
        # nested inside a dict) must raise MencodeError -- tried twice to
        # check the failure is repeatable.  (Py2 `except X, e` syntax.)
        try:
            s = mencode({'foo': 0.9873})
            assert 0, "You can't encode floats! Anyway, the result: %s, is probably not what we meant." % humanreadable.hr(s)
        except MencodeError, le:
            try:
                # print "got exce1: %s" % humanreadable.hr(le)
                s2 = mencode({'foo': 0.9873})
                assert 0, "You can't encode floats! Anyway, the result: %s, is probably not what we meant." % humanreadable.hr(s2)
            except MencodeError, le:
                # print "got exce2: %s" % humanreadable.hr(le)
                # Good! we want an exception when we try this.
                return
def test_rej_float(self):
try:
s = mencode(0.9873)
assert 0, "You can't encode floats! Anyway, the result: %s, is probably not what we meant." % humanreadable.hr(s)
except MencodeError, le:
try:
s2 = mencode(0.9873)
assert 0, "You can't encode floats! Anyway, the result: %s, is probably not what we meant." % humanreadable.hr(s2)
except MencodeError, le:
# Good! we want an excep |
metachris/py2app | py2app_tests/basic_app_with_encoding/package1/subpackage/module.py | Python | mit | 29 | 0 | "package1.subpackage.module" | ||
xvorenda/DAENA | py/alarm.py | Python | gpl-3.0 | 62,009 | 0.004919 | #!/usr/bin/env python
from __future__ import division
import time
import sys
import MySQLdb as mdb
import smtplib
import re
import bz2
import alarm
# Alarm Levels:
# Temperature Alarms
# 0 - No alarm, freezer is in a normal state
# 1 - freezer is in a high temp range, and has been for 30 min
# 2 - freezer is in a high temp range, and has been for 60 min, no more alarms
# 3 - freezer is in a critical temp range, and has been for 10 min, Sound alarm
# every hour
# 4 - freezer is in a critical temp range, and has been for at least 10 min,
# alarm has been silenced
# 5 - freezer is back in high temp range from being in critical range, freezer
# is cooling
# Communication Alarms:
# 6 - communication alarm, there has been no contact with the freezer for 30
# min, or all the data has ended up being "nodata", alarm every hour
# 7 - communication alarm, there has been no contact with the freezer for at
# least 30 min or all the data ends up being "nodata", alarm has been
# silenced
# Handy mysql scripts:
# insert new contact into table freezer_alarm_contacts
# INSERT INTO freezer_alarm_contacts (contact_id, freezer_id) SELECT contacts.contact_id, freezers.freezer_id FROM contacts, freezers WHERE contact_id = %s
#
# insert new freezer into freezer_alarm_contacts
# INSERT INTO freezer_alarm_contacts (contact_id, freezer_id) SELECT contacts.contact_id, freezers.freezer_id FROM contacts, freezers WHERE freezer_id = %s
class alarm(object):
    def __init__(self):
        """Read DB and e-mail credentials from the site config and connect.

        Scans /www/admin/config/db.php line by line for the EMAIL_* and
        DB_* definitions, opens the MySQL connection, creates one read and
        one write cursor, and initializes the alarm timing/level constants.
        """
        # Open the configuration file which has the Alarm Email and database
        # information
        conf = open("/www/admin/config/db.php", "r")
        # Loop through the file
        for line in conf:
            # Remove the trailing white space (including return character)
            line = line.rstrip()
            # Dont waste the effort if the line is commented out
            if re.search('^//', line):
                pass
            elif re.search('EMAIL_ADDRESS', line):
                #pulls out the email address from the line
                self.email = re.sub('[\ \";\)]','', (line.split(',')[1]))
            elif re.search('EMAIL_PASSWORD', line):
                #pulls out the email password from the line
                self.emailPass = (re.sub('[\ \";\)]','', (line.split(',')[1])))
            elif re.search('DB_HOST', line):
                #pulls out the db host from the line
                host =(re.sub('[\ \";\)]','', (line.split(',')[1])))
            elif re.search('DB_NAME', line):
                #pulls out the database from the line
                database =(re.sub('[\ \";\)]','', (line.split(',')[1])))
            elif re.search('DB_USER', line):
                #pulls out the database user name from the line
                dbUser = (re.sub('[\ \";\)]','', (line.split(',')[1])))
            elif re.search('DB_PASS', line):
                #pulls out the database password from the line
                dbPass = (re.sub('[\ \";\)]','', (line.split(',')[1])))
        # Debug to make sure the passwords and such are correct
        #print "email, emailPass, host, database, dbUser, dbPass", self.email, self.emailPass, host, database, dbUser, dbPass
        # Close the file when it is done
        conf.close()
        # NOTE(review): DB_HOST is parsed into `host` above but never passed
        # to mdb.connect(), so the connection always uses the driver default
        # (localhost) -- confirm this is intended.
        # Create the connection to the database
        self.conn = mdb.connect(user=dbUser, passwd=dbPass, db=database)
        # Initialize write and read cursors to be used with mysql
        self.writecursor = self.conn.cursor()
        self.readcursor = self.conn.cursor()
        # Constants used in the program
        self.IS_FREEZING = 0
        self.IS_NOT_FREEZING = 1
        # Alarm Timing Constants
        self.SIXTY_SECONDS = 60
        self.MINUTES_FOR_CRITICAL_RANGE_REMINDER = 60
        self.MINUTES_IN_CRITICAL_RANGE = 15
        self.MINUTES_IN_HIGH_RANGE = 30
        self.MINUTES_AT_ALARM_1 = 30
        self.MINUTES_AT_ALARM_0 = 30
        self.MINUTES_BELOW_CRITICAL_RANGE = 15
        self.MINUTES_BELOW_HIGH_RANGE = 15
        self.MINUTES_WITH_NO_DATA = 30
        self.MINUTES_FOR_COM_ALARM_REMINDER = 60
        # Alarm Level Constants
        self.NORMAL_STATE = 0
        self.HIGH_TEMP_ALARM_1 = 1
        self.HIGH_TEMP_ALARM_2 = 2
        self.CRITICAL_TEMP_ALARM = 3
        self.CRITICAL_TEMP_ALARM_SILENCED = 4
        self.CRITICAL_TEMP_TO_HIGH_TEMP_ALARM = 5
        self.COMMUNICATION_ALARM = 6
        self.COMMUNICATION_ALARM_SILENCED = 7
        # Used to make time into an int which is how time is stored in the database (time.time()*1000)
        self.TIME_THOUSAND = 1000
        #print "initilized"
################################################################################
# Temperature Alarms
def checkTemp(self, freezer, currentTemp, setpoint1 = None, setpoint2=None):
#print "checking temp of", freezer, currentTemp, setpoint1, setpoint2
readQuery = ("select freezer_alarm_ID, freezer_setpoint1, freezer_setpoint2, freezer_location, freezer_name, freezer_description, freezer_send_alarm from freezers where freezer_id = %s")
self.readcursor.execute(readQuery, (freezer))
alarmIDData = self.readcursor.fetchall()
#print alarmIDData
freezerAlarmID = alarmIDData[0][0]
if not setpoint1:
setpoint1 = float(alarmIDData[0][1])
if not setpoint2:
setpoint2 = float(alarmIDData[0][2])
location = alarmIDData[0][3]
name = alarmIDData[0][4]
description = alarmIDData[0][5]
# Send Alarm = 0 it will not sound an alarm
send_alarm = alarmIDData[0][6]
location = re.sub("<br>", ' ', location)
#print "freezerAlarmID, setpoint1, setpoint2", freezerAlarmID, setpoint1, setpoint2
# Takes in freezerID, currentTemp, the two setpoints, and
# freezerAlarmID and checks to see if freezer is in an alarm state.
#print "checking temp of", freezer, currentTemp, setpoint1, setpoint2
# get data for most recent alarm level
readQuery = ("select alarm_time, alarm_level from alarm where alarm_id = %s")
numResults = self.readcursor.execute(readQuery, (freezerAlarmID))
# check to ensure there was data pulled from the database
#if numResults == 0:
# break
# retrieve the data from the query
data = self.readcursor.fetchall()
alarmTime = int(data[0][0])
alarmLevel = (data[0][1])
#print "alarmTime, alarmLevel:", alarmTime, alarmLevel
# currentTemp is in a critical range
if currentTemp > setpoint2:
#print "currentTemp in critical range", currentTemp, setpoint2
# check if the temperature has been in a critical range for 15 min
noAlarm = self.checkForNoAlarm(freezer, setpoint2, self.MINUTES_IN_CRITICAL_RANGE)
#print "noAlarm critical 0= sound alarm 1=dont", noAlarm
# if the temperature has been above setpoint2 send an alarm
if noAlarm == 0:
#print "critical"
# silenced alarm does not send out message anymore
# need more code for this on the website
# silenced (3 > Alarm 4)
| if alarmLevel == self.CRITICAL_TEMP_ALARM_SILENCED:
#print "alarmLevel self.CRITICAL_TEMP_ALARM_SILENCED", alarmLevel
| pass
# constant reminder alarm every 60min
elif alarmLevel == self.CRITICAL_TEMP_ALARM:
#check to see if it has been > 60 min since the last alarm
# Reminder 3 > Alarm 3 (1 hour)
if alarmTime < (((time.time())-(self.SIXTY_SECONDS * self.MINUTES_FOR_CRITICAL_RANGE_REMINDER))*self.TIME_THOUSAND):
#print "reminder alarmLevel self.CRITICAL_TEMP_ALARM, time", alarmLevel, alarmTime
# Set alarm to self.CRITICAL_TEMP_ALARM, critical range reminder
self.newAlarm(freezer, self.CRITICAL_TEMP_ALARM)
# prepare query to get email addresses
# alarm(alarm level number) = 1, the contact should get an
|
drphilmarshall/Music | beatbox/universe.py | Python | mit | 38,369 | 0.013188 | import numpy as np
import matplotlib
import matplotlib.pylab as plt
import healpy as hp
import string
import yt
import os
import glob
from PIL import Image as PIL_Image
from images2gif import writeGif
from scipy.special import sph_harm,sph_jn
import beatbox
from beatbox.multiverse import Multiverse
# ===================================================================
def set_k_filter(self):
    """Build the class-level k-space filter selecting the truncated modes.

    Caches the physical cutoffs on the Universe class and stores a 0/1
    mask (Universe.kfilter) that is 1 where truncated_nmin <= |n| <=
    truncated_nmax and 0 elsewhere.
    """
    # Record the physical k cutoffs corresponding to the mode-number bounds.
    Universe.low_k_cutoff = Universe.truncated_nmin * Universe.Deltak
    Universe.high_k_cutoff = Universe.truncated_nmax * Universe.Deltak
    # Masks for each bound; their product keeps only the in-range modes.
    keep_above_min = (~(Universe.n < Universe.truncated_nmin)).astype(int)
    keep_below_max = (~(Universe.n > Universe.truncated_nmax)).astype(int)
    Universe.kfilter = keep_above_min * keep_below_max
    return
def populate_response_matrix(self):
    """
    Populate the class-level response matrix Universe.R, mapping the
    filtered Fourier modes (n range) onto spherical harmonics (l range).

    Two lines garbled by stray "|" separator artifacts are restored, and
    index arithmetic uses floor division (//) so slice bounds remain
    integers under Python 3 as well (identical result under Python 2).
    """
    truncated_nmax = Universe.truncated_nmax
    truncated_nmin = Universe.truncated_nmin
    truncated_lmax = Universe.truncated_lmax
    truncated_lmin = Universe.truncated_lmin
    lms = Universe.lms
    kfilter = Universe.kfilter
    # Initialize R matrix:
    NY = (truncated_lmax + 1)**2 - (truncated_lmin)**2
    # Find the indices of the non-zero elements of the filter
    ind = np.where(Universe.kfilter>0)
    # The n index spans 2x that length, 1st half for the cos coefficients,
    # 2nd half for the sin coefficients
    NN = 2*len(ind[1])
    R_long = np.zeros([NY,NN], dtype=np.complex128)
    k, theta, phi = Universe.k[ind], np.arctan2(Universe.ky[ind],Universe.kx[ind]), np.arccos(Universe.kz[ind]/Universe.k[ind])
    # We need to fix the 'nan' theta element that came from having ky=0
    theta[np.isnan(theta)] = np.pi/2.0
    # Get ready to loop over y
    y = 0
    # Spherical Bessel function values up to lmax for every filtered mode.
    A = [sph_jn(truncated_lmax,ki)[0] for ki in k]
    # Loop over y, computing elements of R_yn
    for i in lms:
        l = i[0]
        m = i[1]
        trigpart = np.cos(np.pi*l/2.0)
        B = np.asarray([A[ki][l] for ki in range(len(k))])
        R_long[y,:NN//2] = 4.0 * np.pi * sph_harm(m,l,theta,phi).reshape(NN//2)*B.reshape(NN//2) * trigpart
        trigpart = np.sin(np.pi*l/2.0)
        R_long[y,NN//2:] = 4.0 * np.pi * sph_harm(m,l,theta,phi).reshape(NN//2)*B.reshape(NN//2)* trigpart
        y = y+1
    Universe.R = np.zeros([NY,len(ind[1])], dtype=np.complex128)
    Universe.R = np.append(R_long[:,0:len(ind[1])//2], R_long[:,len(ind[1]):3*len(ind[1])//2], axis=1)
    return
# ====================================================================
class Universe(object):
"""
A simple model universe in a box.
"""
# ====================================================================
#Initialize the class variables
PIXSCALE = 0.1
BOXSIZE = 4.0
# Real space: define a coordinate grid:
NPIX = int(BOXSIZE/PIXSCALE) + 1
Nj = np.complex(0.0,NPIX)
#x, y, z = np.mgrid[-BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj, -BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj, -BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj]
x, y, z = np.mgrid[-BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj, -BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj, -BOXSIZE/2.0+BOXSIZE/(2*float(NPIX)):BOXSIZE/2.0-BOXSIZE/(2*float(NPIX)):Nj]
print beatbox.Multiverse.truncated_nmin
# Define the truncatad range of modes (in n and l) we want in our Universe:
try:
truncated_nmax = beatbox.Multiverse.truncated_nmax
truncated_nmin = beatbox.Multiverse.truncated_nmin
truncated_lmax = beatbox.Multiverse.truncated_lmax
truncated_lmin = beatbox.Multiverse.truncated_lmin
except NameError:
truncated_nmax = 2
truncated_nmin = 1
truncated_lmax = 8
truncated_lmin = 0
# If only truncated_lmax is provided, calculated the largest truncated_nmax we can reconstruct
if (truncated_lmax is not None) and (truncated_nmax is None):
truncated_nmax = int(np.floor((3.0*(truncated_lmax+1)**2.0/(4.0*np.pi))**(1.0/3.0)))
# Else define a default value for truncated_nmax if not already done
elif truncated_nmax is None:
truncated_nmax = 6
# If only truncated_nmax is provided, calculated the truncated_lmax needed for no information
# from the 3D map to be lost
if (truncated_nmax is not None) and (truncated_lmax is None):
truncated_lmax = int(np.ceil(-0.5+2.0*truncated_nmax**(3.0/2.0)*np.sqrt(np.pi/3.0)))
# Make a y_max-long tupple of l and m pairs
if None not in (truncated_lmin, truncated_lmax):
lms = [(l, m) for l in range(truncated_lmin,truncated_lmax+1) for m in range(-l, l+1)]
# Fourier space: define a coordinate grid:
# The nmax we need for the resolution we want in our Universe is:
nmax = int(BOXSIZE/(2*PIXSCALE))
Deltak = 2.0*np.pi/BOXSIZE
kmax = nmax*Deltak
kx, ky, kz = np.meshgrid(np.linspace(-kmax,kmax,NPIX),np.linspace(-kmax,kmax,NPIX),np.linspace(-kmax,kmax,NPIX), indexing='ij')
k = np.sqrt(np.power(kx, 2)+np.power(ky,2)+np.power(kz,2))
nx, ny, nz = np.meshgrid(np.linspace(-nmax,nmax,NPIX),np.linspace(-nmax,nmax,NPIX),np.linspace(-nmax,nmax,NPIX), indexing='ij');
n = np.sqrt(np.power(nx, 2)+np.power(ny,2)+np.power(nz,2));
# Define the computer Fourier coordinates, used for iFFT
kmax_for_iFFt = 1/(2*PIXSCALE)
Deltak_for_iFFT = (1/BOXSIZE)
kx_for_iFFT = nx/BOXSIZE
ky_for_iFFT = ny/BOXSIZE
kz_for_iFFT = nz/BOXSIZE
# Define filter in k-space, that contains the modes we want:
kfilter = None
set_Universe_k_filter = set_k_filter
#Define and populate the R matrix:
R = None
populate_Universe_R = populate_response_matrix
#==========================================================
    def __init__(self):
        """Create an empty universe: zero potential and no CMB map loaded."""
        # The potential map (pure real):
        self.phi = self.x * 0.0
        # The CMB temperature map:
        self.Tmap = None
        self.NSIDE = None
        return
    def __str__(self):
        # NOTE(review): the "Fourrier" typo is in runtime-visible text, so
        # it is left untouched here.
        return "an empty model universe, containing a grid 41x41x41 pixels (and corresponding k grid in Fourrier space), a k filter and the corresponding R matrix mapping between those k values and a range of l (given by the Multiverse)"
# ----------------------------------------------------------------
    def read_in_CMB_T_map(self,from_this=None):
        """Load a HEALPix CMB temperature map from the file `from_this`.

        Sets self.Tmap and derives self.NSIDE from the map length; with no
        filename given, warns and records Tmapfile as None.  (Python 2
        print statement: this module is Python 2 code.)
        """
        if from_this is None:
            print "No CMB T map file supplied."
            self.Tmapfile = None
        else:
            self.Tmapfile = from_this
            self.Tmap = hp.read_map(from_this)
            self.NSIDE = hp.npix2nside(len(self.Tmap))
        return
def show_CMB_T_map(self,Tmap=None, title = "CMB graviational potential fluctuations as seen from inside the LSS", from_perspective_of = "observer"):
if Tmap is None:
self.NSIDE = 256
self.Tmap = hp.alm2map(self.alm,self.NSIDE)
else:
self.Tmap = Tmap
if from_perspective_of == "observer":
# Sky map:
hp.mollview(self.Tmap, rot=(-90,0,0), min=-60, max=45, title=title + ", $l_max=$%d" % self.truncated_lmax)
# else:
# # Interactive "external" view ([like this](http://zonca.github.io/2013/03/interactive-3d-plot-of-sky-map.html)) pass
# # beatbox.zoncaview(self.Tmap)
# # This did not work, sadly. Maybe we can find a 3D
# # spherical surface plot routine using matplotlib? For
# # now, just use the healpix vis.
# R = (0.0,0.0,0.0) # (lon,lat,psi) to specify center of map and rotation to apply
# hp.orthview(self.Tmap,rot=R,flip='geo',half_sky=True,title="CMB graviational potential fluctuations as seen from outside the LSS, l_{max}=%d" % sel |
datapythonista/pandas | pandas/tests/io/xml/test_xml.py | Python | bsd-3-clause | 34,008 | 0.000265 | from io import (
BytesIO,
StringIO,
)
import os
from typing import Union
from urllib.error import HTTPError
import numpy as np
import pytest
from pandas.compat import PY38
import pandas.util._test_decorators as td
from pandas import DataFrame
import pandas._testing as tm
from pandas.io.xml import read_xml
"""
CHECK LIST
[x] - ValueError: "Values for parser can only be lxml or etree."
etree
[X] - ImportError: "lxml not found, please install or use the etree parser."
[X] - TypeError: "expected str, bytes or os.PathLike object, not NoneType"
[X] - ValueError: "Either element or attributes can be parsed not both."
[X] - ValueError: "xpath does not return any nodes..."
[X] - SyntaxError: "You have used an incorrect or unsupported XPath"
[X] - ValueError: "names does not match length of child elements in xpath."
[X] - TypeError: "...is not a valid type for names"
[X] - ValueError: "To use stylesheet, you need lxml installed..."
[] - URLError: (GENERAL ERROR WITH HTTPError AS SUBCLASS)
[X] - HTTPError: "HTTP Error 404: Not Found"
[] - OSError: (GENERAL ERROR WITH FileNotFoundError AS SUBCLASS)
[X] - FileNotFoundError: "No such file or directory"
[] - ParseError (FAILSAFE CATCH ALL FOR VERY COMPLEX XML)
[X] - UnicodeDecodeError: "'utf-8' codec can't decode byte 0xe9..."
[X] - UnicodeError: "UTF-16 stream does not start with BOM"
[X] - BadZipFile: "File is not a zip file"
[X] - OSError: "Invalid data stream"
[X] - LZMAError: "Input format not supported by decoder"
[X] - ValueError: "Unrecognized compression type"
[X] - PermissionError: "Forbidden"
lxml
[X] - ValueError: "Either element or attributes can be parsed not both."
[X] - AttributeError: "__enter__"
[X] - XSLTApplyError: "Cannot resolve URI"
[X] - XSLTParseError: "document is not a stylesheet"
[X] - ValueError: "xpath does not return any nodes."
[X] - XPathEvalError: "Invalid expression"
[] - XPathSyntaxError: (OLD VERSION IN lxml FOR XPATH ERRORS)
[X] - TypeError: "empty namespace prefix is not supported in XPath"
[X] - ValueError: "names does not match length of child elements in xpath."
[X] - TypeError: "...is not a valid type for names"
[X] - LookupError: "unknown encoding"
[] - URLError: (USUALLY DUE TO NETWORKING)
[X - HTTPError: "HTTP Error 404: Not Found"
[X] - OSError: "failed to load external entity"
[X] - XMLSyntaxError: "Start tag expected, '<' not found"
[] - ParserError: (FAILSAFE CATCH ALL FOR VERY COMPLEX XML
[X] - ValueError: "Values for parser can only be lxml or etree."
[X] - UnicodeDecodeError: "'utf-8' codec can't decode byte 0xe9..."
[X] - UnicodeError: "UTF-16 stream does not start with BOM"
[X] - BadZipFile: "File is not a zip file"
[X] - OSError: "Invalid data stream"
[X] - LZMAError: "Input format not supported by decoder"
[X] - ValueError: "Unrecognized compression type"
[X] - PermissionError: "Forbidden"
"""
# Shared fixture: three shapes with one missing (NaN) side count; the XML
# fixture strings below mirror this frame.
geom_df = DataFrame(
    {
        "shape": ["square", "circle", "triangle"],
        "degrees": [360, 360, 180],
        "sides": [4, np.nan, 3],
    }
)
# Default-namespace XML fixture mirroring geom_df; the garbled
# "<sid | es>" separator artifact on the triangle row is restored to a
# well-formed <sides> element.
xml_default_nmsp = """\
<?xml version='1.0' encoding='utf-8'?>
<data xmlns="http://example.com">
 <row>
   <shape>square</shape>
   <degrees>360</degrees>
   <sides>4</sides>
 </row>
 <row>
   <shape>circle</shape>
   <degrees>360</degrees>
   <sides/>
 </row>
 <row>
   <shape>triangle</shape>
   <degrees>180</degrees>
   <sides>3</sides>
 </row>
</data>"""
# Prefixed-namespace XML fixture; the garbled " | <doc:row>" separator
# artifact opening the triangle row is restored.
xml_prefix_nmsp = """\
<?xml version='1.0' encoding='utf-8'?>
<doc:data xmlns:doc="http://example.com">
  <doc:row>
    <doc:shape>square</doc:shape>
    <doc:degrees>360</doc:degrees>
    <doc:sides>4.0</doc:sides>
  </doc:row>
  <doc:row>
    <doc:shape>circle</doc:shape>
    <doc:degrees>360</doc:degrees>
    <doc:sides/>
  </doc:row>
  <doc:row>
    <doc:shape>triangle</doc:shape>
    <doc:degrees>180</doc:degrees>
    <doc:sides>3.0</doc:sides>
  </doc:row>
</doc:data>"""
df_kml = DataFrame(
{
"id": {
0: "ID_00001",
1: "ID_00002",
2: "ID_00003",
3: "ID_00004",
4: "ID_00005",
},
"name": {
0: "Blue Line (Forest Park)",
1: "Red, Purple Line",
2: "Red, Purple Line",
3: "Red, Purple Line",
4: "Red, Purple Line",
},
"styleUrl": {
0: "#LineStyle01",
1: "#LineStyle01",
2: "#LineStyle01",
3: "#LineStyle01",
4: "#LineStyle01",
},
"extrude": {0: 0, 1: 0, 2: 0, 3: 0, 4: 0},
"altitudeMode": {
0: "clampedToGround",
1: "clampedToGround",
2: "clampedToGround",
3: "clampedToGround",
4: "clampedToGround",
},
"coordinates": {
0: (
"-87.77678526964958,41.8708863930319,0 "
"-87.77826234150609,41.87097820122218,0 "
"-87.78251583439344,41.87130129991005,0 "
"-87.78418294588424,41.87145055520308,0 "
"-87.7872369165933,41.8717239119163,0 "
"-87.79160214925886,41.87210797280065,0"
),
1: (
"-87.65758750947528,41.96427269188822,0 "
"-87.65802133507393,41.96581929055245,0 "
"-87.65819033925305,41.96621846093642,0 "
"-87.6583189819129,41.96650362897086,0 "
"-87.65835858701473,41.96669002089185,0 "
"-87.65838428411853,41.96688150295095,0 "
"-87.65842208882658,41.96745896091846,0 "
"-87.65846556843937,41.9683761425439,0 "
"-87.65849296214573,41.96913893870342,0"
),
2: (
"-87.65492939166126,41.95377494531437,0 "
"-87.65557043199591,41.95376544118533,0 "
"-87.65606302030132,41.95376391658746,0 "
"-87.65623502146268,41.95377379126367,0 "
"-87.65634748981634,41.95380103566435,0 "
"-87.65646537904269,41.95387703994676,0 "
"-87.65656532461145,41.95396622645799,0 "
"-87.65664760856414,41.95404201996044,0 "
"-87.65671750555913,41.95416647054043,0 "
"-87.65673983607117,41.95429949810849,0 "
"-87.65673866475777,41.95441024240925,0 "
"-87.6567690255541,41.95490657227902,0 "
"-87.65683672482363,41.95692259283837,0 "
"-87.6568900886376,41.95861070983142,0 "
"-87.65699865558875,41.96181418669004,0 "
"-87.65756347177603,41.96397045777844,0 "
"-87.65758750947528,41.96427269188822,0"
),
3: (
"-87.65362593118043,41.94742799535678,0 "
"-87.65363554415794,41.94819886386848,0 "
"-87.6536456393239,41.95059994675451,0 "
"-87.65365831235026,41.95108288489359,0 "
"-87.6536604873874,41.9519954657554,0 "
"-87.65362592053201,41.95245597302328,0 "
"-87.65367158496069,41.95311153649393,0 "
"-87.65368468595476,41.9533202828916,0 "
"-87.65369271253692,41.95343095587119,0 "
"-87.65373335834569,41.95351536301472,0 "
"-87.65378605844126,41.95358212680591,0 "
"-87.65385067928185,41.95364452823767,0 "
"-87.6539390793817,41.95370263886964,0 "
"-87.6540786298351,41.95373403675265,0 "
"-87.65430648647626,41.9537535411832,0 "
"-87.65492939166126,41.95377494531437,0"
),
4: (
"-87.65345391792157,41.94217681262115,0 "
"-87.65342448305786,41.94237224420864,0 "
"-87.65339745703922,41.94268217746244,0 "
"-87.65337753982941,41.94288140770284,0 "
"-87.65336256753105,41.94317369618263,0 "
"-87.65338799707138,41.94357253961736,0 "
"-87.65340240886648,41.94389158188269,0 "
"-87.65341837392448,41.94406444407721,0 "
"-87.65342275247338,41.94421065714904,0 |
hfp/tensorflow-xsmm | tensorflow/python/saved_model/model_utils/__init__.py | Python | apache-2.0 | 1,544 | 0.003886 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utils for saving a Keras Model or Estimator to the SavedModel format."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=wildcard-import
from tensorflow.python.saved_model.model_utils.export_output import *
from tensorflow.python.saved_model.model_utils.export_utils import build_all_signature_defs
from tensorflow.python.saved_model.model_utils.export_utils import export_outputs_for_mode
from tensorflow.python.saved_model.model_utils.export_utils import EXPORT_TAG_MAP
from tensorflow.python.saved_model.model_utils.export_utils import get_export_outputs
from tensorflow.python.saved_model.model_utils.export_utils import get_temp_export_dir
from tensorflow.python.saved_model.model_utils.export_utils import get_timestamped_export_dir
# pylint: enable=wildcard-import
|
katyushacccp/ISN_projet_final | final 0.0/modules/affichage.py | Python | cc0-1.0 | 2,709 | 0.068291 | from tkinter import *
from modules.gestionnaire import *
def couleurAffiche(couleur):
    """Translate a logical cube colour name into its display hex code.

    Returns None for any unknown name, matching the implicit fall-through
    of the original if/elif chain.
    """
    # Dict lookup replaces the six-arm if/elif chain; .get preserves the
    # implicit None for names outside the palette.
    palette = {
        "red": "#CC0000",
        "green": "#006600",
        "blue": "#0000CC",
        "orange": "#FF4500",
        "yellow": "#FFD500",
        "white": "#FFFFFF",
    }
    return palette.get(couleur)
def actualise(cube,can):
    """Redraw the unfolded-cube view of `cube` on the Tk canvas `can`.

    `cube` is indexed as cube[face][3*row+col]; faces 0-3 form the side
    strip, face 4 is drawn above it and face 5 below.  Each cell holds a
    colour name understood by couleurAffiche.  Two statements garbled by
    stray "|" separator artifacts ("can | ." and "fi | ll") are restored.
    """
    # Face 4 (top band of the unfolded layout).
    for i in range(3): # la hauteur
        for j in range(3): # la longueur
            can.create_rectangle(20+180+50*j,20+20+50*i,20+230+50*j,20+70+50*i,width=2,fill=couleurAffiche(cube[4][3*i+j]))
    # Faces 0-3, drawn side by side in a horizontal strip.
    for z in range(4):
        for i in range(3):
            for j in range(3):
                can.create_rectangle(20+20+160*z+50*j,20+180+50*i,20+70+160*z+50*j,20+230+50*i,width=2,fill=couleurAffiche(cube[z][3*i+j]))
    # Face 5 (bottom band).
    for i in range(3):
        for j in range(3):
            can.create_rectangle(20+180+50*j,20+340+50*i,20+230+50*j,20+390+50*i,width=2,fill=couleurAffiche(cube[5][3*i+j]))
    # Black separators framing the unfolded faces.
    can.create_rectangle(20+170,20+10,20+180,20+500,width=0,fill="black")
    can.create_rectangle(20+330,20+10,20+340,20+500,width=0,fill="black")
    can.create_rectangle(20+10,20+170,20+20,20+340,width=0,fill="black")
    can.create_rectangle(20+490,20+170,20+500,20+340,width=0,fill="black")
    can.create_rectangle(20+650,20+170,20+660,20+340,width=0,fill="black")
    can.create_rectangle(20+10,20+170,20+660,20+180,width=0,fill="black")
    can.create_rectangle(20+10,20+330,20+660,20+340,width=0,fill="black")
    can.create_rectangle(20+170,20+10,20+330,20+20,width=0,fill="black")
    can.create_rectangle(20+170,20+490,20+330,20+500,width=0,fill="black")
    # Edge markers painted with the centre colour of the adjacent face.
    can.create_rectangle(70,177,160,190,width=1,fill=couleurAffiche(cube[4][4]))
    can.create_rectangle(70,360,160,373,width=1,fill=couleurAffiche(cube[5][4]))
    can.create_rectangle(390,177,480,190,width=1,fill=couleurAffiche(cube[4][4]))
    can.create_rectangle(390,360,480,373,width=1,fill=couleurAffiche(cube[5][4]))
    can.create_rectangle(550,177,640,190,width=1,fill=couleurAffiche(cube[4][4]))
    can.create_rectangle(550,360,640,373,width=1,fill=couleurAffiche(cube[5][4]))
    can.create_rectangle(230,17,320,30,width=1,fill=couleurAffiche(cube[3][4]))
    can.create_rectangle(230,520,320,533,width=1,fill=couleurAffiche(cube[3][4]))
    can.create_rectangle(17,230,30,320,width=1,fill=couleurAffiche(cube[3][4]))
    can.create_rectangle(693,230,680,320,width=1,fill=couleurAffiche(cube[0][4]))
    can.create_rectangle(177,70,190,160,width=1,fill=couleurAffiche(cube[0][4]))
    can.create_rectangle(360,70,373,160,width=1,fill=couleurAffiche(cube[2][4]))
    can.create_rectangle(177,390,190,480,width=1,fill=couleurAffiche(cube[0][4]))
    can.create_rectangle(360,390,373,480,width=1,fill=couleurAffiche(cube[2][4]))
agamdua/hamster-core | hamster/jobs/models.py | Python | mit | 209 | 0.004785 | from django.db imp | ort models
class Job(models.Model):
    """A schedulable job; `disabled` hides it without deleting it."""
    # Human-readable identifier for the job.
    job_name = models.CharField(max_length=80)
    # When True the job is excluded from scheduling.  (The original line
    # was broken by a stray " | " separator artifact: "m | odels".)
    disabled = models.BooleanField(default=False)

    def __unicode__(self):
        # Python-2 style display name used e.g. by the Django admin.
        return self.job_name
|
flinz/eatnit | eatnit/apps/food/urls.py | Python | gpl-2.0 | 482 | 0.006224 | from | django.conf.urls import patterns, url
from eatnit.apps.food import views
# URL routes for the food app (Django 1.x `patterns` syntax); the
# commented-out routes are kept for the planned meal/restaurant views.
urlpatterns = patterns('',
    url(r'^$', views.index, name='eatnit_index'),
    # url(r'^meals/$', views.meal_index, name='meal_index'),
    # url(r'^meals/(?P<meal_id>\d+)/$', views.meal_detail, name='meal_detail'),
    # url(r'^restaurants/$', views.restaurant_index, name='restaurant_index'),
    # url(r'^restaurants/(?P<restaurant_id>\d+)/$', views.restaurant_detail, name='restaurant_detail'),
)
edwinsteele/visual-commute | vcapp/migrations/0002_initial.py | Python | cc0-1.0 | 5,957 | 0.006211 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Station'
db.create_table('vcapp_station', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('station_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
('lon', self.gf('django.db.models.fields.FloatField')()),
('lat', self.gf('django.db.models.fields.FloatField')()),
))
db.send_create_signal('vcapp', ['Station'])
# Adding model 'Line'
db.create_table('vcapp_line', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('line_name', self.gf('django.db.models.fields.CharField')(max_length=50)),
))
db.send_create_signal('vcapp', ['Line'])
# Adding model 'Trip'
db.create_table('vcapp_trip', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('timetable_type', self.gf('django.db.models.fields.CharField')(max_length=2)),
('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
))
db.send_create_signal('vcapp', ['Trip'])
# Adding model 'TripStop'
db.create_table('vcapp_tripstop', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('departure_time', self.gf('django.db.models.fields.TimeField')()),
('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
))
db.send_create_signal('vcapp', ['TripStop'])
# Adding model 'Segment'
db.create_table('vcapp_segment', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('departure_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='departure_point', to=orm['vcapp.TripStop'])),
('arrival_tripstop', self.gf('django.db.models.fields.related.ForeignKey')(related_name='arrival_point', to=orm['vcapp.TripStop'])),
('trip', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Trip'])),
))
db.send_create_signal('vcapp', ['Segment'])
# Adding model 'InterchangeStation'
db.create_table('vcapp_interchangestation', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('line', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Line'])),
('station', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['vcapp.Station'])),
))
db.send_create_signal('vcapp', ['InterchangeStation'])
def backwards(self, orm):
# Deleting model 'Station'
db.delete_table('vcapp_station')
# Deleting model 'Line'
db.delete_table('vcapp_line')
# Deleting model 'Trip'
db.delete_table('vcapp_trip')
# Deleting model 'TripStop'
db.delete_table('vcapp_tripstop')
# Deleting model 'Segment'
db.delete_table('vcapp_segment')
# Deleting model 'InterchangeStation'
db.delete_table('vcapp_interchangestation')
models = {
'vcapp.interchangestation': {
'Meta': {'object_name': 'InterchangeStation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
| 'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"})
},
'vcapp.line': {
'Meta': {'object_name': 'Line'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'vcapp.segment': {
'Meta': {'object_name': 'Segment'},
'arrival_trips | top': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'arrival_point'", 'to': "orm['vcapp.TripStop']"}),
'departure_tripstop': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'departure_point'", 'to': "orm['vcapp.TripStop']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
},
'vcapp.station': {
'Meta': {'object_name': 'Station'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lat': ('django.db.models.fields.FloatField', [], {}),
'lon': ('django.db.models.fields.FloatField', [], {}),
'station_name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'vcapp.trip': {
'Meta': {'object_name': 'Trip'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'line': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Line']"}),
'timetable_type': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
'vcapp.tripstop': {
'Meta': {'object_name': 'TripStop'},
'departure_time': ('django.db.models.fields.TimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'station': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Station']"}),
'trip': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['vcapp.Trip']"})
}
}
complete_apps = ['vcapp'] |
colinbrislawn/scikit-bio | skbio/sequence/_iupac_sequence.py | Python | bsd-3-clause | 16,530 | 0.00006 | # ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
from future.utils import with_metaclass
from abc import ABCMeta, abstractproperty
from itertools import product
import numpy as np
import re
from skbio.util._decorator import classproperty, overrides, stable
from skbio.util._misc import MiniRegistry
from ._sequence import Sequence
class IUPACSequence(with_metaclass(ABCMeta, Sequence)):
"""Store biological sequence data conforming to the IUPAC character set.
This is an abstract base class (ABC) that cannot be instantiated.
Attributes
----------
values
metadata
positional_metadata
alphabet
gap_chars
nondegenerate_chars
degenerate_chars
degenerate_map
Raises
------
ValueError
If sequence characters are not in the IUPAC character set [1]_.
See Also
--------
DNA
RNA
Protein
References
----------
.. [1] Nomenclature for incompletely specified bases in nucleic acid
sequences: recommendations 1984.
Nucleic Acids Res. May 10, 1985; 13(9): 3021-3030.
A Cornish-Bowden
"""
__validation_mask = None
__degenerate_codes = None
__nondegenerate_codes = None
__gap_codes = None
@classproperty
def _validation_mask(cls):
# TODO These masks could be defined (as literals) on each concrete
# object. For now, memoize!
if cls.__validation_mask is None:
cls.__validation_mask = np.invert(np.bincount(
np.fromstring(''.join(cls.alphabet), dtype=np.uint8),
minlength=cls._number_of_extended_ascii_codes).astype(bool))
return cls.__validation_mask
@classproperty
def _degenerate_codes(cls):
if cls.__degenerate_codes is None:
degens = cls.degenerate_chars
cls.__degenerate_codes = np.asarray([ord(d) for d in degens])
return cls.__degenerate_codes
@classproperty
def _nondegenerate_codes(cls):
if cls.__nondegenerate_codes is None:
nondegens = cls.nondegenerate_chars
cls.__nondegenerate_codes = np.asarray([ord(d) for d in nondegens])
return cls.__nondegenerate_codes
@classproperty
def _gap_codes(cls):
if cls.__gap_codes is None:
gaps = cls.gap_chars
cls.__gap_codes = np.asarray([ord(g) for g in gaps])
return cls.__gap_codes
@classproperty
@stable(as_of='0.4.0')
def alphabet(cls):
"""Return valid IUPAC characters.
This includes gap, non-degenerate, and degenerate characters.
Returns
-------
set
Valid IUPAC characters.
"""
return cls.degenerate_chars | cls.nondegenerate_chars | cls.gap_chars
@classproperty
@stable(as_of='0.4.0')
def gap_chars(cls):
"""Return characters defined as gaps.
Returns
-------
set
Characters defined as gaps.
"""
return set('-.')
@classproperty
@stable(as_of='0.4.0')
def degenerate_chars(cls):
"""Return degenerate IUPAC characters.
Returns
-------
set
Degenerate IUPAC characters.
"""
return set(cls.degenerate_map)
@abstractproperty
@classproperty
@stable(as_of='0.4.0')
def nondegenerate_chars(cls):
"""Return non-degenerate IUPAC characters.
Returns
-------
set
Non-degenerate IUPAC characters.
"""
return set() # pragma: no cover
@abstractproperty
@classproperty
@stable(as_of='0.4.0')
def degenerate_map(cls):
"""Return mapping of degenerate to non-degenerate characters.
Returns
-------
dict (set)
Mapping of each degenerate IUPAC character to the set of
non-degenerate IUPAC characters it represents.
"""
return set() # pragma: no cover
@property
def _motifs(self):
return _motifs
@overrides(Sequence)
def __init__(self, sequence, metadata=None, positional_metadata=None,
lowercase=False, validate=True):
super(IUPACSequence, self).__init__(
sequence, metadata, positional_metadata, lowercase)
if validate:
self._validate()
def _validate(self):
# This is the fastest way that we have found to identify the
# presence or absence of certain characters (numbers).
# It works by multiplying a mask where the numbers which are
# permitted have a zero at their index, and all others have a one.
# The result is a vector which will propogate counts of invalid
# numbers and remove counts of valid numbers, so that we need only
# see if the array is empty to determine validity.
invalid_characters = np.bincount(
self._bytes, minlength=self._number_of_extended_ascii_codes
) * self._validation_mask
if np.any(invalid_characters):
bad = list(np.where(
invalid_characters > 0)[0].astype(np.uint8).view('|S1'))
raise ValueError(
"Invalid character%s in sequence: %r. Valid IUPAC characters: "
"%r" % ('s' if len(bad) > 1 else '',
[str(b.tostring().decode("ascii")) for b in bad] if
len(bad) > 1 else bad[0],
list(self.alphabet)))
@stable(as_of='0.4.0')
def gaps(self):
"""Find positions containing gaps in the biological sequence.
Returns
-------
1D np.ndarray (bool)
Boolean vector where ``True`` indicates a gap character is present
at that position in the biological sequence.
See Also
--------
has_gaps
Examples
--------
>>> from skbio import DNA
>>> s = DNA('AC-G-')
>>> s.gaps()
array([False, False, True, False, True], dtype=bool)
"""
return np.in1d(self._bytes, self._gap_codes)
@stable(as_of='0.4.0')
def has_gaps(sel | f):
"""Determine if the sequence contains one or more gap characters.
Returns
-------
bool
Indicates whether there are one or more occurrences of gap
characters in the biological sequence.
Examples
--------
>>> from skbio import DNA
>>> s = DNA('ACACGACGTT')
>>> s.has_gaps()
False
>>> t = DNA('A.CAC--GACGTT')
>>> t.has | _gaps()
True
"""
# TODO use count, there aren't that many gap chars
# TODO: cache results
return bool(self.gaps().any())
@stable(as_of='0.4.0')
def degenerates(self):
"""Find positions containing degenerate characters in the sequence.
Returns
-------
1D np.ndarray (bool)
Boolean vector where ``True`` indicates a degenerate character is
present at that position in the biological sequence.
See Also
--------
has_degenerates
nondegenerates
has_nondegenerates
Examples
--------
>>> from skbio import DNA
>>> s = DNA('ACWGN')
>>> s.degenerates()
array([False, False, True, False, True], dtype=bool)
"""
return np.in1d(self._bytes, self._degenerate_codes)
@stable(as_of='0.4.0')
def has_degenerates(self):
"""Determine if sequence contains one or more degenerate characters.
Returns
-------
bool
Indicates whether there are one or more occurrences of degenerate
characters in the biological sequence.
See Also
--------
degenerate |
googleads/google-ads-python | google/ads/googleads/v10/enums/types/webpage_condition_operator.py | Python | apache-2.0 | 1,199 | 0.000834 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v10.enums",
marshal="google.ads.googleads.v10",
manifest={"WebpageConditionOperatorEnum",},
)
|
class WebpageConditionOperatorEnum(proto.Message):
r"""Container for enum describing webpage condition operator | in
webpage criterion.
"""
class WebpageConditionOperator(proto.Enum):
r"""The webpage condition operator in webpage criterion."""
UNSPECIFIED = 0
UNKNOWN = 1
EQUALS = 2
CONTAINS = 3
__all__ = tuple(sorted(__protobuf__.manifest))
|
tchellomello/home-assistant | homeassistant/components/nello/lock.py | Python | apache-2.0 | 2,961 | 0.001013 | """Nello.io lock platform."""
from itertools import filterfalse
import logging
from pynello.private import Nello
import voluptuous as vol
from homeassistant.components.lock import PLATFORM_SCHEMA, LockEntity
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name | __)
ATTR_ADDRESS = "address"
ATTR_LOCATION_ID = "location_id"
EVENT_DOOR_BELL = "nello_bell_ring"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERN | AME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Nello lock platform."""
nello = Nello(config.get(CONF_USERNAME), config.get(CONF_PASSWORD))
add_entities([NelloLock(lock) for lock in nello.locations], True)
class NelloLock(LockEntity):
"""Representation of a Nello lock."""
def __init__(self, nello_lock):
"""Initialize the lock."""
self._nello_lock = nello_lock
self._device_attrs = None
self._activity = None
self._name = None
@property
def name(self):
"""Return the name of the lock."""
return self._name
@property
def is_locked(self):
"""Return true if lock is locked."""
return True
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
return self._device_attrs
def update(self):
"""Update the nello lock properties."""
self._nello_lock.update()
# Location identifiers
location_id = self._nello_lock.location_id
short_id = self._nello_lock.short_id
address = self._nello_lock.address
self._name = f"Nello {short_id}"
self._device_attrs = {ATTR_ADDRESS: address, ATTR_LOCATION_ID: location_id}
# Process recent activity
activity = self._nello_lock.activity
if self._activity:
# Filter out old events
new_activity = list(filterfalse(lambda x: x in self._activity, activity))
if new_activity:
for act in new_activity:
activity_type = act.get("type")
if activity_type == "bell.ring.denied":
event_data = {
"address": address,
"date": act.get("date"),
"description": act.get("description"),
"location_id": location_id,
"short_id": short_id,
}
self.hass.bus.fire(EVENT_DOOR_BELL, event_data)
# Save the activity history so that we don't trigger an event twice
self._activity = activity
def unlock(self, **kwargs):
"""Unlock the device."""
if not self._nello_lock.open_door():
_LOGGER.error("Failed to unlock")
|
CloudBrewery/duplicity-swiftkeys | duplicity/backends/hsibackend.py | Python | gpl-2.0 | 2,186 | 0.004575 | # -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright 2002 Ben Escoto <ben@emerose.org>
# Copyright 2007 Kenneth Loafman <kenneth@loafman.com>
#
# This file is part of duplicity.
#
# Duplicity is free software; you can redistribute it and/or modify it
# under the ter | ms of the GNU General Public License as published by the
# Free Software Foundation; either version 2 of the License, or (at | your
# option) any later version.
#
# Duplicity is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with duplicity; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import os
import duplicity.backend
hsi_command = "hsi"
class HSIBackend(duplicity.backend.Backend):
def __init__(self, parsed_url):
duplicity.backend.Backend.__init__(self, parsed_url)
self.host_string = parsed_url.hostname
self.remote_dir = parsed_url.path
if self.remote_dir:
self.remote_prefix = self.remote_dir + "/"
else:
self.remote_prefix = ""
def _put(self, source_path, remote_filename):
commandline = '%s "put %s : %s%s"' % (hsi_command,source_path.name,self.remote_prefix,remote_filename)
self.subprocess_popen(commandline)
def _get(self, remote_filename, local_path):
commandline = '%s "get %s : %s%s"' % (hsi_command, local_path.name, self.remote_prefix, remote_filename)
self.subprocess_popen(commandline)
def _list(self):
commandline = '%s "ls -l %s"' % (hsi_command, self.remote_dir)
l = os.popen3(commandline)[2].readlines()[3:]
for i in range(0,len(l)):
l[i] = l[i].split()[-1]
return [x for x in l if x]
def _delete(self, filename):
commandline = '%s "rm %s%s"' % (hsi_command, self.remote_prefix, filename)
self.subprocess_popen(commandline)
duplicity.backend.register_backend("hsi", HSIBackend)
|
pycam/python-functions-and-modules | my_first_module.py | Python | unlicense | 51 | 0 | de | f say_hello(user):
print('Hello', user, | '!')
|
kaarl/pyload | module/plugins/internal/Account.py | Python | gpl-3.0 | 12,347 | 0.007208 | # -*- coding: utf-8 -*-
import random
import re
import threading
import time
from module.plugins.internal.Plugin import Plugin, Skip
from module.plugins.internal.misc import Periodical, compare_time, decode, isiterable, lock, parse_size
class Account(Plugin):
__name__ = "Account"
__type__ = "account"
__version__ = "0.75"
__status__ = "stable"
__description__ = """Base account plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "vuolter@gmail.com")]
LOGIN_TIMEOUT = 30 * 60 #: Relogin account every 30 minutes
TUNE_TIMEOUT = True #: Automatically tune relogin interval
def __init__(self, manager, accounts):
self._init(manager.core)
self.manager = manager
self.lock = threading.RLock()
self.accounts = accounts #@TODO: Recheck in 0.4.10
self.user = None
self.timeout = self.LOGIN_TIMEOUT
#: Callback of periodical job task, used by HookManager
self.periodical = Periodical(self, self.periodical_task)
self.cb = self.periodical.cb #@TODO: Recheck in 0.4.10
self.init()
@property
def logged(self):
"""
Checks if user is still logged in
"""
if not self.user:
return False
self.sync()
if self.info['login']['timestamp'] + self.timeout < time.time():
self.log_debug("Reached login timeout for user `%s`" % self.user)
return False
else:
return True
@property
def premium(self):
return bool(self.get_data('premium'))
def _log(self, level, plugintype, pluginname, messages):
log = getattr(self.pyload.log, level)
msg = u" | ".join(decode(a).strip() for a in messages if a)
#: Hide any password
try:
msg = msg.replace(self.info['login']['password'], "**********")
except Exception:
pass
log("%(plugintype)s %(pluginname)s: %(msg)s" %
{'plugintype': plugintype.upper(),
'pluginname': pluginname,
'msg' : msg})
def setup(self):
"""
Setup for enviroment and other things, called before logging (possibly more than one time)
"""
pass
def periodical_task(self):
raise NotImplementedError
def signin(self, user, password, data):
"""
Login into account, the cookies will be saved so user can be recognized
"""
pass
def login(self):
if not self.req:
self.log_info(_("Login user `%s`...") % self.user)
else:
self.log_info(_("Relogin user `%s`...") % self.user)
self.clean()
self.req = self.pyload.requestFactory.getRequest(self.classname, self.user)
self.sync()
self.setup()
timestamp = time.time()
try:
self.signin(self.user, self.info['login']['password'], self.info['data'])
except Skip, e:
self.log_warning(_("Skipped login user `%s`") % self.user, e)
self.info['login']['valid'] = True
new_timeout = timestamp - self.info['login']['timestamp']
if self.TUNE_TIMEOUT and new_timeout > self.timeout:
self.timeout = new_timeout
except Exception, e:
self.log_error(_("Could not login user `%s`") % self.user, e)
self.info['login']['valid'] = False
else:
self.info['login']['valid'] = True
finally:
self.info['login']['timestamp'] = timestamp #: Set timestamp for login
self.syncback()
return bool(self.info['login']['valid'])
#@TODO: Recheck in 0.4.10
def syncback(self):
"""
Wrapper to directly sync self.info -> self.accounts[self.user]
"""
return self.sync(reverse=True)
#@TODO: Recheck in 0.4.10
def sync(self, reverse=False):
"""
Sync self.accounts[self.user] -> self.info
or self.info -> self.accounts[self.user] (if reverse is True)
"""
if not self.user:
return
u = self.accounts[self.user]
if reverse:
u.update(self.info['data'])
u.update(self.info['login'])
else:
d = {'login': {}, 'data': {}}
for k, v in u.items():
if k in ('password', 'timestamp', 'valid'):
d['login'][k] = v
else:
d['data'][k] = v
self.info.update(d)
def relogin(self):
return self.login()
def reset(self):
self.sync()
clear = lambda x: {} if isinstance(x, dict) else [] if isiterable(x) else None
self.info['data'] = dict((k, clear(v)) for k, v in self.info['data'].items())
self.info['data']['options'] = {'limitdl': ['0']}
self.syncback()
def get_info(self, refresh=True):
"""
Retrieve account infos for an user, do **not** overwrite this method!
just use it to retrieve infos in hoster plugins. see `grab_info`
:param user: username
:param relogin: reloads cached account information
:return: dictionary with information
"""
if not self.logged:
if self.relogin():
refresh = True
else:
refresh = False
self.reset()
if refresh and self.info['login']['valid']:
self.log_info(_("Grabbing account info for user `%s`...") % self.user)
self.info = self._grab_info()
self.syncback()
self.log_debug("Account info for user `%s`: %s" % (self.user, self.info))
return self.info
def get_login(self, key=None, default=None):
d = self.get_info()['login']
return d.get(key, default) if key else d
def get_data(self, key=None, default=None):
d = self.get_info()['data']
return d.get(key, default) if key else d
def _grab_info(self):
try:
data = self.grab_info(self.user, self.info['login']['password'], self.info['data'])
if data and isinstance(data, dict):
self.info['data'].update(data)
except Exception, e:
self.log_warning(_("Error loading info for user `%s`") % self.user, e)
finally:
return self.info
def grab_info(self, user, password, data):
"""
This should be overwritten in account plugin
and retrieving account information for user
:param user:
:param req: `Request` instance
:return:
"""
pass
###########################################################################
#@TODO: Recheck and move to `AccountManager` in 0.4.10 ####################
###########################################################################
@lock
def init_accounts(self):
accounts = dict(self.accounts)
self.ac | counts.clear()
for user, info in accounts.items():
self.add(user, info['password'], info['options'])
@lock
def getAccountData(self, user, force=False):
if force:
| self.accounts[user]['plugin'].get_info()
return self.accounts[user]
@lock
def getAllAccounts(self, force=False):
if force:
self.init_accounts() #@TODO: Recheck in 0.4.10
return [self.getAccountData(user, force) for user in self.accounts]
#@TODO: Remove in 0.4.10
@lock
def scheduleRefresh(self, user, force=False):
pass
@lock
def add(self, user, password=None, options={}):
self.log_info(_("Adding user `%s`...") % user)
if user in self.accounts:
self.log_error(_("Error adding user `%s`") % user, _("User already exists"))
return False
d = {'login' : user,
'maxtraffic' : None,
'options' : options or {'limitdl': ['0']},
'password' : password or "",
'plugin' : self.__class__(self.manager, self.acc |
mgraupe/acq4 | acq4/devices/Laser/taskTemplate.py | Python | mit | 6,745 | 0.001927 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file '.\taskTemplate.ui'
#
# Created: Thu Oct 08 16:48:34 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return Qt | Gui.QApplication.translate(context, text, disambig)
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(218, 236)
self.gridLayout_2 = QtGui.QGridLayout(Form)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.group | Box = QtGui.QGroupBox(Form)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setSpacing(0)
self.gridLayout.setContentsMargins(3, 0, 3, 3)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.powerWaveRadio = QtGui.QRadioButton(self.groupBox)
self.powerWaveRadio.setChecked(True)
self.powerWaveRadio.setObjectName(_fromUtf8("powerWaveRadio"))
self.gridLayout.addWidget(self.powerWaveRadio, 0, 0, 1, 1)
self.switchWaveRadio = QtGui.QRadioButton(self.groupBox)
self.switchWaveRadio.setObjectName(_fromUtf8("switchWaveRadio"))
self.gridLayout.addWidget(self.switchWaveRadio, 1, 0, 1, 1)
self.gridLayout_2.addWidget(self.groupBox, 5, 0, 1, 3)
self.wavelengthWidget = QtGui.QWidget(Form)
self.wavelengthWidget.setObjectName(_fromUtf8("wavelengthWidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.wavelengthWidget)
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setMargin(0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.setWavelengthCheck = QtGui.QCheckBox(self.wavelengthWidget)
self.setWavelengthCheck.setObjectName(_fromUtf8("setWavelengthCheck"))
self.horizontalLayout.addWidget(self.setWavelengthCheck)
self.wavelengthSpin = QtGui.QSpinBox(self.wavelengthWidget)
self.wavelengthSpin.setMaximum(4000)
self.wavelengthSpin.setSingleStep(10)
self.wavelengthSpin.setProperty("value", 1080)
self.wavelengthSpin.setObjectName(_fromUtf8("wavelengthSpin"))
self.horizontalLayout.addWidget(self.wavelengthSpin)
self.gridLayout_2.addWidget(self.wavelengthWidget, 4, 0, 1, 3)
self.label_2 = QtGui.QLabel(Form)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.gridLayout_2.addWidget(self.label_2, 0, 0, 1, 1)
self.outputPowerLabel = QtGui.QLabel(Form)
self.outputPowerLabel.setObjectName(_fromUtf8("outputPowerLabel"))
self.gridLayout_2.addWidget(self.outputPowerLabel, 0, 1, 1, 1)
self.checkPowerBtn = QtGui.QPushButton(Form)
self.checkPowerBtn.setObjectName(_fromUtf8("checkPowerBtn"))
self.gridLayout_2.addWidget(self.checkPowerBtn, 0, 2, 1, 1)
self.label_3 = QtGui.QLabel(Form)
font = QtGui.QFont()
font.setBold(False)
font.setWeight(50)
self.label_3.setFont(font)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.gridLayout_2.addWidget(self.label_3, 1, 0, 1, 1)
self.samplePowerLabel = QtGui.QLabel(Form)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.samplePowerLabel.setFont(font)
self.samplePowerLabel.setObjectName(_fromUtf8("samplePowerLabel"))
self.gridLayout_2.addWidget(self.samplePowerLabel, 1, 1, 1, 1)
self.adjustLengthCheck = QtGui.QCheckBox(Form)
self.adjustLengthCheck.setChecked(True)
self.adjustLengthCheck.setTristate(False)
self.adjustLengthCheck.setObjectName(_fromUtf8("adjustLengthCheck"))
self.gridLayout_2.addWidget(self.adjustLengthCheck, 3, 0, 1, 3)
self.checkPowerCheck = QtGui.QCheckBox(Form)
self.checkPowerCheck.setChecked(True)
self.checkPowerCheck.setObjectName(_fromUtf8("checkPowerCheck"))
self.gridLayout_2.addWidget(self.checkPowerCheck, 2, 0, 1, 3)
self.releaseBetweenTasks = QtGui.QRadioButton(Form)
self.releaseBetweenTasks.setObjectName(_fromUtf8("releaseBetweenTasks"))
self.releaseButtonGroup = QtGui.QButtonGroup(Form)
self.releaseButtonGroup.setObjectName(_fromUtf8("releaseButtonGroup"))
self.releaseButtonGroup.addButton(self.releaseBetweenTasks)
self.gridLayout_2.addWidget(self.releaseBetweenTasks, 6, 0, 1, 3)
self.releaseAfterSequence = QtGui.QRadioButton(Form)
self.releaseAfterSequence.setChecked(True)
self.releaseAfterSequence.setObjectName(_fromUtf8("releaseAfterSequence"))
self.releaseButtonGroup.addButton(self.releaseAfterSequence)
self.gridLayout_2.addWidget(self.releaseAfterSequence, 7, 0, 1, 3)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(_translate("Form", "Form", None))
self.groupBox.setTitle(_translate("Form", "Control Mode:", None))
self.powerWaveRadio.setText(_translate("Form", "Power waveform (W)", None))
self.switchWaveRadio.setText(_translate("Form", "Switch waveform (%)", None))
self.setWavelengthCheck.setText(_translate("Form", "Set wavelength", None))
self.wavelengthSpin.setSuffix(_translate("Form", " nm", None))
self.label_2.setText(_translate("Form", "Output Power:", None))
self.outputPowerLabel.setText(_translate("Form", "0mW", None))
self.checkPowerBtn.setText(_translate("Form", "Check Power", None))
self.label_3.setText(_translate("Form", "Power at Sample:", None))
self.samplePowerLabel.setText(_translate("Form", "0mW", None))
self.adjustLengthCheck.setToolTip(_translate("Form", "If the output power of the laser changes, adjust the length of laser pulses to maintain constant pulse energy.", None))
self.adjustLengthCheck.setText(_translate("Form", "Adjust pulse length if power changes", None))
self.checkPowerCheck.setText(_translate("Form", "Check power before task start", None))
self.releaseBetweenTasks.setText(_translate("Form", "Release between tasks", None))
self.releaseAfterSequence.setText(_translate("Form", "Release after sequence", None))
|
LAUDATIO-Repository/Version1.1 | app/webroot/js/creativecommons/license_xsl/licensexsl_tools/makerdf.py | Python | gpl-3.0 | 2,716 | 0.004786 | """
makerdf.py
Assemble RDF describing all available CC licenses using licenses.xml
as a source for all canonical license URIs.
Requires RDFlib (http://rdflib.net), lxml (http://codespeak.net/lxml).
(c) 2005-2006, Nathan R. Yergler, Creative Commons.
"""
__version__ = 0.5
from rdflib.Graph import Graph
import rdflib
import lxml.etree
import sys
import StringIO
import httplib
import urllib2
from optparse import make_option, OptionParser
def initOpts():
"""Assemble the option parser."""
option_list = [
make_option("-v", "--verbose",
action="store_true", dest="verbose", default=False,
help=""),
make_option("-l", "--licenses",
action="store", type="s | tring", dest="licenses_xml",
help="Use the specified licenses file.",
default="licenses.xml"),
make_option("-o", "--output",
action="store", type="string", dest="output_rdf",
help="Write the RDF to the specified file.",
default=""),
]
usage = "%prog [-v] [-l licenses.xml] [-o output.rdf]"
par | ser = OptionParser(usage=usage,
version="%%prog %s" % __version__,
option_list = option_list)
return parser
def assembleRDF(instream, outstream, verbose=False):
licenses = lxml.etree.parse(instream)
graph = Graph('default',"http://creativecommons.org/licenses/index.rdf")
uris = licenses.xpath('//jurisdiction/version/@uri')
for uri in uris:
if verbose:
print >>sys.stderr, 'Retrieving %srdf...' % uri
try:
rdfsource = rdflib.URLInputSource('%srdf' % uri)
except httplib.BadStatusLine, e:
print >>sys.stderr, 'Error retrieving %srdf; bad status line.' % uri
uris.append(uri)
continue
except urllib2.URLError, e:
print >>sys.stderr, 'URL error on %srdf.' % uri
uris.append(uri)
if verbose:
print >>sys.stderr, 'Parsing %srdf...' % uri
try:
graph.parse(rdfsource, publicID=uri)
except Exception, e:
print e
uris.append(uri)
graph.serialize(outstream)
def main():
    """Run the makerdf script."""
    optparser = initOpts()
    (options, args) = optparser.parse_args()
    # Collect the combined RDF in memory first so we can decide afterwards
    # whether it goes to a file or to stdout.
    output = StringIO.StringIO()
    assembleRDF(file(options.licenses_xml), output, options.verbose)
    if options.output_rdf:
        # -o was given: write the serialized graph to the requested file.
        file(options.output_rdf, 'w').write(output.getvalue())
    else:
        # No output file requested: dump the RDF to stdout.
        print output.getvalue()
if __name__ == '__main__':
    main()
|
johnson1228/pymatgen | pymatgen/analysis/pourbaix/tests/test_maker.py | Python | mit | 1,116 | 0.005376 | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import unittest
import os
from pymatgen.analysis.pourbaix.maker import PourbaixDiagram
from pymatgen.analysis.pourbaix.entry import PourbaixEntryIO
class TestPourbaixDiagram(unittest.TestCase):
    """Sanity checks for a PourbaixDiagram built from the bundled Zn entries."""
    def setUp(self):
        # The CSV fixture lives next to this test module.
        module_dir = os.path.dirname(os.path.abspath(__file__))
        (elements, entries) = PourbaixEntryIO.from_csv(os.path.join(module_dir,
                                                    "test_entries.csv"))
        self._entries = entries
        self._pd = PourbaixDiagram(entries)
        # Species expected to be stable on the Zn Pourbaix diagram.
        self.list_of_stable_entries = ["ZnO(s)", "ZnO2(s)", "Zn[2+]", "ZnHO2[-]", "ZnO2[2-]", "Zn(s)"]
    def test_pourbaix_diagram(self):
        self.assertEqual(len(self._pd.facets), 6, "Incorrect number of facets")
        self.assertEqual(set([e.name for e in self._pd.stable_entries]),
                         set(self.list_of_stable_entries), "List of stable entries does not match")
if __name__ == '__main__':
    unittest.main()
|
halfline/gedit | plugins/externaltools/tools/windowactivatable.py | Python | gpl-2.0 | 7,385 | 0.002844 | # -*- coding: UTF-8 -*-
# Gedit External Tools plugin
# Copyright (C) 2005-2006 Steve Frécinaux <steve@istique.net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
__all__ = ('ExternalToolsPlugin', 'Manager', 'OutputPanel', 'Capture', 'UniqueById')
from gi.repository import GLib, Gio, GObject, Gtk, Gedit, PeasGtk
from .manager import Manager
from .library import ToolLibrary
from .outputpanel import OutputPanel
from .capture import Capture
from .functions import *
class ToolMenu(object):
    """Builds and maintains the 'External Tools' submenu of one gedit window.

    One Gio.SimpleAction is registered per tool; accelerators are added for
    tools that declare a shortcut, and entries are enabled/disabled to match
    the currently active document (see filter()).
    """
    def __init__(self, library, window, panel, menu):
        super(ToolMenu, self).__init__()
        self._library = library
        self._window = window
        self._panel = panel
        self._menu = menu
        # Maps generated action names -> tool objects, used for cleanup
        # and for per-document filtering.
        self._action_tools = {}
        self.update()
    def deactivate(self):
        """Tear the menu down when the plugin is deactivated."""
        self.remove()
    def remove(self):
        """Remove all menu items, window actions and accelerators."""
        self._menu.remove_all()
        for name, tool in self._action_tools.items():
            self._window.remove_action(name)
            if tool.shortcut:
                app = Gio.Application.get_default()
                app.remove_accelerator(tool.shortcut)
        self._action_tools = {}
    def _insert_directory(self, directory, menu):
        """Recursively mirror the tool library tree into the Gio.Menu."""
        # Sub-directories first, each as its own submenu with a section.
        for d in sorted(directory.subdirs, key=lambda x: x.name.lower()):
            submenu = Gio.Menu()
            # '_' doubled so it is not interpreted as a mnemonic marker.
            menu.append_submenu(d.name.replace('_', '__'), submenu)
            section = Gio.Menu()
            submenu.append_section(None, section)
            self._insert_directory(d, section)
        for tool in sorted(directory.tools, key=lambda x: x.name.lower()):
            # Action names must be unique per window; ids of the live
            # objects guarantee that.
            action_name = 'external-tool_%X_%X' % (id(tool), id(tool.name))
            self._action_tools[action_name] = tool
            action = Gio.SimpleAction(name=action_name)
            action.connect('activate', capture_menu_action, self._window, self._panel, tool)
            self._window.add_action(action)
            item = Gio.MenuItem.new(tool.name.replace('_', '__'), "win.%s" % action_name)
            # Hide (not just grey out) entries whose action is disabled.
            item.set_attribute_value("hidden-when", GLib.Variant.new_string("action-disabled"))
            menu.append_item(item)
            if tool.shortcut:
                app = Gio.Application.get_default()
                app.add_accelerator(tool.shortcut, "win.%s" % action_name, None)
    def update(self):
        """Rebuild the whole menu from the tool library."""
        self.remove()
        self._insert_directory(self._library.tree, self._menu)
        self.filter(self._window.get_active_document())
    def filter_language(self, language, item):
        """Return True when *item* applies to *language* (None = plain text)."""
        if not item.languages:
            return True
        if not language and 'plain' in item.languages:
            return True
        if language and (language.get_id() in item.languages):
            return True
        else:
            return False
    def filter(self, document):
        """Enable/disable each tool action for the given active document."""
        if document is None:
            titled = False
            remote = False
            language = None
        else:
            titled = document.get_location() is not None
            remote = not document.is_local()
            language = document.get_language()
        # Truth table keyed by a tool's declared applicability.
        states = {
            'always': True,
            'all' : document is not None,
            'local': titled and not remote,
            'remote': titled and remote,
            'titled': titled,
            'untitled': not titled,
        }
        for name, tool in self._action_tools.items():
            action = self._window.lookup_action(name)
            if action:
                action.set_enabled(states[tool.applicability] and
                                   self.filter_language(language, tool))
# FIXME: restore the launch of the manager on configure using PeasGtk.Configurable
class WindowActivatable(GObject.Object, Gedit.WindowActivatable):
    """Per-window part of the External Tools plugin.

    Installs the gear-menu entries, the tools submenu (via ToolMenu) and
    the "Tool Output" bottom panel, and opens the tool manager dialog.
    """
    __gtype_name__ = "ExternalToolsWindowActivatable"
    window = GObject.property(type=Gedit.Window)
    def __init__(self):
        GObject.Object.__init__(self)
        self._manager = None
        # Remembered across dialog openings so the manager reopens at the
        # same size the user left it.
        self._manager_default_size = None
        self.menu = None
    def do_activate(self):
        # Ugly hack... we need to get access to the activatable to update the menuitems
        self.window._external_tools_window_activatable = self
        self._library = ToolLibrary()
        action = Gio.SimpleAction(name="manage_tools")
        action.connect("activate", lambda action, parameter: self.open_dialog())
        self.window.add_action(action)
        self.gear_menu = self.extend_gear_menu("ext9")
        item = Gio.MenuItem.new(_("Manage _External Tools..."), "win.manage_tools")
        self.gear_menu.append_menu_item(item)
        external_tools_submenu = Gio.Menu()
        item = Gio.MenuItem.new_submenu(_("External _Tools"), external_tools_submenu)
        self.gear_menu.append_menu_item(item)
        external_tools_submenu_section = Gio.Menu()
        external_tools_submenu.append_section(None, external_tools_submenu_section)
        # Create output console
        self._output_buffer = OutputPanel(self.plugin_info.get_data_dir(), self.window)
        self.menu = ToolMenu(self._library, self.window, self._output_buffer, external_tools_submenu_section)
        bottom = self.window.get_bottom_panel()
        bottom.add_titled(self._output_buffer.panel, "GeditExternalToolsShellOutput", _("Tool Output"))
    def do_update_state(self):
        # Re-filter the tool entries whenever the active document changes.
        if self.menu is not None:
            self.menu.filter(self.window.get_active_document())
    def do_deactivate(self):
        self.window._external_tools_window_activatable = None
        self.menu.deactivate()
        self.window.remove_action("manage_tools")
        bottom = self.window.get_bottom_panel()
        bottom.remove(self._output_buffer.panel)
    def open_dialog(self):
        """Open (or re-focus) the tool manager dialog."""
        if not self._manager:
            self._manager = Manager(self.plugin_info.get_data_dir())
            if self._manager_default_size:
                self._manager.dialog.set_default_size(*self._manager_default_size)
            self._manager.dialog.connect('destroy', self.on_manager_destroy)
            self._manager.connect('tools-updated', self.on_manager_tools_updated)
        window = Gio.Application.get_default().get_active_window()
        self._manager.run(window)
        return self._manager.dialog
    def update_manager(self, tool):
        """Tell an open manager dialog that *tool* changed on disk."""
        if self._manager:
            self._manager.tool_changed(tool, True)
    def on_manager_destroy(self, dialog):
        self._manager_default_size = self._manager.get_final_size()
        self._manager = None
    def on_manager_tools_updated(self, manager):
        # Every window keeps its own menu; refresh them all.
        for window in Gio.Application.get_default().get_windows():
            window._external_tools_window_activatable.menu.update()
# ex:ts=4:et:
|
zedshaw/learn-python3-thw-code | ex4.py | Python | mit | 566 | 0.001767 | cars = 100
space_in_a_car = 4.0
drivers = 30
passengers = 90
cars_not_driven = cars - drivers
cars_driven = drivers
carpool_capacity = cars_driven * space_in_a_car
average_passe | ngers_per_car = passengers / cars_driven
print("There are", cars, "cars available.")
print("There are only", drivers, "driver | s available.")
print("There will be", cars_not_driven, "empty cars today.")
print("We can transport", carpool_capacity, "people today.")
print("We have", passengers, "to carpool today.")
print("We need to put about", average_passengers_per_car, "in each car.")
|
AleksNeStu/ggrc-core | test/integration/ggrc/converters/test_import_assessments.py | Python | apache-2.0 | 19,212 | 0.001457 | # Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# pylint: disable=maybe-no-member, invalid-name
"""Test request import and updates."""
import csv
from collections import OrderedDict
from cStringIO import StringIO
from itertools import izip
from flask.json import dumps
from ggrc import db
from ggrc import models
from ggrc.converters import errors
from integration.ggrc.models import factories
from integration.ggrc import TestCase
from integration.ggrc.generator import ObjectGenerator
class TestAssessmentImport(TestCase):
"""Basic Assessment import tests with.
This test suite should test new Assessment imports, exports, and updates.
The main focus of these tests is checking error messages for invalid state
transitions.
"""
  def setUp(self):
    """Set up for Assessment test cases."""
    super(TestAssessmentImport, self).setUp()
    # Every import request below requires an authenticated session.
    self.client.get("/login")
  def test_import_assessments_with_templates(self):
    """Test importing of assessments with templates."""
    # The template must exist before assessments referencing it are imported.
    self.import_file("assessment_template_no_warnings.csv")
    response = self.import_file("assessment_with_templates.csv")
    self._check_csv_response(response, {})
    assessment = models.Assessment.query.filter(
        models.Assessment.slug == "A 4").first()
    # The template-defined custom attributes should be populated from the CSV.
    values = set(v.attribute_value for v in assessment.custom_attribute_values)
    self.assertIn("abc", values)
    self.assertIn("2015-07-15", values)
def _test_assessment_users(self, asmt, users):
""" Test that all users have correct roles on specified Assessment"""
verification_errors = ""
for user_name, expected_types in users.items():
try:
user = models.Person.query.filter_by(name=user_name).first()
rel = models.Relationship.find_related(asmt, user)
if expected_types:
self.assertNotEqual(
rel, None,
"User {} is not mapped to {}".format(user.email, asmt.slug))
self.assertIn("AssigneeType", rel.relationship_attrs)
self.assertEqual(
set(rel.relationship_attrs[
"AssigneeType"].attr_value.split(",")),
expected_types
)
else:
self.assertEqual(
rel, None,
"User {} is mapped to {}".format(user.email, asmt.slug))
except AssertionError as error:
verification_errors += "\n\nChecks for Users-Assessment mapping "\
"failed for user '{}' with:\n{}".format(user_name, str(error))
self.assertEqual(verification_errors, "", verification_errors)
  def test_assessment_full_no_warnings(self):
    """ Test full assessment import with no warnings
    CSV sheet:
      https://docs.google.com/spreadsheets/d/1Jg8jum2eQfvR3kZNVYbVKizWIGZXvfqv3yQpo2rIiD8/edit#gid=704933240&vpid=A7
    """
    response = self.import_file("assessment_full_no_warnings.csv")
    self._check_csv_response(response, {})
    # Test first Assessment line in the CSV file
    asmt_1 = models.Assessment.query.filter_by(slug="Assessment 1").first()
    users = {
        "user 1": {"Assessor"},
        "user 2": {"Assessor", "Creator"}
    }
    self._test_assessment_users(asmt_1, users)
    self.assertEqual(asmt_1.status, models.Assessment.START_STATE)
    # Test second Assessment line in the CSV file
    # NOTE: empty dict values mean "this user must not be mapped at all".
    asmt_2 = models.Assessment.query.filter_by(slug="Assessment 2").first()
    users = {
        "user 1": {"Assessor"},
        "user 2": {"Creator"},
        "user 3": {},
        "user 4": {},
        "user 5": {},
    }
    self._test_assessment_users(asmt_2, users)
    self.assertEqual(asmt_2.status, models.Assessment.PROGRESS_STATE)
    # The audit mapped to the assessment must share its context, and the
    # imported evidence must live in that same context.
    audit = [obj for obj in asmt_1.related_objects() if obj.type == "Audit"][0]
    self.assertEqual(audit.context, asmt_1.context)
    evidence = models.Document.query.filter_by(title="some title 2").first()
    self.assertEqual(audit.context, evidence.context)
  def test_assessment_import_states(self):
    """ Test Assessment state imports
    These tests are an intermediate part for zucchini release and will be
    updated in the next release.
    CSV sheet:
      https://docs.google.com/spreadsheets/d/1Jg8jum2eQfvR3kZNVYbVKizWIGZXvfqv3yQpo2rIiD8/edit#gid=299569476
    """
    # Seed the database, then apply the state-changing update file.
    self.import_file("assessment_full_no_warnings.csv")
    response = self.import_file("assessment_update_intermediate.csv")
    # The update import is expected to produce no errors or warnings at all.
    expected_errors = {
        "Assessment": {
            "block_errors": set(),
            "block_warnings": set(),
            "row_errors": set(),
            "row_warnings": set(),
        }
    }
    self._check_csv_response(response, expected_errors)
    assessments = {r.slug: r for r in models.Assessment.query.all()}
    # Each assessment in the update file exercises one state transition.
    self.assertEqual(assessments["Assessment 60"].status,
                     models.Assessment.START_STATE)
    self.assertEqual(assessments["Assessment 61"].status,
                     models.Assessment.PROGRESS_STATE)
    self.assertEqual(assessments["Assessment 62"].status,
                     models.Assessment.DONE_STATE)
    self.assertEqual(assessments["Assessment 63"].status,
                     models.Assessment.FINAL_STATE)
    self.assertEqual(assessments["Assessment 64"].status,
                     models.Assessment.FINAL_STATE)
    self.assertEqual(assessments["Assessment 3"].status,
                     models.Assessment.FINAL_STATE)
    self.assertEqual(assessments["Assessment 4"].status,
                     models.Assessment.FINAL_STATE)
    # Check that there is only one attachment left
    asmt1 = assessments["Assessment 1"]
    self.assertEqual(len(asmt1.documents), 1)
    # Check that there are only the two new URLs present in asessment 1
    url_titles = set(obj.title for obj in asmt1.related_objects()
                     if isinstance(obj, models.Document))
    self.assertEqual(url_titles, set(["a.b.com", "c d com"]))
def test_assessment_warnings_errors(self):
""" Test full assessment import with warnings and errors
CSV sheet:
https://docs.google.com/spreadsheets/d/1Jg8jum2eQfvR3kZNVYbVKizWIGZXvfqv3yQpo2rIiD8/edit#gid=889865936
"""
self.import_file("assessment_full_no_warnings.csv")
response = self.import_file("assessment_with_warnings_and_errors.csv")
expected_errors = {
"Assessment": {
"block_errors": set([]),
"block_warnings": {
errors.UNKNOWN_COLUMN.format(
line=2,
column_name="error description - non existing column will "
"be ignored"
),
errors.UNKNOWN_COLUMN.format(
line=2,
column_name="actual error message"
),
errors.UNKNOWN_COLUMN.format(
line=2,
column_name="map:project"
),
},
"row_errors": {
errors.MISSING_VALUE_ERROR.format(
line=19,
column_name="Audit"
),
errors.DUPLICATE_VALUE_IN_CSV.format(
line_list="20, 22",
column_name="Code",
value="Assessment 22",
s="",
| ignore_lines="22",
),
},
"row_warnings": {
errors.UNKNOWN_OBJECT.format(
line=19,
object_type="Audit",
| slug="not existing"
),
errors.WRONG_VALUE_DEFAULT.format(
line=20,
column_name="State",
value="open",
),
},
}
}
self._check_csv_response(response, expected_errors)
def test_mapping_control_through_snapshot(self):
"Test for add mapping control on assessment"
audit = factories.AuditFactory()
assessment = factories.AssessmentFactory(audit=audit)
factories.RelationshipFactory(source=audit, destination=assessment)
control = factories.ControlFactory()
revision = models.Revision.query.filter(
models.Re |
kerel-fs/skylines | skylines/api/views/errors.py | Python | agpl-3.0 | 1,466 | 0 | from werkzeug.exceptions import HTTPException, InternalServerError
from .json import jsonify
def register(app):
    """
    Register error handlers on the given app

    :type app: flask.Flask
    """

    @app.errorhandler(400)
    @app.errorhandler(401)
    @app.errorhandler(403)
    @app.errorhandler(404)
    @app.errorhandler(405)
    @app.errorhandler(500)
    def handle_http_error(e):
        # Anything that is not an HTTPException (e.g. an unexpected crash)
        # is reported as a generic internal server error.
        if not isinstance(e, HTTPException):
            e = InternalServerError()

        data = getattr(e, 'data', None)
        if data:
            message = data['message']
        else:
            message = e.description

        return jsonify({
            'message': message,
        }, status=e.code)

    @app.errorhandler(422)
    def handle_bad_request(err):
        # webargs attaches additional metadata to the `data` attribute, but
        # it may be absent.  BUGFIX: getattr() previously had no default and
        # raised AttributeError for non-webargs 422 errors.
        data = getattr(err, 'data', None)
        if data:
            # Get validations from the ValidationError object
            messages = data['exc'].messages
        else:
            messages = ['Invalid request']

        return jsonify({
            'messages': messages,
        }, status=422)

    @app.errorhandler(TypeError)
    @app.errorhandler(ValueError)
    def raise_bad_request(e):
        # NOTE(review): `e.message` exists only on Python 2 exceptions;
        # switch to str(e) when porting to Python 3 -- confirm interpreter.
        return jsonify({
            'message': e.message,
        }, status=400)

    @app.errorhandler(LookupError)
    def raise_not_found(e):
        return jsonify({
            'message': e.message,
        }, status=404)
|
piglei/uwsgi-sloth | uwsgi_sloth/template.py | Python | apache-2.0 | 1,040 | 0.002885 | # -*- coding: utf-8 -*-
"""Template shortcut & filters"""
import os
import datetime
from jinja2 import Environment, FileSystemLoader
from uwsgi_sloth.settings import ROOT
from uwsgi_sloth import settings, __VERSION__
template_path = os.path.join(ROOT, 'templates')
env = Environment(loader=FileSystemLoader(template_path))
# Template filters
def friendly_time(msecs):
    """Format a duration given in milliseconds as a compact human string.

    Returns e.g. '1h1m1s', '1m1s', '1s500ms' or '12.34ms' depending on the
    magnitude; only the sub-second form keeps fractional precision.
    """
    secs, msecs = divmod(msecs, 1000)
    mins, secs = divmod(secs, 60)
    hours, mins = divmod(mins, 60)
    if hours:
        return '%dh%dm%ds' % (hours, mins, secs)
    elif mins:
        return '%dm%ds' % (mins, secs)
    elif secs:
        return '%ds%dms' % (secs, msecs)
    else:
        return '%.2fms' % msecs
env.filters['friendly_time'] = friendly_time

def render_template(template_name, context=None):
    """Render *template_name* with *context* plus common globals.

    SETTINGS, the current timestamp and the package version are injected
    into every render.  If the caller passes a dict it is updated in place
    (pre-existing behaviour); BUGFIX: the default used to be a shared
    mutable ``{}`` that accumulated keys across calls.
    """
    if context is None:
        context = {}
    template = env.get_template(template_name)
    context.update(
        SETTINGS=settings,
        now=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
        version='.'.join(map(str, __VERSION__)))
    return template.render(**context)
|
wrboyce/ec2hashcat | ec2hashcat/aws/s3.py | Python | apache-2.0 | 5,740 | 0.002265 | """ Copyright 2015 Will Boyce """
from __future__ import print_function
import os
import re
from boto3.session import Session
from ec2hashcat import exceptions
class S3Bucket(object):
    """Convenience wrapper around the ec2hashcat S3 bucket.

    Files are stored under one key prefix per category in ``types``.
    Besides the explicit ``*_object`` methods, per-category helpers such as
    ``put_hashlist``, ``download_wordlist``, ``get_dumps`` or
    ``rule_exists`` are synthesised on the fly by ``__getattr__``.
    """
    # Storage categories; each one is a key prefix inside the bucket.
    types = ('hashlists', 'dumps', 'wordlists', 'rules')

    def __init__(self, cfg):
        self.cfg = cfg
        aws = Session(aws_access_key_id=self.cfg.aws_key,
                      aws_secret_access_key=self.cfg.aws_secret,
                      region_name=self.cfg.aws_region)
        self.s3_client = aws.client('s3')
        # create_bucket is effectively idempotent for an existing owned bucket.
        self.bucket = aws.resource('s3').create_bucket(Bucket=self.cfg.s3_bucket)

    def __getattr__(self, name):
        """Dispatch synthesised helpers like ``download_<type>``.

        ``<verb>_<type>`` resolves to the ``_<verb>`` handler,
        ``<verb>_<type>s`` to ``_<verb>_list``, ``<verb>_<type>_objects``
        to ``_<verb>_objects`` and ``<type>_exists`` to ``_exists``.
        """
        types = [t.rstrip('s') for t in self.types]
        attr_rx = [
            # BUGFIX: 'delete' was missing from the verb alternation, so the
            # delete_<type> helpers advertised by __dir__ never resolved.
            re.compile('(?P<before>delete|download|get|put)_(?P<type>{})_?(?P<after>objects|s|)'.format('|'.join(types))),
            re.compile('(?P<type>{})_(?P<after>exists)'.format('|'.join(types)))]
        for rgx in attr_rx:
            match = rgx.match(name)
            if match:
                groups = match.groupdict()
                groups['type'] = '{}s'.format(groups['type'])
                if groups['after'] == 's':
                    groups['after'] = 'list'
                attr = ['']
                for key in ('before', 'after'):
                    if key in groups and groups[key]:
                        attr.append(groups[key])
                attr = '_'.join(attr)
                return self.__getattribute__(attr)(groups['type'])
        raise AttributeError("'{}' object has no attribute '{}'".format(
            self.__class__.__name__, name))

    def __dir__(self):
        funcs = [
            'delete_object',
            'download_object',
            'get_object',
            'get_objects',
            'get_object_list',
            'object_exists',
            'put_object']
        func_templates = ('delete_{}', 'download_{}', 'get_{}', 'get_{}s', 'get_{}_objects', '{}_exists', 'put_{}')
        obj_types = [t[:-1] for t in self.types]
        # Pair every template with every type: sorted() groups the repeated
        # templates so each group lines up with one full cycle of obj_types.
        func_matrix = zip(sorted(func_templates * len(obj_types)), obj_types * len(func_templates))
        for func_template, obj_type in func_matrix:
            funcs.append(func_template.format(obj_type))
        return sorted(funcs)

    def _delete(self, object_type):
        """ Handler for 'delete_<type>' """
        # BUGFIX: was named _delete_object (never matched the dispatcher)
        # and wrongly delegated to download_object.
        return lambda name: self.delete_object(object_type, name)

    def _download(self, object_type):
        """ Handler for 'download_<type>' """
        # BUGFIX: renamed from _download_object to match the dispatcher.
        return lambda remote, local=None: self.download_object(object_type, remote, local)

    def _get(self, object_type):
        """ Handler for 'get_<type>' """
        # BUGFIX: renamed from _get_object to match the dispatcher.
        return lambda name: self.get_object(object_type, name)

    def _get_objects(self, object_type):
        """ Handler for 'get_<type>_objects' """
        return lambda: self.get_objects(object_type)

    def _get_list(self, object_type):
        """ Handler for 'get_<type>s' """
        return lambda: self.get_object_list(object_type)

    def _exists(self, object_type):
        """ Handler for '<type>_exists' """
        return lambda name: self.object_exists(object_type, name)

    def _put(self, object_type):
        """ Handler for 'put_<type>' """
        return lambda local, remote=None: self.put_object(object_type, local, remote)

    def delete_object(self, object_type, name):
        """Delete ``<object_type>/<name>`` from the bucket."""
        if not self.object_exists(object_type, name):
            raise exceptions.S3FileNotFoundError(object_type, name, self.cfg.s3_bucket)
        name = os.path.join(object_type, name)
        print("rm s3://{}/{}".format(self.cfg.s3_bucket, name))
        self.s3_client.delete_object(Bucket=self.cfg.s3_bucket, Key=name)

    def download_object(self, object_type, remote, local=None, quiet=False):
        """Download *remote*; the local name defaults to its basename."""
        if not self.object_exists(object_type, remote):
            raise exceptions.S3FileNotFoundError(object_type, remote, self.cfg.s3_bucket)
        if local is None:
            local = os.path.basename(remote)
        remote = os.path.join('{}'.format(object_type), remote)
        if not quiet:
            print("s3://{}/{} -> {}".format(self.cfg.s3_bucket, remote, local))
        self.s3_client.download_file(
            Bucket=self.cfg.s3_bucket,
            Key=remote,
            Filename=local)

    def get_object(self, object_type, name):
        """ Get an object representing the specified file on S3 """
        name = os.path.join(object_type, name)
        for obj in self.bucket.objects.filter(Prefix=name):
            if obj.key == name:
                return obj
        # BUGFIX: the exception was previously *returned* instead of raised.
        raise exceptions.S3FileNotFoundError(object_type, name, self.cfg.s3_bucket)

    def get_objects(self, object_type):
        """ Return objects representing all files of a given type in S3 """
        return [obj for obj in self.bucket.objects.filter(Prefix='{}/'.format(object_type))
                if obj.key != '{}/'.format(object_type)]

    def get_object_list(self, object_type):
        """ List all filenames of a given type in S3 """
        return [o.key.split('/', 1)[1] for o in self.get_objects(object_type)]

    def object_exists(self, object_type, name):
        """ Check if a file exists in S3 """
        return name in self.get_object_list(object_type)

    def put_object(self, object_type, local, remote=None):
        """ Upload the specified file to S3 """
        if not os.path.isfile(local):
            raise exceptions.FileNotFoundError(local)
        if remote is None:
            remote = local
        remote = os.path.join('{}'.format(object_type), os.path.basename(remote))
        print("{} -> s3://{}/{}".format(self.cfg.s3_bucket, local, remote))
        return self.s3_client.upload_file(Filename=local, Bucket=self.cfg.s3_bucket, Key=remote)
|
arubertoson/piemenu | piemenu/menusystem/node.py | Python | gpl-2.0 | 4,177 | 0 | #! usr/bin/env python2
from PySide import QtGui, QtCore
from PySide.QtCore import Qt
from settings import Icon
class Node(object):
    """Generic tree node backing the pie-menu model.

    A node keeps a reference to its parent and an ordered list of child
    items; concrete kinds (Command, Form, Separator) subclass it.
    """
    # Node type tags; also used as discriminators by from_type().
    Command, Form, Separator = 'command', 'form', 'separator'

    @classmethod
    def from_type(cls, type_, **kw):
        """Factory returning a Command or Form node for *type_*.

        BUGFIX: the positional *type_* argument used to be ignored and
        kw['type'] was required (KeyError otherwise).  *type_* now serves
        as the fallback, so existing callers passing type=... still work.
        """
        kw.setdefault('type', type_)
        node_cls = Command if kw['type'] == cls.Command else Form
        return node_cls(**kw)

    def __init__(self, **kw):
        self._parent = kw.get('parent', None)
        self._type = kw.get('type', None)
        self._style = kw.get('style', None)
        self._label = kw.get('label', '')
        self._description = kw.get('description', '')
        self._items = kw.get('items', [])
        self._qactive = True
        # Registers this node with its parent (appends to parent's items).
        self.setParent(self._parent)

    @property
    def qactive(self):
        return self._qactive

    @qactive.setter
    def qactive(self, value):
        self._qactive = value

    def __repr__(self):
        return '{0}({1!r})'.format(self.__class__.__name__, self.label())

    def __len__(self):
        return len(self._items)

    def __contains__(self, item):
        return item in self._items

    def label(self):
        return self._label

    def description(self):
        return self._description

    def type(self):
        return self._type

    def style(self):
        return self._style

    def parent(self):
        return self._parent

    def items(self):
        return self._items

    def setData(self, column, value):
        """Set the label (column 0 only); returns True on success."""
        if not column == 0:
            return False
        self._label = value
        return True

    def setParent(self, parent):
        if parent is not None:
            self._parent = parent
            self._parent.appendItem(self)
        else:
            self._parent = None

    def getItem(self, row):
        """Return the child at *row*, or None when out of range."""
        if len(self._items) > row:
            return self._items[row]

    def getItemAtRow(self, child):
        """Return the row index of *child*, or -1 when not found."""
        for i, item in enumerate(self._items):
            if item == child:
                return i
        return -1

    def appendItem(self, node):
        self._items.append(node)

    def insertItem(self, row, node):
        """Insert *node* at *row* and reparent it; returns True on success."""
        if row < 0 or row > len(self.items()):
            return False
        self._items.insert(row, node)
        node._parent = self
        return True

    def removeItem(self, row):
        item = self._items.pop(row)
        item._parent = None
        return True

    def columnCount(self):
        # NOTE(review): self._headers is never assigned anywhere in this
        # class, so this raises AttributeError -- confirm the intended
        # source of the header list before fixing.
        return len(self._headers)

    def row(self):
        """Index of this node within its parent, or -1 for a root node."""
        if self._parent is not None:
            return self._parent._items.index(self)
        return -1

    def sizeHint(self):
        return QtCore.QSize(16, 16)

    def log(self, level=-1):
        """Return an indented textual dump of this subtree (root as 'Root')."""
        level += 1
        output = ['\t' for i in range(level)]
        output.append(self._label if self._parent is not None else 'Root')
        output.append('\n')
        output.extend([item.log(level) for item in self._items])
        level -= 1
        return ''.join(output)
class Command(Node):
    """Leaf node bound to an executable command."""
    def __init__(self, **kw):
        super(Command, self).__init__(**kw)
        self._type = Node.Command
        self._icon = kw.get('icon', '')
        self._command = kw.get('command', '')
        self._sub_command = kw.get('sub_command', '')
        # Guarantee a visible label even when none was supplied.
        self._label = 'Untitled Command' if self._label == '' else self._label
    def icon(self):
        return self._icon
    def command(self):
        return self._command
    def subCommand(self):
        return self._sub_command
class Form(Node):
    """Container node whose children are displayed as a menu page."""

    def __init__(self, **kw):
        super(Form, self).__init__(**kw)
        self._type = Node.Form
        self._icon_mode = kw.get('icon_mode', 0)
        self._icon_size = kw.get('icon_size', 4)
        # Guarantee a visible label even when none was supplied.
        if self._label == '':
            self._label = 'Untitled Form'

    def iconMode(self):
        """Icon display mode used for this form's children."""
        return self._icon_mode

    def iconSize(self):
        """Icon size index used for this form's children."""
        return self._icon_size
class Separator(Node):
    """Visual separator node; it carries no command of its own."""
    def __init__(self, **kw):
        super(Separator, self).__init__(**kw)
        self._type = Node.Separator
        # Fixed label: separators are identified by type, not by name.
        self._label = 'separator'
|
freedesktop-unofficial-mirror/papyon | papyon/service/AddressBook/scenario/contacts/messenger_contact_add.py | Python | gpl-2.0 | 3,036 | 0.002306 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2007 Johann Prieur <johann.prieur@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
from papyon.service.AddressBook.scenario.base import BaseScenario
from papyon.service.AddressBook.scenario.base import Scenario
from papyon.service.description.AB.constants import ContactEmailType
from papyon.profile import ContactType, NetworkID
__all__ = ['MessengerContactAddScenario']
class MessengerContactAddScenario(BaseScenario):
    """Scenario that adds a messenger contact and updates the address book."""

    def __init__(self, ab, callback, errback,
                 account='',
                 network_id=NetworkID.MSN,
                 contact_type=ContactType.REGULAR,
                 contact_info=None,
                 invite_display_name='',
                 invite_message=''):
        """Adds a messenger contact and updates the address book.

            @param ab: the address book service
            @param callback: tuple(callable, *args)
            @param errback: tuple(callable, *args)"""
        BaseScenario.__init__(self, Scenario.CONTACT_SAVE, callback, errback)
        self._ab = ab

        self.account = account
        self.network_id = network_id
        self.contact_type = contact_type
        # BUGFIX: the default used to be a shared mutable dict ({}) which
        # execute() mutates; every instance now gets its own dict.
        self.contact_info = {} if contact_info is None else contact_info
        self.invite_display_name = invite_display_name
        self.invite_message = invite_message

        self.auto_manage_allow_list = True

    def execute(self):
        invite_info = { 'display_name' : self.invite_display_name,
                        'invite_message' : self.invite_message }

        if self.network_id == NetworkID.MSN:
            self.contact_info['passport_name'] = self.account
            self.contact_info['contact_type'] = self.contact_type
            self.contact_info['is_messenger_user'] = True
        elif self.network_id == NetworkID.EXTERNAL:
            # External (non-MSN) contacts are stored by their e-mail address.
            self.contact_info.setdefault('email', {})[ContactEmailType.EXTERNAL] = self.account
            self.contact_info['capability'] = self.network_id
        else:
            raise NotImplementedError("Network ID '%s' is not implemented" %
                    self.network_id)

        self._ab.ContactAdd(self._callback,
                            self._errback,
                            self._scenario,
                            self.contact_info,
                            invite_info,
                            self.auto_manage_allow_list)
|
googleapis/python-tpu | google/cloud/tpu_v2alpha1/services/tpu/async_client.py | Python | apache-2.0 | 54,857 | 0.001367 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.tpu_v2alpha1.services.tpu import pagers
from google.cloud.tpu_v2alpha1.types import cloud_tpu
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
from .transports.base import TpuTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import TpuGrpcAsyncIOTransport
from .client import TpuClient
class TpuAsyncClient:
"""Manages TPU nodes and other resources
TPU API v2alpha1
"""
_client: TpuClient
DEFAULT_ENDPOINT = TpuClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = TpuClient.DEFAULT_MTLS_ENDPOINT
accelerator_type_path = staticmethod(TpuClient.acceler | ator_type_path)
parse_accelerator_type_path = staticmethod(TpuClient.parse_accelerator_type_path)
node_path = staticmethod(TpuClient.node_path)
parse_node_path = staticmethod(TpuClient.parse_node_path)
runtime_version_path = staticmethod(TpuClient.runtime_version_path)
parse_runtime_version_path = staticmethod(TpuClient.parse_runtime_version_path)
common_billing_account_path = staticmethod(TpuClient.common_billing_account_path)
parse_ | common_billing_account_path = staticmethod(
TpuClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(TpuClient.common_folder_path)
parse_common_folder_path = staticmethod(TpuClient.parse_common_folder_path)
common_organization_path = staticmethod(TpuClient.common_organization_path)
parse_common_organization_path = staticmethod(
TpuClient.parse_common_organization_path
)
common_project_path = staticmethod(TpuClient.common_project_path)
parse_common_project_path = staticmethod(TpuClient.parse_common_project_path)
common_location_path = staticmethod(TpuClient.common_location_path)
parse_common_location_path = staticmethod(TpuClient.parse_common_location_path)
    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            TpuAsyncClient: The constructed client.
        """
        # Delegate to the sync client's classmethod, re-bound to this async
        # class via __func__ so the constructed instance is a TpuAsyncClient.
        return TpuClient.from_service_account_info.__func__(TpuAsyncClient, info, *args, **kwargs)  # type: ignore
    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            TpuAsyncClient: The constructed client.
        """
        # Delegate to the sync client's classmethod, re-bound to this async
        # class via __func__ so the constructed instance is a TpuAsyncClient.
        return TpuClient.from_service_account_file.__func__(TpuAsyncClient, filename, *args, **kwargs)  # type: ignore

    # Convenience alias: JSON key files use the same loader.
    from_service_account_json = from_service_account_file
    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        # Pure delegation: the resolution logic lives on the sync client.
        return TpuClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
    @property
    def transport(self) -> TpuTransport:
        """Returns the transport used by the client instance.

        Returns:
            TpuTransport: The transport used by the client instance.
        """
        # The async client wraps a sync client; expose its transport directly.
        return self._client.transport

    # Reuse the sync client's transport-selection logic for this class.
    get_transport_class = functools.partial(
        type(TpuClient).get_transport_class, type(TpuClient)
    )
def __init__(
self,
*,
credentials: ga_credentials.Credentials = None,
transport: Union[str, TpuTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the tpu client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.TpuTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default |
npinchot/djangocon_2015_talk | manage.py | Python | mit | 274 | 0.00365 | #!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    # Point Django at the project's settings module before anything from
    # django.core is exercised.  (The original dump corrupted this line and
    # the import below with stray " | " separators; reconstructed.)
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "e_commerce_with_django_at_scale.settings")

    from django.core.management import execute_from_command_line

    # Dispatch the management command given on the command line.
    execute_from_command_line(sys.argv)
|
joopert/home-assistant | tests/components/jewish_calendar/test_sensor.py | Python | apache-2.0 | 19,613 | 0.000982 | """The tests for the Jewish calendar sensors."""
from datetime import timedelta
from datetime import datetime as dt
import pytest
import homeassistant.util.dt as dt_util
from homeassistant.setup import async_setup_component
from homeassistant.components import jewish_calendar
from tests.common import async_fire_time_changed
from . import alter_time, make_nyc_test_params, make_jerusalem_test_params
async def test_jewish_calendar_min_config(hass):
    """Test minimum jewish calendar configuration."""
    # An empty config block must be enough for the integration to set up.
    setup_ok = await async_setup_component(
        hass, jewish_calendar.DOMAIN, {"jewish_calendar": {}}
    )
    assert setup_ok
    await hass.async_block_till_done()
    assert hass.states.get("sensor.jewish_calendar_date") is not None
async def test_jewish_calendar_hebrew(hass):
    """Test jewish calendar sensor with language set to hebrew."""
    config = {"jewish_calendar": {"language": "hebrew"}}
    assert await async_setup_component(hass, jewish_calendar.DOMAIN, config)
    await hass.async_block_till_done()
    # The date sensor must exist regardless of the configured language.
    assert hass.states.get("sensor.jewish_calendar_date") is not None
TEST_PARAMS = [
(dt(2018, 9, 3), "UTC", 31.778, 35.235, "english", "date", False, "23 Elul 5778"),
(
dt(2018, 9, 3),
"UTC",
31.778,
35.235,
"hebrew",
"date",
False,
'כ"ג אלול ה\' תשע"ח',
),
(dt(2018, 9, 10), "UTC", 31.778, 35.235, "hebrew", "holiday", False, "א' ראש השנה"),
(
dt(2018, 9, 10),
"UTC",
31.778,
35.235,
"english",
"holiday",
False,
"Rosh Hashana I",
),
(
dt(2018, 9, 8),
"UTC",
31.778,
35.235,
"hebrew",
"parshat_hashavua",
False,
"נצבים",
),
(
dt(2018, 9, 8),
"America/New_York",
40.7128,
-74.0060,
"hebrew",
"t_set_hakochavim",
True,
dt(2018, 9, 8, 19, 48),
),
(
dt(2018, 9, 8),
"Asia/Jerusalem",
31.778,
35.235,
"hebrew",
"t_set_hakochavim",
False,
dt(2018, 9, 8, 19, 21),
),
(
dt(2018, 10, 14),
"Asia/Jerusalem",
31.778,
35.235,
"hebrew",
"parshat_hashavua",
False,
"לך לך",
),
(
dt(2018, 10, 14, 17, 0, 0),
"Asia/Jerusalem",
31.778,
35.235,
"hebrew",
"date",
False,
"ה' מרחשוון ה' תשע\"ט",
),
(
dt(2018, 10, 14, 19, 0, 0),
"Asia/Jerusalem",
31.778,
35.235,
"hebrew",
"date",
False,
"ו' מרחשוון ה' תשע\"ט",
),
]
TEST_IDS = [
"date_output",
"date_output_hebrew",
"holiday",
"holiday_english",
"torah_reading",
"first_stars_ny",
"first_stars_jerusalem",
"torah_reading_weekday",
"date_before_sunset",
"date_after_sunset",
]
@pytest.mark.parametrize(
    [
        "now",
        "tzname",
        "latitude",
        "longitude",
        "language",
        "sensor",
        "diaspora",
        "result",
    ],
    TEST_PARAMS,
    ids=TEST_IDS,
)
async def test_jewish_calendar_sensor(
    hass, now, tzname, latitude, longitude, language, sensor, diaspora, result
):
    """Test Jewish calendar sensor output."""
    time_zone = dt_util.get_time_zone(tzname)
    test_time = time_zone.localize(now)
    # Configure Home Assistant's location to the parametrized coordinates.
    hass.config.time_zone = time_zone
    hass.config.latitude = latitude
    hass.config.longitude = longitude
    # Freeze "now" to the test time while the integration sets up and the
    # sensors compute their state.
    with alter_time(test_time):
        assert await async_setup_component(
            hass,
            jewish_calendar.DOMAIN,
            {
                "jewish_calendar": {
                    "name": "test",
                    "language": language,
                    "diaspora": diaspora,
                }
            },
        )
        await hass.async_block_till_done()

        # Advance time so the sensors refresh at least once.
        future = dt_util.utcnow() + timedelta(seconds=30)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()

    # Datetime expectations are given as naive local times; localize them.
    result = (
        dt_util.as_utc(time_zone.localize(result)) if isinstance(result, dt) else result
    )

    sensor_object = hass.states.get(f"sensor.test_{sensor}")
    assert sensor_object.state == str(result)

    if sensor == "holiday":
        # The holiday sensor also exposes structured attributes.
        assert sensor_object.attributes.get("id") == "rosh_hashana_i"
        assert sensor_object.attributes.get("type") == "YOM_TOV"
        assert sensor_object.attributes.get("type_id") == 1
SHABBAT_PARAMS = [
make_nyc_test_params(
dt(2018, 9, 1, 16, 0),
{
"english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_havdalah": dt(2018, 9, 1, 20, 14),
"english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14),
"english_parshat_hashavua": "Ki Tavo",
"hebrew_parshat_hashavua": "כי תבוא",
},
),
make_nyc_test_params(
dt(2018, 9, 1, 16, 0),
{
"english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_havdalah": dt(2018, 9, 1, 20, 22),
"english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 22),
"english_parshat_hasha | vua": "Ki Tavo",
"hebrew_parshat_hashavua": "כי תבוא",
},
havdalah_offset=50,
),
make_nyc_test_params(
dt(2018, 9, 1, 20, 0),
{
"english_upcoming_shabbat_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 1, 20, 14),
"english_upcoming_candle_lighting": dt(2018, 8, 31, 19, 15),
"english_upcoming_havdalah": dt(2018, 9, 1, 20, 14),
"englis | h_parshat_hashavua": "Ki Tavo",
"hebrew_parshat_hashavua": "כי תבוא",
},
),
make_nyc_test_params(
dt(2018, 9, 1, 20, 21),
{
"english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4),
"english_upcoming_havdalah": dt(2018, 9, 8, 20, 2),
"english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2),
"english_parshat_hashavua": "Nitzavim",
"hebrew_parshat_hashavua": "נצבים",
},
),
make_nyc_test_params(
dt(2018, 9, 7, 13, 1),
{
"english_upcoming_candle_lighting": dt(2018, 9, 7, 19, 4),
"english_upcoming_havdalah": dt(2018, 9, 8, 20, 2),
"english_upcoming_shabbat_candle_lighting": dt(2018, 9, 7, 19, 4),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 8, 20, 2),
"english_parshat_hashavua": "Nitzavim",
"hebrew_parshat_hashavua": "נצבים",
},
),
make_nyc_test_params(
dt(2018, 9, 8, 21, 25),
{
"english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1),
"english_upcoming_havdalah": dt(2018, 9, 11, 19, 57),
"english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50),
"english_parshat_hashavua": "Vayeilech",
"hebrew_parshat_hashavua": "וילך",
"english_holiday": "Erev Rosh Hashana",
"hebrew_holiday": "ערב ראש השנה",
},
),
make_nyc_test_params(
dt(2018, 9, 9, 21, 25),
{
"english_upcoming_candle_lighting": dt(2018, 9, 9, 19, 1),
"english_upcoming_havdalah": dt(2018, 9, 11, 19, 57),
"english_upcoming_shabbat_candle_lighting": dt(2018, 9, 14, 18, 52),
"english_upcoming_shabbat_havdalah": dt(2018, 9, 15, 19, 50),
"english_parshat_hashavua": "Vayeilech",
"hebrew_parshat_hashavua": "וילך",
"english_holiday": "Rosh Hashana I",
"hebrew_holiday": "א' ראש השנה",
},
),
make_nyc_test_params(
dt(2018, 9, 10, 21, 25),
{
"english |
FireballDWF/cloud-custodian | tools/c7n_mailer/c7n_mailer/datadog_delivery.py | Python | apache-2.0 | 4,461 | 0.000897 | # Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
from datadog import api
from datadog import initialize
from six.moves.urllib.parse import urlparse, parse_qsl
class DataDogDelivery(object):
    """Deliver Cloud Custodian policy results to DataDog as metrics.

    A notify action addresses this delivery channel with ``datadog://``
    URIs whose query string configures the metric name and, optionally,
    which resource tag supplies the metric value.

    Note: the original source of this block was corrupted by stray " | "
    dataset separators inside ``_get_metrics_config_to_resources_map``;
    those lines are reconstructed here.
    """

    DATADOG_API_KEY = 'datadog_api_key'
    DATADOG_APPLICATION_KEY = 'datadog_application_key'

    def __init__(self, config, session, logger):
        """Store config/session/logger and initialize the datadog SDK.

        :param config: mailer configuration mapping (may hold the API keys)
        :param session: boto session (kept for interface parity, unused here)
        :param logger: logger used for delivery reporting
        """
        self.config = config
        self.logger = logger
        self.session = session
        self.datadog_api_key = self.config.get(self.DATADOG_API_KEY, None)
        self.datadog_application_key = self.config.get(self.DATADOG_APPLICATION_KEY, None)

        # Only initialize the datadog client when both credentials exist.
        if self.datadog_api_key and self.datadog_application_key:
            options = {
                'api_key': self.datadog_api_key,
                'app_key': self.datadog_application_key,
            }
            initialize(**options)

    def get_datadog_message_packages(self, sqs_message):
        """Render one datadog metric dict per (resource, metric config) pair.

        Returns a list shaped like
        ``[{'metric': name, 'points': (timestamp, value), 'tags': [...]}, ...]``;
        empty when the message carries no datadog destinations.
        """
        date_time = time.time()
        datadog_rendered_messages = []
        metric_config_map = self._get_metrics_config_to_resources_map(sqs_message)

        if not metric_config_map:
            return datadog_rendered_messages

        if sqs_message and sqs_message.get('resources', False):
            for resource in sqs_message['resources']:
                # Common message-level tags plus one tag per resource field.
                tags = [
                    'event:{}'.format(sqs_message['event']),
                    'account_id:{}'.format(sqs_message['account_id']),
                    'account:{}'.format(sqs_message['account']),
                    'region:{}'.format(sqs_message['region'])
                ]
                tags.extend(['{key}:{value}'.format(
                    key=key, value=resource[key]) for key in resource.keys()
                    if key != 'Tags'])
                # AWS-style Tags are flattened into key:value tag strings.
                if resource.get('Tags', False):
                    tags.extend(['{key}:{value}'.format(
                        key=tag['Key'], value=tag['Value']) for tag in resource['Tags']])

                for metric_config in metric_config_map:
                    datadog_rendered_messages.append({
                        "metric": metric_config['metric_name'],
                        "points": (date_time, self._get_metric_value(
                            metric_config=metric_config, tags=tags)),
                        "tags": tags
                    })

        return datadog_rendered_messages

    def deliver_datadog_messages(self, datadog_message_packages, sqs_message):
        """Send the rendered metrics to DataDog (no-op for an empty list)."""
        if len(datadog_message_packages) > 0:
            self.logger.info(
                "Sending account:{account} policy:{policy} {resource}:{quantity} to DataDog".
                format(account=sqs_message.get('account', ''),
                       policy=sqs_message['policy']['name'],
                       resource=sqs_message['policy']['resource'],
                       quantity=len(sqs_message['resources'])))
            api.Metric.send(datadog_message_packages)

    @staticmethod
    def _get_metric_value(metric_config, tags):
        """Return the metric value: 1 by default, else the float parsed from
        the first tag containing ``metric_value_tag``."""
        metric_value = 1
        metric_value_tag = metric_config.get('metric_value_tag', 'default')

        if metric_value_tag != 'default':
            for tag in tags:
                if metric_value_tag in tag:
                    metric_value = float(tag[tag.find(":") + 1:])
        return metric_value

    @staticmethod
    def _get_metrics_config_to_resources_map(sqs_message):
        """Parse every ``datadog://`` destination's query string into a dict.

        e.g. ``datadog://?metric_name=m&metric_value_tag=cost`` becomes
        ``{'metric_name': 'm', 'metric_value_tag': 'cost'}``.
        """
        metric_config_map = []
        if sqs_message and sqs_message.get(
                'action', False) and sqs_message['action'].get('to', False):
            for to in sqs_message['action']['to']:
                if to.startswith('datadog://'):
                    parsed = urlparse(to)
                    metric_config_map.append(dict(parse_qsl(parsed.query)))
        return metric_config_map
|
Jumpscale/web | pythonlib/flask_admin/base.py | Python | apache-2.0 | 16,504 | 0.001636 | from functools import wraps
from flask import Blueprint, render_template, abort, g
from flask.ext.admin import babel
from flask.ext.admin._compat import with_metaclass
from flask.ext.admin import helpers as h
# For compatibility reasons import MenuLink
from flask.ext.admin.menu import MenuCategory, MenuView, MenuLink
def expose(url='/', methods=('GET',)):
    """
    Use this decorator to expose views in your view classes.

    :param url:
        Relative URL for the view
    :param methods:
        Allowed HTTP methods. By default only GET is allowed.
    """
    def wrap(f):
        # Accumulate (url, methods) registrations on the function itself so
        # the metaclass can discover them later.
        registrations = getattr(f, '_urls', None)
        if registrations is None:
            registrations = f._urls = []
        registrations.append((url, methods))
        return f
    return wrap
def expose_plugview(url='/'):
    """
    Decorator to expose Flask's pluggable view classes
    (``flask.views.View`` or ``flask.views.MethodView``).

    :param url:
        Relative URL for the view

    .. versionadded:: 1.0.4
    """
    def wrap(v):
        register = expose(url, v.methods)
        # Pluggable view classes are converted to view functions first;
        # anything else is registered as-is.
        view = v.as_view(v.__name__) if hasattr(v, 'as_view') else v
        return register(view)
    return wrap
# Base views
def _wrap_view(f):
    """Wrap an exposed view so access control runs before the view body."""
    @wraps(f)
    def wrapped(self, *args, **kwargs):
        # Store current admin view
        h.set_current_view(self)

        # _handle_view may short-circuit with a redirect/abort response.
        response = self._handle_view(f.__name__, **kwargs)
        if response is not None:
            return response
        return f(self, *args, **kwargs)

    return wrapped
class AdminViewMeta(type):
    """
    View metaclass.

    Does some precalculations (like getting list of view methods from the class) to avoid
    calculating them for each view class instance.
    """
    def __init__(cls, classname, bases, fields):
        type.__init__(cls, classname, bases, fields)

        # Gather exposed views
        cls._urls = []
        cls._default_view = None

        for p in dir(cls):
            attr = getattr(cls, p)

            # Methods decorated with @expose carry a _urls registration list.
            if hasattr(attr, '_urls'):
                # Collect methods
                for url, methods in attr._urls:
                    cls._urls.append((url, p, methods))

                    # The view registered at '/' becomes the default view.
                    if url == '/':
                        cls._default_view = p

                # Wrap views
                # (_wrap_view adds the access-control check around each view)
                setattr(cls, p, _wrap_view(attr))
class BaseViewClass(object):
    # Empty base class; exists only so with_metaclass can attach
    # AdminViewMeta to BaseView in a Python 2/3 compatible way.
    pass
class BaseView(with_metaclass(AdminViewMeta, BaseViewClass)):
"""
Base administrative view.
Derive from this class to implement your administrative interface piece. For example::
class MyView(BaseView):
@expose('/')
def index(self):
return 'Hello World!'
"""
@property
def _template_args(self):
"""
Extra template arguments.
If you need to pass some extra parameters to the template,
you can override particular view function, contribute
arguments you want to pass to the template and call parent view.
These arguments are local for this request and will be discarded
in the next request.
Any value passed through ``_template_args`` will override whatever
parent view function passed to the template.
For example::
class MyAdmin(ModelView):
@expose('/')
def index(self):
self._template_args['name'] = 'foobar'
self._template_args['code'] = '12345'
super(MyAdmin, self).index()
"""
args = getattr(g, '_admin_template_args', None)
if args is None:
args = g._admin_template_args = dict()
return args
    def __init__(self, name=None, category=None, endpoint=None, url=None,
                 static_folder=None, static_url_path=None):
        """
        Constructor.

        :param name:
            Name of this view. If not provided, will default to the class name.
        :param category:
            View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
            be in a submenu.
        :param endpoint:
            Base endpoint name for the view. For example, if there's a view method called "index" and
            endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
            view method. Defaults to the class name in lower case.
        :param url:
            Base URL. If provided, affects how URLs are generated. For example, if the url parameter
            is "test", the resulting URL will look like "/admin/test/". If not provided, will
            use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
            and '/admin/' prefix won't be applied.
        :param static_folder:
            Folder from which this view serves static files, if any.
        :param static_url_path:
            Static URL Path. If provided, this specifies the path to the static url directory.
        """
        self.name = name
        self.category = category
        self.endpoint = endpoint
        self.url = url
        self.static_folder = static_folder
        self.static_url_path = static_url_path

        # Initialized from create_blueprint
        self.admin = None
        self.blueprint = None

        # Default view
        # A subclass without any @expose('/') method cannot be rendered.
        if self._default_view is None:
            raise Exception(u'Attempted to instantiate admin view %s without default view' % self.__class__.__name__)
    def create_blueprint(self, admin):
        """
        Create Flask blueprint.

        Resolves endpoint name, base URL and static paths (falling back to
        the parent ``admin`` defaults), then registers every exposed view
        method as a URL rule on the new blueprint.

        :param admin: owning :class:`Admin` instance
        :return: the created :class:`flask.Blueprint`
        """
        # Store admin instance
        self.admin = admin

        # If endpoint name is not provided, get it from the class name
        if self.endpoint is None:
            self.endpoint = self.__class__.__name__.lower()

        # If the static_url_path is not provided, use the admin's
        if not self.static_url_path:
            self.static_url_path = admin.static_url_path

        # If url is not provided, generate it from endpoint name
        if self.url is None:
            if self.admin.url != '/':
                self.url = '%s/%s' % (self.admin.url, self.endpoint)
            else:
                # Only the index view may live at the admin root itself.
                if self == admin.index_view:
                    self.url = '/'
                else:
                    self.url = '/%s' % self.endpoint
        else:
            # Relative URLs are nested under the admin's base URL.
            if not self.url.startswith('/'):
                self.url = '%s/%s' % (self.admin.url, self.url)

        # If we're working from the root of the site, set prefix to None
        if self.url == '/':
            self.url = None

        # If name is not provided, use capitalized endpoint name
        if self.name is None:
            self.name = self._prettify_class_name(self.__class__.__name__)

        # Create blueprint and register rules
        self.blueprint = Blueprint(self.endpoint, __name__,
                                   url_prefix=self.url,
                                   subdomain=self.admin.subdomain,
                                   template_folder='templates',
                                   static_folder=self.static_folder,
                                   static_url_path=self.static_url_path)

        for url, name, methods in self._urls:
            self.blueprint.add_url_rule(url,
                                        name,
                                        getattr(self, name),
                                        methods=methods)

        return self.blueprint
def render(self, template, **kwargs):
"""
Render template
:param template:
Template path to render
:param kwargs:
Template arguments
| """
# Store self as | admin_view
kwargs['admin_view'] = self
kwargs['admin_base_template'] = self.admin.base_template
# Provid |
LLNL/spack | var/spack/repos/builtin/packages/r-maps/package.py | Python | lgpl-2.1 | 902 | 0.004435 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RMaps(RPackage):
    """Display of maps. Projection code and larger maps are in separate
    packages ('mapproj' and 'mapdata')."""

    # CRAN landing page and source-archive locations used by Spack's fetcher.
    homepage = "https://cloud.r-project.org/package=maps"
    url = "https://cloud.r-project.org/src/contrib/maps_3.1.1.tar.gz"
    list_url = "https://cloud.r-project.org/src/contrib/Archive/maps"

    version('3.3.0', sha256='199afe19a4edcef966ae79ef802f5dcc15a022f9c357fcb8cae8925fe8bd2216')
    version('3.2.0', sha256='437abeb4fa4ad4a36af6165d319634b89bfc6bf2b1827ca86c478d56d670e714')
    version('3.1.1', sha256='972260e5ce9519ecc09b18e5d7a28e01bed313fadbccd7b06c571af349cb4d2a')

    depends_on('r@3.0.0:', type=('build', 'run'))
|
trsheph/SynBioStandardizer | SynColi2/SyGS_v2.py | Python | bsd-2-clause | 3,078 | 0.01462 | #!/usr/bin/env python
#####
#
# Synthetic Biology Gene Standardizer
# Copyright (c) 2015, Tyson R. Shepherd, PhD
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of Uppsala University.
#
#####
import copy
import sys
from Bio.Seq import Seq
from Bio.SeqRecord import SeqRecord
from Bio import SeqIO
import genestand2
import zeroRPC
#
# Open file to read in fasta sequences for modified and original records.
# Records come in pairs: a ">name" header line followed by one sequence line.
#
with open('MG1655_genes.txt', 'r') as fIn:
    genes = list(fIn)
geneName = []
geneSeqIn = []
geneSeqOut = []
for i in range(0, int(len(genes) / 2)):
    geneName.append(genes[int(i * 2)].split('>')[1].rstrip())
    # (This line was corrupted by a stray " | " separator in the dump;
    # reconstructed from its pair above.)
    geneSeqIn.append(genes[int(i * 2 + 1)].rstrip())
genes = []
#
# Start your engines
#
stnds = ['N', 'BioB', 'BglB', 'MoClo', 'GB', 'Chi']
SynthRecs = []
for q, p in enumerate(geneSeqIn):
    #
    # Look for: non-ATG start codons, non-TAA stop codons,
    #   NdeI: NdeI
    #   BioBrick: EcoRI, SpeI, XbaI, PstI, mfeI, avrII, NheI, NsiI, SbfI, NotI, ApoI
    #   BglBrick: EcoRI, XhoI, BglII, BamHI,
    #   MoClo: BbsI, BsaI, MlyI
    #   GoldenBraid: BsmI, BtgZI
    #   Chi sites
    # Then makes non-conflicting point mutations to highest allowed codon usage
    #
    tmpGeneSeqOut = genestand2.refactor(geneName[q], p, stnds, 1)
    geneSeqOut.append(genestand2.mutatePromoters(geneName[q], tmpGeneSeqOut))
#
# Print results
#
print('Success!')
#
genestand2.statistics()
#
# Write the refactored sequences; the context manager guarantees the output
# handle is closed (the original leaked it).
#
outFile = "SyntheticSequence.txt"
with open(outFile, "w") as output_handle:
    for i in range(len(geneName)):
        output_handle.write('>' + geneName[i] + "\n" + geneSeqOut[i] + "\n")
|
leyyin/stk-stats | maint_graphics.py | Python | mit | 651 | 0.004608 | #!/usr/bin/env python
import os
import time
import django
from userreport import maint
os.environ['DJANGO_SETTINGS_MODULE'] = 'userreport.settings'
django.setup()
start_time = time.time()
remove_time, get_time, save_time = maint.refresh_data()
total_time = time | .time() - start_time
print("--- Remove Time: {:>5.2f} seconds, {:>5.2%} ---".format(remove_time, remove_time / total_time))
print("--- Get Time: {:>5.2f} seconds, {:>5.2%} ---".format(get_time, get_time / total_time))
print("--- Save Time: {:>5.2f} seconds, {:>5.2%} ---" | .format(save_time, save_time / total_time))
print("--- Total Time: {:>5.2f} seconds ---".format(total_time))
|
leonardbinet/Transilien-Api-ETL | api_etl/extract_schedule.py | Python | mit | 5,949 | 0.001513 | """
Module used to download from SNCF website trains schedules and save it in the right format
in different databases (Dynamo or relational database)
"""
from os import path, makedirs
import zipfile
from urllib.request import urlretrieve
import logging
import pandas as pd
from api_etl.settings import __GTFS_FOLDER_PATH__, __GTFS_CSV_URL__, __DATA_PATH__
from api_etl.utils_rdb import RdbProvider
from api_etl.data_models import (
Agency,
Route,
Trip,
StopTime,
Stop,
Calendar,
CalendarDate,
)
from api_etl.utils_misc import get_paris_local_datetime_now, S3Bucket
from api_etl.settings import __S3_BUCKETS__
logger = logging.getLogger(__name__)
pd.options.mode.chained_assignment = None
class ScheduleExtractor:
    """Common base class for schedule extractors.

    Knows where the GTFS data lives locally, how to download fresh GTFS
    archives from the SNCF website, and how to push the local folder to S3.
    (A log string and a comment in download_gtfs_files were corrupted by
    stray " | " separators in the dump; reconstructed here.)
    """

    def __init__(self):
        self.gtfs_folder = __GTFS_FOLDER_PATH__
        self.schedule_url = __GTFS_CSV_URL__
        self.files_present = None
        self._check_files()

    def _check_files(self):
        """Set and return self.files_present: True iff every expected GTFS
        file in 'gtfs-lines-last' can be read."""
        files_to_check = [
            "gtfs-lines-last/calendar.txt",
            "gtfs-lines-last/trips.txt",
            "gtfs-lines-last/stop_times.txt",
            "gtfs-lines-last/stops.txt",
            "gtfs-lines-last/calendar_dates.txt",
        ]
        # Default: True, and if one missing -> False
        self.files_present = True
        for file_check in files_to_check:
            try:
                pd.read_csv(
                    path.join(self.gtfs_folder, file_check)
                )
            except FileNotFoundError:
                logger.warning("File %s not found in data folder %s" %
                               (file_check, self.gtfs_folder))
                self.files_present = False
        return self.files_present

    def download_gtfs_files(self):
        """
        Download gtfs files from SNCF website (based on URL defined in settings module) and saves it in data folder
        (defined as well in settings module). There is no parameter to pass.

        Process is in two steps:

        - first: download csv file containing links to zip files
        - second: download files based on urls found in csv from first step

        Folder names in which files are unzipped are based on the headers of the zip files.

        Function returns True if 'gtfs-lines-last' folder has been found (this is the usual folder we use then to find
        schedules). Returns False otherwise.

        :rtype: boolean
        """
        logger.info(
            "Download of csv containing links of zip files, at url %s", self.schedule_url)
        gtfs_links = pd.read_csv(self.schedule_url)

        # Create data folder if necessary
        if not path.exists(self.gtfs_folder):
            makedirs(self.gtfs_folder)

        # Download and unzip all files; remember whether one of them is
        # the "gtfs-lines-last" archive.
        gtfs_lines_last_present = False
        for link in gtfs_links["file"].values:
            logger.info("Download of %s", link)
            local_filename, headers = urlretrieve(link)

            logger.info("File name is %s", headers.get_filename())
            # Get name in header and remove the ".zip"
            extracted_data_folder_name = headers.get_filename().split(".")[0]
            if extracted_data_folder_name == "gtfs-lines-last":
                gtfs_lines_last_present = True

            with zipfile.ZipFile(local_filename, "r") as zip_ref:
                full_path = path.join(
                    self.gtfs_folder, extracted_data_folder_name)
                zip_ref.extractall(path=full_path)

        if gtfs_lines_last_present:
            logger.info("The 'gtfs-lines-last' folder has been found.")
        else:
            logger.error(
                "The 'gtfs-lines-last' folder has not been found! Schedules will not be updated.")
        return gtfs_lines_last_present

    def save_gtfs_in_s3(self):
        """Upload the local GTFS folder to S3 under today's date (Paris time)."""
        day = get_paris_local_datetime_now().strftime("%Y%m%d")
        sb = S3Bucket(__S3_BUCKETS__["gtfs-files"], create_if_absent=True)
        sb.send_folder(
            folder_local_path=self.gtfs_folder,
            folder_remote_path=day
        )
class ScheduleExtractorRDB(ScheduleExtractor):
    """ For relational database
    """

    def __init__(self, dsn=None):
        # Base-class init checks the GTFS files are present locally.
        ScheduleExtractor.__init__(self)
        self.dsn = dsn
        self.rdb_provider = RdbProvider(self.dsn)

    def save_in_rdb(self, tables=None):
        """Load the GTFS csv files into the relational database.

        :param tables: optional list of integer indices selecting a subset
            of the (file, model) pairs below; None loads everything.
        """
        assert self.files_present
        to_save = [
            ("agency.txt", Agency),
            ("routes.txt", Route),
            ("trips.txt", Trip),
            ("stops.txt", Stop),
            ("stop_times.txt", StopTime),
            ("calendar.txt", Calendar),
            ("calendar_dates.txt", CalendarDate)
        ]
        if tables:
            assert isinstance(tables, list)
            to_save = [to_save[i] for i in tables]

        for name, model in to_save:
            # Everything is stored as text; applymap(str) normalizes dtypes.
            df = pd.read_csv(path.join(self.gtfs_folder, name))
            df = df.applymap(str)
            dicts = df.to_dict(orient="records")
            objects = list(map(lambda x: model(**x), dicts))
            logger.info("Saving %s file in database, containing %s objects." % (
                name, len(objects)))
            session = self.rdb_provider.get_session()
            try:
                # Try to save bulks (initial load)
                chunks = [objects[i:i + 100]
                          for i in range(0, len(objects), 100)]
                for chunk in chunks:
                    logger.debug("Bulk of 100 items saved.")
                    session.bulk_save_objects(chunk)
                    session.commit()
            except Exception:
                # Or save items one after the other
                # (merge handles rows that already exist, at the cost of speed)
                session.rollback()
                for obj in objects:
                    session.merge(obj)
                    session.commit()
            session.close()
|
kret0s/gnuhealth-live | tryton/server/trytond-3.8.3/trytond/modules/sale_promotion/tests/test_sale_promotion.py | Python | gpl-3.0 | 824 | 0.002427 | # This file is part of Tryton. The COPYRIGHT file at the top level of
# this repository contains the full copyright notices and license terms.
import unittest
import doctest
import trytond.tests.test_tryton
from trytond.tests.test_tryton import ModuleTestCase
from trytond.tests.test_tryton import doctest_setup, doctest_teardown
# (The class statement was corrupted by a stray " | " separator in the dump;
# reconstructed.)
class SalePromotionTestCase(ModuleTestCase):
    'Test Sale Promotion module'
    # NOTE(review): `module` presumably names the Tryton module to activate
    # for this test case — confirm against ModuleTestCase docs.
    module = 'sale_promotion'
def suite():
    """Build the test suite: base Tryton tests, this module's unit tests,
    and the doctest scenario. (The setUp keyword was corrupted by a stray
    " | " separator in the dump; reconstructed.)"""
    suite = trytond.tests.test_tryton.suite()
    suite.addTests(unittest.TestLoader().loadTestsFromTestCase(
        SalePromotionTestCase))
    suite.addTests(doctest.DocFileSuite('scenario_sale_promotion.rst',
        setUp=doctest_setup, tearDown=doctest_teardown, encoding='utf-8',
        optionflags=doctest.REPORT_ONLY_FIRST_FAILURE))
    return suite
|
wathen/PhD | MHD/FEniCS/MHD/Stabilised/SaddlePointForm/Test/SplitMatrix/ScottTest/Lshaped/Dominik/NS.py | Python | mit | 11,453 | 0.021304 | #!/usr/bin/python
# interpolate scalar gradient onto nedelec space
im | port petsc4py
import sys
petsc4py.init(sys.argv)
from petsc4py import PETSc
from dolfin import *
import mshr
Print = PETSc.Sys.Print
# from MatrixOperations import *
import numpy as np
import PETScIO as IO
import common
import scipy
import scipy.io
import time
import BiLinear as forms
import IterOperation | s as Iter
import MatrixOperations as MO
import CheckPetsc4py as CP
import ExactSol
import Solver as S
import MHDmatrixPrecondSetup as PrecondSetup
import NSprecondSetup
import MHDprec as MHDpreconditioner
import memory_profiler
import gc
import MHDmulti
import MHDmatrixSetup as MHDsetup
import Lshaped
import NSpreconditioner
#@profile
m = 5
set_log_active(False)
errL2u =np.zeros((m-1,1))
errH1u =np.zeros((m-1,1))
errL2p =np.zeros((m-1,1))
errL2b =np.zeros((m-1,1))
errCurlb =np.zeros((m-1,1))
errL2r =np.zeros((m-1,1))
errH1r =np.zeros((m-1,1))
l2uorder = np.zeros((m-1,1))
H1uorder =np.zeros((m-1,1))
l2porder = np.zeros((m-1,1))
l2border = np.zeros((m-1,1))
Curlborder =np.zeros((m-1,1))
l2rorder = np.zeros((m-1,1))
H1rorder = np.zeros((m-1,1))
NN = np.zeros((m-1,1))
DoF = np.zeros((m-1,1))
Velocitydim = np.zeros((m-1,1))
Magneticdim = np.zeros((m-1,1))
Pressuredim = np.zeros((m-1,1))
Lagrangedim = np.zeros((m-1,1))
Wdim = np.zeros((m-1,1))
iterations = np.zeros((m-1,1))
SolTime = np.zeros((m-1,1))
udiv = np.zeros((m-1,1))
MU = np.zeros((m-1,1))
level = np.zeros((m-1,1))
NSave = np.zeros((m-1,1))
Mave = np.zeros((m-1,1))
TotalTime = np.zeros((m-1,1))
nn = 2
dim = 2
ShowResultPlots = 'yes'
split = 'Linear'
uu0, ub0, pu0, pb0, bu0, bb0, ru0, AdvectionU, AdvectionB, Mu, Mb = Lshaped.SolutionSetUp()
MU[0]= 1e0
for xx in xrange(1,m):
print xx
level[xx-1] = xx + 0
nn = 2**(level[xx-1])
# Create mesh and define function space
nn = int(nn)
NN[xx-1] = nn/2
parameters["form_compiler"]["quadrature_degree"] = -1
mesh, boundaries, domains = Lshaped.Domain(nn)
order = 2
parameters['reorder_dofs_serial'] = False
Velocity = VectorFunctionSpace(mesh, "CG", order)
Pressure = FunctionSpace(mesh, "CG", order-1)
W = MixedFunctionSpace([Velocity, Pressure])
Velocitydim[xx-1] = Velocity.dim()
Pressuredim[xx-1] = Pressure.dim()
Wdim[xx-1] = W.dim()
print "\n\nW: ",Wdim[xx-1],"Velocity: ",Velocitydim[xx-1],"Pressure: ",Pressuredim[xx-1],"\n\n"
dim = [Velocity.dim(), Pressure.dim()]
def boundary(x, on_boundary):
return on_boundary
FSpaces = [Velocity,Pressure]
kappa = 1.0
Mu_m =10.0
MU = 1.0
N = FacetNormal(mesh)
ds = Measure('ds', domain=mesh, subdomain_data=boundaries)
# g = inner(p0*N - MU*grad(u0)*N,v)*dx
IterType = 'Full'
Split = "No"
Saddle = "No"
Stokes = "No"
SetupType = 'python-class'
params = [kappa,Mu_m,MU]
u0, p0, b0, r0, Advection, Mcouple = Lshaped.SolutionMeshSetup(mesh, params, uu0, ub0, pu0, pb0, bu0, bb0, AdvectionU, AdvectionB, Mu, Mb)
F_M = Expression(("0.0","0.0"))
F_S = Expression(("0.0","0.0"))
n = FacetNormal(mesh)
u_k, p_k = Lshaped.Stokes(Velocity, Pressure, F_S, u0, 1, params, boundaries)
(u, p) = TrialFunctions(W)
(v, q) = TestFunctions(W)
a11 = params[2]*inner(grad(v), grad(u))*dx + inner((grad(u)*u_k),v)*dx + (1./2)*div(u_k)*inner(u,v)*dx - (1./2)*inner(u_k,n)*inner(u,v)*ds
a12 = -div(v)*p*dx
a21 = -div(u)*q*dx
a = a11 + a21 + a12
Lns = inner(v, F_S)*dx #+ inner(Neumann,v)*ds(2)
a11 = params[2]*inner(grad(v), grad(u_k))*dx + inner((grad(u_k)*u_k),v)*dx + (1./2)*div(u_k)*inner(u_k,v)*dx - (1./2)*inner(u_k,n)*inner(u_k,v)*ds
a12 = -div(v)*p_k*dx
a21 = -div(u_k)*q*dx
L = Lns - ( a11 + a21 + a12 )
MO.PrintStr("Setting up MHD initial guess",5,"+","\n\n","\n\n")
ones = Function(Pressure)
ones.vector()[:]=(0*ones.vector().array()+1)
x= np.concatenate((u_k.vector().array(),p_k.vector().array()), axis=0)
KSPlinearfluids, MatrixLinearFluids = PrecondSetup.FluidLinearSetup(Pressure, MU)
kspFp, Fp = PrecondSetup.FluidNonLinearSetup(Pressure, MU, u_k)
parameters['linear_algebra_backend'] = 'uBLAS'
eps = 1.0 # error measure ||u-u_k||
tol = 1.0E-4 # tolerance
iter = 0 # iteration counter
maxiter = 10 # max no of iterations allowed
SolutionTime = 0
outer = 0
u_is = PETSc.IS().createGeneral(range(Velocity.dim()))
NS_is = PETSc.IS().createGeneral(range(Velocity.dim()+Pressure.dim()))
M_is = PETSc.IS().createGeneral(range(Velocity.dim()+Pressure.dim(),W.dim()))
OuterTol = 1e-5
InnerTol = 1e-5
NSits =0
Mits =0
TotalStart =time.time()
SolutionTime = 0
while eps > tol and iter < maxiter:
iter += 1
MO.PrintStr("Iter "+str(iter),7,"=","\n\n","\n\n")
bcu = DirichletBC(W.sub(0),Expression(("0.0","0.0")), boundary)
A, b = assemble_system(a, L, bcu)
A, b = CP.Assemble(A,b)
u = b.duplicate()
n = FacetNormal(mesh)
b_t = TrialFunction(Velocity)
c_t = TestFunction(Velocity)
aa = params[2]*inner(grad(b_t), grad(c_t))*dx(W.mesh()) + inner((grad(b_t)*u_k),c_t)*dx(W.mesh()) +(1./2)*div(u_k)*inner(c_t,b_t)*dx(W.mesh()) - (1./2)*inner(u_k,n)*inner(c_t,b_t)*ds(W.mesh())
ShiftedMass = assemble(aa)
bcu.apply(ShiftedMass)
ShiftedMass = CP.Assemble(ShiftedMass)
kspF = NSprecondSetup.LSCKSPnonlinear(ShiftedMass)
ksp = PETSc.KSP()
ksp.create(comm=PETSc.COMM_WORLD)
# ksp.setTolerances(1e-5)
ksp.setType('preonly')
pc = ksp.getPC()
pc.setType(PETSc.PC.Type.LU)
# pc.setPythonContext(NSpreconditioner.NSPCD(W, kspF, KSPlinearfluids[0], KSPlinearfluids[1], Fp))
ksp.setOperators(A)
OptDB = PETSc.Options()
ksp.max_it = 1000
ksp.setFromOptions()
stime = time.time()
ksp.solve(b,u)
# u, mits,nsits = S.solve(A,b,u,params,W,'Directss',IterType,OuterTol,InnerTol,HiptmairMatrices,Hiptmairtol,KSPlinearfluids, Fp,kspF)
Soltime = time.time()- stime
MO.StrTimePrint("MHD solve, time: ", Soltime)
print ' its = ',ksp.its
Mits += 0
NSits += ksp.its
SolutionTime += Soltime
uu = u.array
UU = uu[0:Velocity.dim()]
u1 = Function(Velocity)
u1.vector()[:] = u1.vector()[:] + UU
pp = uu[Velocity.dim():Velocity.dim()+Pressure.dim()]
p1 = Function(Pressure)
p1.vector()[:] = p1.vector()[:] + pp
p1.vector()[:] += - assemble(p1*dx)/assemble(ones*dx)
diff = u1.vector().array()
u2 = Function(Velocity)
u2.vector()[:] = u1.vector().array() + u_k.vector().array()
p2 = Function(Pressure)
p2.vector()[:] = p1.vector().array() + p_k.vector().array()
p2.vector()[:] += - assemble(p2*dx)/assemble(ones*dx)
eps = np.linalg.norm(diff) + np.linalg.norm(p1.vector().array())
print '\n\n\niter=%d: norm=%g' % (iter, eps)
u_k.assign(u2)
p_k.assign(p2)
# plot(p_k)
uOld = np.concatenate((u_k.vector().array(),p_k.vector().array()), axis=0)
r = IO.arrayToVec(uOld)
# p1.vector()[:] += - assemble(p1*dx)/assemble(ones*dx)
u_k.assign(u1)
p_k.assign(p1)
# b_k.assign(b1)
# r_k.assign(r1)
uOld= np.concatenate((u_k.vector().array(),p_k.vector().array()), axis=0)
x = IO.arrayToVec(uOld)
SolTime[xx-1] = SolutionTime/iter
NSave[xx-1] = (float(NSits)/iter)
Mave[xx-1] = (float(Mits)/iter)
iterations[xx-1] = iter
TotalTime[xx-1] = time.time() - TotalStart
# XX= np.concatenate((u_k.vector().array(),p_k.vector().array(),b_k.vector().array(),r_k.vector().array()), axis=0)
# dim = [Velocity.dim(), Pressure.dim(), Magnetic.dim(),Lagrange.dim()]
# u0, p0, b0, r0, F_NS, F_M, F_MX, F_S, gradu0, Neumann, p0vec = Lshaped.Solution2(mesh, params)
ExactSolution = [u0,p0,b0,r0]
Vdim = dim[0]
Pdim = dim[1]
# Mdim = dim[2]
# Rdim = |
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import re
from copy import copy
from operator import attrgetter
from django.conf import settings
from django.http import Http404, HttpResponseRedirect
from django.urls import NoReverseMatch
from lib import l10n_utils
from bedrock.base.urlresolvers import reverse
from bedrock.firefox.firefox_details import firefox_desktop
from bedrock.firefox.templatetags.helpers import android_builds, ios_builds
from bedrock.releasenotes.models import get_latest_release_or_404, get_release_or_404, get_releases_or_404
SUPPORT_URLS = {
'Firefox for Android': 'https://support.mozilla.org/products/mobile',
'Firefox for iOS': 'https://support.mozilla.org/products/ios',
'Firefox': 'https://support.mozilla.org/products/firefox',
}
def release_notes_template(channel, product, version=None):
    """Return the template path for a product/channel release-notes page.

    Firefox "Aurora" builds from version 35 onward use the Developer
    Edition notes template; every other combination maps to a
    per-channel template under ``firefox/releases/``.
    """
    effective_channel = channel if channel else 'release'
    effective_version = version if version else 0
    is_dev_edition = (
        product == 'Firefox'
        and effective_channel == 'Aurora'
        and effective_version >= 35
    )
    if is_dev_edition:
        return 'firefox/releases/dev-browser-notes.html'
    return 'firefox/releases/%s-notes.html' % effective_channel.lower()
def equivalent_release_url(release):
    """Return the URL of the matching Android or desktop release.

    Android releases map to their desktop counterpart and vice versa;
    when no counterpart exists the function implicitly returns None.
    """
    for candidate in (release.equivalent_android_release(),
                      release.equivalent_desktop_release()):
        if candidate:
            return candidate.get_absolute_url()
def get_download_url(release):
    """Return the download (or channel-page) URL appropriate for *release*."""
    product = release.product
    if product == 'Firefox for Android':
        return android_builds(release.channel)[0]['download_link']
    if product == 'Firefox for iOS':
        return ios_builds(release.channel)[0]['download_link']
    # Desktop Firefox: pre-release channels point at the channel page
    # with an anchor; everything else goes to the main download page.
    channel_anchors = {
        'Aurora': '#developer',
        'Beta': '#beta',
    }
    anchor = channel_anchors.get(release.channel)
    if anchor:
        return reverse('firefox.channel.desktop') + anchor
    return reverse('firefox')
def show_android_sys_req(version):
    r"""Return True if a system-requirements page exists for this Android version.

    System-requirement pages for Firefox for Android exist from version
    46.0 onward.

    Bug fix: the original pattern ``\d{1,2}`` only matched the first two
    digits of the major version, so "100.0" parsed as 10 and wrongly
    returned False; ``\d+`` captures the full leading integer.
    Non-numeric or empty version strings return False.
    """
    match = re.match(r'\d+', version)
    if match:
        return int(match.group(0)) >= 46
    return False
def check_url(product, version):
    """Return the system-requirements URL for a product/version pair."""
    if product == 'Firefox for iOS':
        return reverse('firefox.ios.system_requirements', args=[version])
    if product == 'Firefox for Android':
        # System requirement pages for Android releases exist from 46.0 and upward;
        # older versions fall back to the generic SUMO page.
        if not show_android_sys_req(version):
            return settings.FIREFOX_MOBILE_SYSREQ_URL
        return reverse('firefox.android.system_requirements', args=[version])
    return reverse('firefox.system_requirements', args=[version])
def release_notes(request, version, product='Firefox'):
    """Render the release-notes page for *version* of *product*.

    Raises Http404 when no version is given.  If the exact version is
    unknown but a "<version>beta" release exists, redirects there.
    Unpublished (draft) Firefox/Android releases render a "coming soon"
    page via include_drafts.
    """
    if not version:
        raise Http404
    # Show a "coming soon" page for any unpublished Firefox releases
    include_drafts = product in ['Firefox', 'Firefox for Android']
    try:
        release = get_release_or_404(version, product, include_drafts)
    except Http404:
        # Fall back to the beta build of the same version number and
        # redirect so the canonical URL is shown.
        release = get_release_or_404(version + 'beta', product, include_drafts)
        return HttpResponseRedirect(release.get_absolute_url())
    # add MDN link to all non-iOS releases. bug 1553566
    # avoid adding duplicate notes
    release_notes = copy(release.notes)
    if release.product != 'Firefox for iOS':
        release_notes.insert(0, {
            'id': 'mdn',
            'is_public': True,
            'tag': 'Developer',
            'sort_num': 1,
            'note': f'<a class="mdn-icon" rel="external" '
            f'href="https://developer.mozilla.org/docs/Mozilla/Firefox/Releases/'
            f'{ release.major_version }">Developer Information</a>',
        })
    # Template choice depends on the release's channel and major version
    # (Developer Edition notes for Aurora >= 35).
    return l10n_utils.render(
        request, release_notes_template(release.channel, product,
                                        int(release.major_version)), {
            'version': version,
            'download_url': get_download_url(release),
            'support_url': SUPPORT_URLS.get(product, 'https://support.mozilla.org/'),
            'check_url': check_url(product, version),
            'release': release,
            'release_notes': release_notes,
            'equivalent_release_url': equivalent_release_url(release),
        })
def system_requirements(request, version, product='Firefox'):
    """Render the system-requirements page for a given release."""
    release = get_release_or_404(version, product)
    template = 'firefox/releases/system_requirements.html'
    context = {'release': release, 'version': version}
    return l10n_utils.render(request, template, context)
def latest_release(product='firefox', platform=None, channel=None):
    """Resolve platform/channel aliases and fetch the newest release (or 404).

    'android'/'ios' platforms override the product; 'developer' and
    'earlybird' are aliases of the beta channel and 'organizations' of ESR.
    """
    platform_products = {
        'android': 'firefox for android',
        'ios': 'firefox for ios',
    }
    channel_aliases = {
        'developer': 'beta',
        'earlybird': 'beta',
        'organizations': 'esr',
    }
    product = platform_products.get(platform, product)
    channel = channel_aliases.get(channel, channel or 'release')
    return get_latest_release_or_404(product, channel)
def latest_notes(request, product='firefox', platform=None, channel=None):
    """Redirect to the release-notes page of the newest matching release."""
    return HttpResponseRedirect(
        latest_release(product, platform, channel).get_absolute_url())
def latest_sysreq(request, product='firefox', platform=None, channel=None):
    """Redirect to the system-requirements page of the newest matching release."""
    return HttpResponseRedirect(
        latest_release(product, platform, channel).get_sysreq_url())
def releases_index(request, product):
    """Render the index of all major releases, each with its minor releases."""
    releases = {}
    # Starting with Firefox 10, ESR had been offered every 7 major releases, but
    # Firefox 59 wasn't ESR. Firefox 60 became the next ESR instead, and since
    # then ESR is offered every 8 major releases.
    esr_major_versions = (
        list(range(10, 59, 7)) + list(range(60, int(firefox_desktop.latest_version().split('.')[0]), 8)))
    # NOTE(review): major_releases/minor_releases are only bound inside this
    # branch; a product other than 'Firefox' would hit a NameError in the
    # loop below — presumably only 'Firefox' is routed here; verify callers.
    if product == 'Firefox':
        major_releases = firefox_desktop.firefox_history_major_releases
        minor_releases = firefox_desktop.firefox_history_stability_releases
    for release in major_releases:
        major_version = float(re.findall(r'^\d+\.\d+', release)[0])
        # The version numbering scheme of Firefox changes sometimes. The second
        # number has not been used since Firefox 4, then reintroduced with
        # Firefox ESR 24 (Bug 870540). On this index page, 24.1.x should be
        # fallen under 24.0. This pattern is a tricky part.
        converter = '%g' if int(major_version) in esr_major_versions else '%s'
        major_pattern = r'^' + re.escape(converter % round(major_version, 1))
        releases[major_version] = {
            'major': release,
            # Sort minor versions numerically (not lexically) by each dotted part.
            'minor': sorted([x for x in minor_releases if re.findall(major_pattern, x)],
                            key=lambda x: [int(y) for y in x.split('.')])
        }
    return l10n_utils.render(
        request, '{product}/releases/index.html'.format(product=product.lower()),
        {'releases': sorted(releases.items(), reverse=True)}
    )
def nightly_feed(request):
    """Serve an Atom feed with the latest changes in Firefox Nightly.

    Collects the public, tagged notes of the five most recent nightly
    releases, de-duplicated by note id, newest-modified first.

    Two lines of this function were corrupted by stray ' | ' artifacts
    in the source dump ("i | f note.id" and "rev | erse=True"); they are
    reconstructed here.
    """
    notes = {}
    releases = get_releases_or_404('firefox', 'nightly', 5)
    for release in releases:
        try:
            link = reverse('firefox.desktop.releasenotes', args=(release.version, 'release'))
        except NoReverseMatch:
            # Skip releases whose notes page has no route.
            continue
        for note in release.notes:
            # A note can appear in several nightlies; keep the first occurrence.
            if note.id in notes:
                continue
            if note.is_public and note.tag:
                note.link = '%s#note-%s' % (link, note.id)
                note.version = release.version
                notes[note.id] = note
    # Sort by date in descending order
    notes = sorted(notes.values(), key=attrgetter('modified'), reverse=True)
    return l10n_utils.render(request, 'firefox/releases/nightly-feed.xml',
                             {'notes': notes},
                             content_type='application/atom+xml')
|
code-google-com/cortex-vfx | test/IECore/LensDistortOpTest.py | Python | bsd-3-clause | 2,750 | 0.019636 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from IECore import *
import sys
import unittest
class LensDistortOpTest(unittest.TestCase):
	"""Checks LensDistortOp output against a precomputed reference EXR."""
	def testDistortOpWithStandardLensModel(self):
		"""Undistort a UV-map image with the StandardRadialLensModel."""
		# The lens model and parameters to use.
		o = CompoundObject()
		o["lensModel"] = StringData( "StandardRadialLensModel" )
		o["distortion"] = DoubleData( 0.2 )
		o["anamorphicSqueeze"] = DoubleData( 1. )
		o["curvatureX"] = DoubleData( 0.2 )
		o["curvatureY"] = DoubleData( 0.5 )
		o["quarticDistortion"] = DoubleData( .1 )
		# The input image to read.
		r = EXRImageReader("test/IECore/data/exrFiles/uvMapWithDataWindow.100x100.exr")
		img = r.read()
		# Create the Op and set it's parameters.
		op = LensDistortOp()
		op["input"] = img
		op["mode"] = LensModel.Undistort
		op['lensModel'].setValue(o)
		# Run the Op.
		out = op()
		# NOTE(review): 'out' is computed but never compared; only the display
		# windows of the input and the reference image are checked — confirm
		# whether a pixel-level comparison against 'out' was intended.
		r = EXRImageReader("test/IECore/data/exrFiles/uvMapWithDataWindowDistorted.100x100.exr")
		img2 = r.read()
		self.assertEqual( img.displayWindow, img2.displayWindow )
|
import sys
def setup(core, object):
	"""Initialize static attributes of the commando utility ring B (piece 1).

	Called by the engine with the core services object and the tangible
	item object being created; configures display strings, stat bonuses
	and the set-bonus attachment for this ring.
	"""
	object.setAttachment('radial_filename', 'ring/unity')
	object.setAttachment('objType', 'ring')
	# STF (string table file) entries for the item name and detail text.
	object.setStfFilename('static_item_n')
	object.setStfName('item_ring_set_commando_utility_b_01_01')
	object.setDetailFilename('static_item_d')
	object.setDetailName('item_ring_set_commando_utility_b_01_01')
	# Equip requirements.
	object.setIntAttribute('required_combat_level', 85)
	object.setStringAttribute('class_required', 'Commando')
	# Stat and skill modifiers granted while equipped.
	object.setIntAttribute('cat_stat_mod_bonus.@stat_n:constitution_modified', 10)
	object.setIntAttribute('cat_stat_mod_bonus.@stat_n:strength_modified', 15)
	object.setIntAttribute('cat_skill_mod_bonus.@stat_n:expertise_devastation_bonus', 5)
	# Set-bonus descriptions shown at 3/4/5 equipped pieces.
	object.setStringAttribute('@set_bonus:piece_bonus_count_3', '@set_bonus:set_bonus_commando_utility_b_1')
	object.setStringAttribute('@set_bonus:piece_bonus_count_4', '@set_bonus:set_bonus_commando_utility_b_2')
	object.setStringAttribute('@set_bonus:piece_bonus_count_5', '@set_bonus:set_bonus_commando_utility_b_3')
	object.setAttachment('setBonus', 'set_bonus_commando_utility_b')
	return
eswartz/panda3d-stuff | programs/dynamic-geometry/draw_path_tris.py | Python | mit | 10,950 | 0.010594 |
'''
Draw a tunnel with keyboard movement, create it and its collision geometry, and walk through it.
Created on Feb 25, 2015
Released Feb 4, 2016
@author: ejs
'''
from panda3d.core import loadPrcFile, loadPrcFileData # @UnusedImport
loadPrcFile("./myconfig.prc")
# loadPrcFileData("", "load-display p3tinydisplay\nbasic-shaders-only #t\nhardware-animated-vertices #f")
# loadPrcFileData("", "notify-level-collide debug")
loadPrcFileData("", "sync-video 1")
from direct.gui.OnscreenText import OnscreenText
from direct.showbase.ShowBase import ShowBase
from panda3d.core import TextNode, GeomNode, LVecBase4i, GeomVertexFormat, Geom,\
GeomVertexWriter, GeomTristrips, GeomVertexData, Vec3, CollisionNode, \
CollisionTraverser, CollisionSphere,\
CollisionFloorMesh, GeomVertexReader, Point3, CollisionHandlerFloor
import sys
import fpscontroller
from direct.directnotify import DirectNotifyGlobal
class MyApp(ShowBase):
    def __init__(self):
        """Set up the window, FPS camera, HUD, collisions and key bindings."""
        ShowBase.__init__(self)
        # Parent node the camera hangs off of; the FPS controller moves it.
        self.seeNode = self.render.attachNewNode('see')
        self.cam.reparentTo(self.seeNode)
        self.cam.setPos(0, 0, 5)
        self.fpscamera = fpscontroller.FpsController(self, self.seeNode)
        self.fpscamera.setFlyMode(True)
        # Last position from which a path segment was drawn.
        self.prevPos = self.fpscamera.getPos()
        self.prevInto = None
        self.info = self.genLabelText("Position: <unknown>", 4)
        self.makeInstructions()
        self.initCollisions()
        # Vertex colors for the left/right edges of the drawn path (RGBA bytes).
        self.leftColor = LVecBase4i(224, 224, 64, 255)
        self.rightColor = LVecBase4i(64, 224, 224, 255)
        # toggleDrawing() flips this to True and starts drawing mode.
        self.isDrawing = False
        self.toggleDrawing()
        self.accept("escape", sys.exit) #Escape quits
        self.accept("enter", self.toggleDrawing)
    def initCollisions(self):
        """Create the collision traverser, floor handler and player solid."""
        # Initialize the collision traverser.
        self.cTrav = CollisionTraverser()
        self.cTrav.showCollisions(self.render)
        # self.cQueue = CollisionHandlerQueue()
        # Initialize the Pusher collision handler.
        #self.pusher = CollisionHandlerPusher()
        self.pusher = CollisionHandlerFloor()
        ### player
        print DirectNotifyGlobal.directNotify.getCategories()
        # Create a collision node for this object.
        playerNode = CollisionNode('player')
        playerNode.addSolid(CollisionSphere(0, 0, 0, 1))
        # playerNode.setFromCollideMask(BitMask32.bit(0))
        # playerNode.setIntoCollideMask(BitMask32.allOn())
        # Attach the collision node to the object's model.
        self.playerC = self.fpscamera.player.attachNewNode(playerNode)
        # Set the object's collision node to render as visible.
        self.playerC.show()
        # Add the 'player' collision node to the Pusher collision handler.
        # (Registration is deferred to toggleDrawing(), which adds/removes the
        # collider as drawing mode changes.)
        #self.pusher.addCollider(self.playerC, self.fpscamera.player)
        #self.pusher.addCollider(playerC, self.fpscamera.player)
        # self.cTrav.addCollider(self.playerC, self.cQueue)
    def toggleDrawing(self):
        """Switch between drawing mode (fly, record path) and walking mode.

        Drawing mode disables physics, starts the drawHere task and creates a
        fresh geometry node + collision node for the path.  Walking mode
        completes the current path, re-enables the floor collision handler and
        starts the physics task so the player can walk on what was drawn.
        """
        self.isDrawing = not self.isDrawing
        if self.isDrawing:
            self.drawText.setText("Enter: Turn off drawing")
            self.fpscamera.setFlyMode(True)
            self.prevPos = None
            # Stop colliding while flying/drawing.
            self.cTrav.removeCollider(self.playerC)
            self.pusher.removeCollider(self.playerC)
            self.removeTask('updatePhysics')
            self.addTask(self.drawHere, 'drawHere')
            self.geomNode = GeomNode('geomNode')
            self.geomNodePath = self.render.attachNewNode(self.geomNode)
            self.geomNodePath.setTwoSided(True)
            # apparently p3tinydisplay needs this
            self.geomNodePath.setColorOff()
            # Create a collision node for this object.
            self.floorCollNode = CollisionNode('geom')
            # self.floorCollNode.setFromCollideMask(BitMask32.bit(0))
            # self.floorCollNode.setIntoCollideMask(BitMask32.allOn())
            # Attach the collision node to the object's model.
            floorC = self.geomNodePath.attachNewNode(self.floorCollNode)
            # Set the object's collision node to render as visible.
            floorC.show()
            #self.pusher.addCollider(floorC, self.geomNodePath)
            self.newVertexData()
            self.newGeom()
        else:
            self.drawText.setText("Enter: Turn on drawing")
            self.removeTask('drawHere')
            if self.prevPos:
                self.completePath()
            self.fpscamera.setFlyMode(True)
            self.drive.setPos(self.fpscamera.getPos())
            # Re-enable the player collider and the physics update task.
            self.cTrav.addCollider(self.playerC, self.pusher)
            self.pusher.addCollider(self.playerC, self.fpscamera.player)
            self.taskMgr.add(self.updatePhysics, 'updatePhysics')
    def newVertexData(self):
        """Create a fresh vertex buffer (position + color) and its writers."""
        fmt = GeomVertexFormat.getV3c4()
        # fmt = GeomVertexFormat.getV3n3c4()
        self.vertexData = GeomVertexData("path", fmt, Geom.UHStatic)
        self.vertexWriter = GeomVertexWriter(self.vertexData, 'vertex')
        # self.normalWriter = GeomVertexWriter(self.vertexData, 'normal')
        self.colorWriter = GeomVertexWriter(self.vertexData, 'color')
    def newGeom(self):
        """Start a new triangle-strip Geom backed by the current vertex data."""
        self.triStrips = GeomTristrips(Geom.UHDynamic)
        self.geom = Geom(self.vertexData)
        self.geom.addPrimitive(self.triStrips)
    def makeInstructions(self):
        """Draw the on-screen title and control-help text."""
        OnscreenText(text="Draw Path by Walking",
                     style=1, fg=(1,1,0,1),
                     pos=(0.5,-0.95), scale = .07)
        # Placeholder label updated by toggleDrawing() with the current mode.
        self.drawText = self.genLabelText("", 0)
        self.genLabelText("Walk (W/S/A/D), Jump=Space, Look=PgUp/PgDn", 1)
        self.genLabelText(" (hint, go backwards with S to see your path immediately)", 2)
        self.genLabelText("ESC: Quit", 3)
    def genLabelText(self, text, i):
        """Return an OnscreenText label placed on HUD row *i* (top-left)."""
        return OnscreenText(text = text, pos = (-1.3, .95-.05*i), fg=(1,1,0,1),
                            align = TextNode.ALeft, scale = .05)
def drawHere(self, task):
pos = self.fpscamera.getPos()
self.info.setText("Position: {0}, {1}, {2} at {3} by | {4}".format(int(pos.x*1 | 00)/100., int(pos.y*100)/100., int(pos.z)/100.,
self.fpscamera.getHeading(), self.fpscamera.getLookAngle()))
prevPos = self.prevPos
if not prevPos:
self.prevPos = pos
elif (pos - prevPos).length() > 1:
self.drawQuadTo(prevPos, pos, 2)
row = self.vertexWriter.getWriteRow()
numPrims = self.triStrips.getNumPrimitives()
if numPrims == 0:
primVerts = row
else:
primVerts = row - self.triStrips.getPrimitiveEnd(numPrims-1)
if primVerts >= 4:
self.triStrips.closePrimitive()
if row >= 256:
print "Packing and starting anew"
newGeom = True
self.geom.unifyInPlace(row, False)
else:
newGeom = False
self.completePath()
if newGeom:
self.newVertexData()
self.newGeom()
if not newGeom:
self.triStrips.addConsecutiveVertices(row - 2, 2)
else:
self.drawQuadTo(prevPos, pos, 2)
self.leftColor[1] += 63
self.rightColor[2] += 37
self.prevPos = pos
return task.cont
def drawLineTo(self, pos, color):
self.vertexWriter.addData3f(pos.x, pos.y, pos.z)
# self.normalWriter.addData3f(0, 0, 1)
self.colorWriter.addData4i(color)
self.triStrips.addNextVer |
from enum import IntEnum
class FingerPosition(IntEnum):
    """Compass-style orientation of a detected finger."""

    VerticalUp = 0
    VerticalDown = 1
    HorizontalLeft = 2
    HorizontalRight = 3
    DiagonalUpRight = 4
    DiagonalUpLeft = 5
    DiagonalDownRight = 6
    DiagonalDownLeft = 7

    @staticmethod
    def get_finger_position_name(finger_position):
        """Return a human-readable name such as 'Vertical Up'.

        Accepts either a FingerPosition member or its integer value.

        Bug fix: the original elif chain had no final else, so any value
        outside the enum raised UnboundLocalError on return; unknown
        values now raise an explicit ValueError instead.
        """
        names = {
            FingerPosition.VerticalUp: 'Vertical Up',
            FingerPosition.VerticalDown: 'Vertical Down',
            FingerPosition.HorizontalLeft: 'Horizontal Left',
            FingerPosition.HorizontalRight: 'Horizontal Right',
            FingerPosition.DiagonalUpRight: 'Diagonal Up Right',
            FingerPosition.DiagonalUpLeft: 'Diagonal Up Left',
            FingerPosition.DiagonalDownRight: 'Diagonal Down Right',
            FingerPosition.DiagonalDownLeft: 'Diagonal Down Left',
        }
        try:
            return names[FingerPosition(finger_position)]
        except (ValueError, KeyError):
            raise ValueError('unknown finger position: %r' % (finger_position,))
radicalbit/ambari | contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hcat_client.py | Python | apache-2.0 | 2,941 | 0.00578 | #!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor l | icense agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. Th | e ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from hcat import hcat
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
from resource_management.core.logger import Logger
from resource_management.core.exceptions import ClientComponentHasNoStatus
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.script.script import Script
class HCatClient(Script):
  """Ambari client component that installs and configures HCatalog."""
  def install(self, env):
    """Install the HCat packages, then write out their configuration."""
    import params
    self.install_packages(env)
    self.configure(env)
  def configure(self, env):
    """Push the cluster configuration into params and render the hcat configs."""
    import params
    env.set_params(params)
    hcat()
  def status(self, env):
    # Client components have no daemon process whose status could be reported.
    raise ClientComponentHasNoStatus()
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HCatClientWindows(HCatClient):
  # Windows variant needs no behavior beyond the base class.
  pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HCatClientDefault(HCatClient):
  def get_component_name(self):
    # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
    # update after daemons, this ensures that the hcat directories are correct on hosts
    # which do not include the WebHCat daemon
    return "hive-webhcat"
  def pre_upgrade_restart(self, env, upgrade_type=None):
    """
    Execute <stack-selector-tool> before reconfiguring this client to the new stack version.
    :param env:
    :param upgrade_type:
    :return:
    """
    Logger.info("Executing Hive HCat Client Stack Upgrade pre-restart")
    import params
    env.set_params(params)
    # this function should not execute if the stack version does not support rolling upgrade
    if not (params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version)):
      return
    # HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
    # update after daemons, this ensures that the hcat directories are correct on hosts
    # which do not include the WebHCat daemon
    stack_select.select("hive-webhcat", params.version)
# Entry point: Ambari invokes this script directly with a command argument.
if __name__ == "__main__":
  HCatClient().execute()
|
glaudsonml/kurgan-ai | tools/sqlmap/waf/modsecurity.py | Python | apache-2.0 | 777 | 0.003861 | #!/usr/bin/env python
"""
Copyright (c) 2006-2016 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copyi | ng permission
"""
import re
from lib.core.enums import HTTP_HEADER
from lib.core.settings import WAF_ATTACK_VECTORS
__product__ = "ModSecurity: Open Source Web Application Firewall (Trustwave)"
def detect(get_page):
    """Probe the target with attack vectors and look for ModSecurity fingerprints.

    A vector is considered a hit when the response is a 501 without an
    Akamai-style reference id, the Server header advertises Mod_Security
    (or its "NOYB" disguise), or the body contains the ModSecurity error
    banner.  Returns True on the first hit, False otherwise.
    """
    for vector in WAF_ATTACK_VECTORS:
        page, headers, code = get_page(get=vector)
        server = headers.get(HTTP_HEADER.SERVER, "")
        by_status = code == 501 and re.search(r"Reference #[0-9A-Fa-f.]+", page, re.I) is None
        by_server = re.search(r"Mod_Security|NOYB", server, re.I) is not None
        by_body = "This error was generated by Mod_Security" in page
        if by_status or by_server or by_body:
            return True
    return False
|
htlcnn/pyrevitscripts | HTL.tab/Test.panel/Test.pushbutton/keyman/keyman/keyman/settings.py | Python | mit | 3,109 | 0.001287 | """
Django settings for keyman project.
Generated by 'django-admin startproject' using Django 1.11.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '$-2ijwgs8-3i*r#j@1ian5xrp+17)fz)%cdjjhwa#4x&%lk7v@'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'keys',
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'keyman.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'keyman.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
# NOTE: the first validator path below was corrupted by a ' | ' artifact in
# the source dump and has been reconstructed.

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

# Reconstructed from a corrupted line ("USE_I18N = | True").
USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/

STATIC_URL = '/static/'
googleapis/python-aiplatform | google/cloud/aiplatform_v1/types/tensorboard.py | Python | apache-2.0 | 4,087 | 0.001223 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1.types import encryption_spec as gca_encryption_spec
from google.protobuf import timestamp_pb2 # type: ignore
# Registers this module's message types under the v1 Vertex AI proto
# package so proto-plus can resolve them by fully-qualified name.
__protobuf__ = proto.module(
    package="google.cloud.aiplatform.v1", manifest={"Tensorboard",},
)
class Tensorboard(proto.Message):
    r"""Tensorboard is a physical database that stores users'
    training metrics. A default Tensorboard is provided in each
    region of a GCP project. If needed users can also create extra
    Tensorboards in their projects.
    Attributes:
        name (str):
            Output only. Name of the Tensorboard. Format:
            ``projects/{project}/locations/{location}/tensorboards/{tensorboard}``
        display_name (str):
            Required. User provided name of this
            Tensorboard.
        description (str):
            Description of this Tensorboard.
        encryption_spec (google.cloud.aiplatform_v1.types.EncryptionSpec):
            Customer-managed encryption key spec for a
            Tensorboard. If set, this Tensorboard and all
            sub-resources of this Tensorboard will be
            secured by this key.
        blob_storage_path_prefix (str):
            Output only. Consumer project Cloud Storage
            path prefix used to store blob data, which can
            either be a bucket or directory. Does not end
            with a '/'.
        run_count (int):
            Output only. The number of Runs stored in
            this Tensorboard.
        create_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when this Tensorboard
            was created.
        update_time (google.protobuf.timestamp_pb2.Timestamp):
            Output only. Timestamp when this Tensorboard
            was last updated.
        labels (Sequence[google.cloud.aiplatform_v1.types.Tensorboard.LabelsEntry]):
            The labels with user-defined metadata to
            organize your Tensorboards.
            Label keys and values can be no longer than 64
            characters (Unicode codepoints), can only
            contain lowercase letters, numeric characters,
            underscores and dashes. International characters
            are allowed. No more than 64 user labels can be
            associated with one Tensorboard (System labels
            are excluded).
            See https://goo.gl/xmQnxf for more information
            and examples of labels. System reserved label
            keys are prefixed with
            "aiplatform.googleapis.com/" and are immutable.
        etag (str):
            Used to perform a consistent
            read-modify-write updates. If not set, a blind
            "overwrite" update happens.
    """
    # NOTE(review): this module follows the proto-plus generated-code
    # pattern; field numbers below are the wire contract and must stay in
    # sync with the .proto definition — regenerate rather than hand-edit.
    name = proto.Field(proto.STRING, number=1,)
    display_name = proto.Field(proto.STRING, number=2,)
    description = proto.Field(proto.STRING, number=3,)
    encryption_spec = proto.Field(
        proto.MESSAGE, number=11, message=gca_encryption_spec.EncryptionSpec,
    )
    blob_storage_path_prefix = proto.Field(proto.STRING, number=10,)
    run_count = proto.Field(proto.INT32, number=5,)
    create_time = proto.Field(proto.MESSAGE, number=6, message=timestamp_pb2.Timestamp,)
    update_time = proto.Field(proto.MESSAGE, number=7, message=timestamp_pb2.Timestamp,)
    labels = proto.MapField(proto.STRING, proto.STRING, number=8,)
    etag = proto.Field(proto.STRING, number=9,)
# Public API of the module, derived from the registered manifest.
__all__ = tuple(sorted(__protobuf__.manifest))
|
bzz/kythe | .ycm_extra_conf.py | Python | apache-2.0 | 9,245 | 0.008329 | #!/usr/bin/python
# Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# This .ycm_extra_conf will be picked up automatically for code completion using
# YouCompleteMe.
#
# See https://valloric.github.io/YouCompleteMe/ for instructions on setting up
# YouCompleteMe using Vim. This .ycm_extra_conf file also works with any other
# completion engine that uses YCMD (https://github.com/Valloric/ycmd).
#
# Code completion depends on a Clang compilation database. This is placed in a
# file named `compile_commands.json` in your execution root path. I.e. it will
# be at the path returned by `bazel info execution_root`.
#
# If the compilation database isn't available, this script will generate one
# using tools/cpp/generate_compilation_database.sh. This process can be slow if
# you haven't built the sources yet. It's always a good idea to run
# generate_compilation_database.sh manually so that you can see the build output
# including any errors encountered during compile command generation.
# ==============================================================================
import json
import os
import shlex
import subprocess
import time
# If all else fails, then return this list of flags.
DEFAULT_FLAGS = []
# Fallback translation unit used for header files with no obvious source.
CANONICAL_SOURCE_FILE = 'kythe/cxx/extractor/cxx_extractor_main.cc'
# Full path to directory containing compilation database. This is usually
# |execution_root|/compile_commands.json.
COMPILATION_DATABASE_PATH = None
# Workspace path.
WORKSPACE_PATH = None
# The compilation database. This is a mapping from the absolute normalized path
# of the source file to its compile command broken down into an array.
COMPILATION_DATABASE = {}
# If loading the compilation database failed for some reason,
# LAST_INIT_FAILURE_TIME contains the value of time.clock() at the time the
# failure was encountered.
LAST_INIT_FAILURE_TIME = None
# If this many seconds have passed since the last failure, then try to generate
# the compilation database again.
RETRY_TIMEOUT_SECONDS = 120
HEADER_EXTENSIONS = ['.h', '.hpp', '.hh', '.hxx']
SOURCE_EXTENSIONS = ['.cc', '.cpp', '.c', '.m', '.mm', '.cxx']
# Dispositions applied to flags in CLANG_OPTION_DISPOSITION below.
NORMALIZE_PATH = 1
REMOVE = 2
# List of clang options and what to do with them. Use the '-foo' form for flags
# that could be used as '-foo <arg>' and '-foo=<arg>' forms, and use '-foo=' for
# flags that can only be used as '-foo=<arg>'.
#
# Mapping a flag to NORMALIZE_PATH causes its argument to be normalized against
# the build directory via ExpandAndNormalizePath(). REMOVE causes both the flag
# and its value to be removed.
CLANG_OPTION_DISPOSITION = {
    '-I': NORMALIZE_PATH,
    '-MF': REMOVE,
    '-cxx-isystem': NORMALIZE_PATH,
    '-dependency-dot': REMOVE,
    '-dependency-file': REMOVE,
    '-fbuild-session-file': REMOVE,
    '-fmodule-file': NORMALIZE_PATH,
    '-fmodule-map-file': NORMALIZE_PATH,
    '-foptimization-record-file': REMOVE,
    '-fprebuilt-module-path': NORMALIZE_PATH,
    '-fprofile-generate=': REMOVE,
    '-fprofile-instrument-generate=': REMOVE,
    '-fprofile-user=': REMOVE,
    # Fixed typo: this key was '-gcc-tollchain=', which could never match
    # the real clang flag '-gcc-toolchain=<dir>'.
    '-gcc-toolchain=': NORMALIZE_PATH,
    '-idirafter': NORMALIZE_PATH,
    '-iframework': NORMALIZE_PATH,
    '-imacros': NORMALIZE_PATH,
    '-include': NORMALIZE_PATH,
    '-include-pch': NORMALIZE_PATH,
    '-iprefix': NORMALIZE_PATH,
    '-iquote': NORMALIZE_PATH,
    '-isysroot': NORMALIZE_PATH,
    '-isystem': NORMALIZE_PATH,
    '-isystem-after': NORMALIZE_PATH,
    '-ivfsoverlay': NORMALIZE_PATH,
    '-iwithprefixbefore': NORMALIZE_PATH,
    '-iwithsysroot': NORMALIZE_PATH,
    '-o': REMOVE,
    '-working-directory': NORMALIZE_PATH,
}
def ProcessOutput(args):
  """Run the program described by |args| and return its stripped stdout.

  Both stderr and stdin are redirected to /dev/null. Raises
  subprocess.CalledProcessError if the child exits with a non-zero status.
  """
  with open(os.devnull, 'w') as null_out, open(os.devnull, 'r') as null_in:
    raw = subprocess.check_output(args, stderr=null_out, stdin=null_in)
  return str(raw).strip()
def InitBazelConfig():
  """Initialize globals based on Bazel configuration.
  Initialize COMPILATION_DATABASE_PATH, WORKSPACE_PATH, and
  CANONICAL_SOURCE_FILE based on Bazel. These values are not expected to change
  during the session."""
  # Shells out to `bazel info` twice; propagates CalledProcessError if the
  # bazel binary is missing or the cwd is not inside a workspace.
  global COMPILATION_DATABASE_PATH
  global WORKSPACE_PATH
  global CANONICAL_SOURCE_FILE
  execution_root = ProcessOutput(['bazel', 'info', 'execution_root'])
  # The generated database lands in the execution root, not the workspace.
  COMPILATION_DATABASE_PATH = os.path.join(execution_root,
                                           'compile_commands.json')
  WORKSPACE_PATH = ProcessOutput(['bazel', 'info', 'workspace'])
  # Resolve the canonical fallback source to an absolute, symlink-free path.
  CANONICAL_SOURCE_FILE = ExpandAndNormalizePath(CANONICAL_SOURCE_FILE,
                                                 WORKSPACE_PATH)
def GenerateCompilationDatabaseSlowly():
  """Generate compilation database. May take a while."""
  # Delegates to the checked-in helper script; its output is written to
  # COMPILATION_DATABASE_PATH. Requires InitBazelConfig() to have populated
  # WORKSPACE_PATH first.
  script_path = os.path.join(WORKSPACE_PATH, 'tools', 'cpp',
                             'generate_compilation_database.sh')
  ProcessOutput(script_path)
def ExpandAndNormalizePath(filename, basepath=None):
  """Resolves |filename| relative to |basepath| and expands symlinks.

  |basepath| defaults to the *current* value of WORKSPACE_PATH. The previous
  signature used ``basepath=WORKSPACE_PATH`` directly, which froze the
  default to None at import time (WORKSPACE_PATH is only filled in later by
  InitBazelConfig), so relative paths were silently left unresolved;
  deferring the global lookup to call time fixes that.
  """
  if basepath is None:
    basepath = WORKSPACE_PATH
  if not os.path.isabs(filename) and basepath:
    filename = os.path.join(basepath, filename)
  filename = os.path.realpath(filename)
  return str(filename)
def PrepareCompileFlags(compile_command, basepath):
  """Split |compile_command| and massage its flags for use by ycmd.

  Flags listed in CLANG_OPTION_DISPOSITION either have their path argument
  normalized against |basepath| (NORMALIZE_PATH) or are dropped together
  with their argument (REMOVE). Handles the '-foo value', '-foo=value' and
  fused '-Ipath' spellings. Returns the resulting list of flags.
  """
  flags = shlex.split(compile_command)
  flags_to_return = []
  use_next_flag_as_value_for = None

  def HandleFlag(name, value, combine):
    # Look up the disposition; for '-foo=value' spellings also try the
    # '-foo=' key used for equals-only flags.
    disposition = CLANG_OPTION_DISPOSITION.get(name, None)
    if disposition is None and combine:
      disposition = CLANG_OPTION_DISPOSITION.get(name + '=', None)
    if disposition == REMOVE:
      return
    if disposition == NORMALIZE_PATH:
      value = ExpandAndNormalizePath(value, basepath)
    if combine:
      flags_to_return.append('{}={}'.format(name, value))
    else:
      flags_to_return.extend([name, value])
  for flag in flags:
    if use_next_flag_as_value_for is not None:
      # This token is the argument of the previous '-foo <arg>' flag.
      name = use_next_flag_as_value_for
      use_next_flag_as_value_for = None
      HandleFlag(name, flag, combine=False)
      continue
    if '=' in flag:  # -foo=bar
      name, value = flag.split('=', 1)
      HandleFlag(name, value, combine=True)
      continue
    if flag in CLANG_OPTION_DISPOSITION:
      use_next_flag_as_value_for = flag
      continue
    if flag.startswith('-I'):
      # Fused include path, e.g. '-Ifoo/bar': the include path is everything
      # after the '-I' prefix. BUG FIX: the original passed |flags[3:]| (a
      # slice of the whole flag *list*) as the value here.
      HandleFlag('-I', flag[2:], combine=False)
      continue
    flags_to_return.append(flag)
  return flags_to_return
def LoadCompilationDatabase():
  # Populates the global COMPILATION_DATABASE from compile_commands.json,
  # generating the file first (slow) if it does not exist yet.
  if not os.path.exists(COMPILATION_DATABASE_PATH):
    GenerateCompilationDatabaseSlowly()
  with open(COMPILATION_DATABASE_PATH, 'r') as database:
    database_dict = json.load(database)
  global COMPILATION_DATABASE
  COMPILATION_DATABASE = {}
  for entry in database_dict:
    # Keys are absolute, symlink-resolved source paths so lookups succeed
    # regardless of how the editor spells the filename.
    filename = ExpandAndNormalizePath(entry['file'], WORKSPACE_PATH)
    directory = entry['directory']
    command = entry['command']
    COMPILATION_DATABASE[filename] = {
        'command': command,
        'directory': directory
    }
def IsHeaderFile(filename):
  """Return True if |filename| has a recognized C/C++ header extension."""
  _, extension = os.path.splitext(filename)
  return extension in HEADER_EXTENSIONS
def FindAlternateFile(filename):
if IsHeaderFile(filename):
basename = os.path.splitext(filename)[0]
for extension in SOURCE_EXTENSIONS:
new_filename = basename + extension
if new_filename in COMPILATION_DATABASE:
return new_filename
# Try something in the same directory.
directory = os.path.dirname(filename)
for key in COMPILATION_DATABAS |
JoePelz/SAM | spec/python/pages/test_rules.py | Python | gpl-3.0 | 19,920 | 0.001908 | # coding=utf-8
from spec.python import db_connection
import operator
import pytest
from datetime import datetime
from sam.pages.rules | import Rules, RulesApply, RulesEdit, RulesNew
from sam.models.security import rules, rule_template, ruling_process
from sam import errors
db = db_connection.db
sub_id = db_connection.default_sub
ds_full = db_connection.dsid_default
def reset_dummy_rules():
    # Test fixture: wipe the default subscription's security rules and
    # install four known ones with predictable names/descriptions.
    r = rules.Rules(db, sub_id)
    r.clear()
    r.add_rule("compromised.yml", 'comp hosts', 'desc1', {})
    r.add_rule("dos.yml", 'DDoS', 'desc2', {})
    r.add_rule("portscan.yml", 'port scans', 'desc3', {})
    r.add_rule("suspicious.yml", 'suspicious traffic', 'desc4', {})
    all_rules = r.get_all_rules()
    ids = [rule.id for rule in all_rules]
    # enable all but portscan.yml
    r.edit_rule(ids[0], {'active': True})
    r.edit_rule(ids[1], {'active': True})
    r.edit_rule(ids[3], {'active': True})
def test_rules_decode():
    # Rules.decode_get_request ignores its payload entirely: empty dicts,
    # irrelevant keys, and garbage all decode to None.
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = Rules()
        assert r.decode_get_request({}) is None
        assert r.decode_get_request({'method': 'nothing'}) is None
        assert r.decode_get_request('Garbage') is None
def test_rules_perform():
    # The GET handler returns exactly what the model layer reports.
    reset_dummy_rules()
    r_model = rules.Rules(db, sub_id)
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = Rules()
        response = r.perform_get_command(None)
        assert response == r_model.get_all_rules()
def test_rules_encode():
    # Encoding flattens each rule into a plain dict; portscan.yml stays
    # inactive per the fixture set up by reset_dummy_rules().
    reset_dummy_rules()
    r_model = rules.Rules(db, sub_id)
    all_rules = r_model.get_all_rules()
    ids = [rule.id for rule in all_rules]
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = Rules()
        encoded = r.encode_get_response(all_rules)
        expected = {'all': [
            {'id': ids[0], 'name': 'comp hosts', 'desc': 'desc1', 'template': 'Compromised Traffic', 'type': 'immediate', 'active': True},
            {'id': ids[1], 'name': 'DDoS', 'desc': 'desc2', 'template': 'High Traffic', 'type': 'periodic', 'active': True},
            {'id': ids[2], 'name': 'port scans', 'desc': 'desc3', 'template': 'Port Scanning', 'type': 'periodic', 'active': False},
            {'id': ids[3], 'name': 'suspicious traffic', 'desc': 'desc4', 'template': 'IP -> IP/Port', 'type': 'immediate', 'active': True},
        ]}
        assert encoded == expected
# ================= RulesNew =================
def test_rulesnew_get_decode():
    # Like Rules.decode_get_request, RulesNew ignores the GET payload.
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        assert r.decode_get_request({}) is None
        assert r.decode_get_request({'method': 'nothing'}) is None
        assert r.decode_get_request('Garbage') is None
def test_rulesnew_get_perform():
    # GET on RulesNew lists every available (template file, display name)
    # pair, including the custom template shipped with the test data.
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        response = r.perform_get_command(None)
        expected = [
            ('compromised.yml', 'Compromised Traffic'),
            ('custom: test_rule.yml', 'Test Yaml'),
            ('dos.yml', 'High Traffic'),
            ('netscan.yml', 'Network Scanning'),
            ('portscan.yml', 'Port Scanning'),
            ('suspicious.yml', 'IP -> IP/Port'),
        ]
        # Sort by filename for a deterministic comparison; discovery order
        # is not guaranteed.
        response.sort(key=operator.itemgetter(0))
        assert response == expected
def test_rulesnew_get_encode():
    # The encoded response exposes just the template filenames.
    all_templates = rule_template.get_all()
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        response = r.encode_get_response(all_templates)
        expected = {
            'portscan.yml',
            'netscan.yml',
            'suspicious.yml',
            'compromised.yml',
            'dos.yml',
            'custom: test_rule.yml'}
        assert 'templates' in response
        assert set(response['templates']) == expected
def test_rulesnew_post_decode():
    """
    valid requests include:
    name: blah, desc: blah, template: blah
    :return:
    """
    # All three of name/desc/template must be present and non-blank;
    # anything else raises errors.RequiredKey.
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        # bad data input
        with pytest.raises(errors.RequiredKey):
            data = {'name': 'abc', 'desc': 'def'}
            r.decode_post_request(data)
        with pytest.raises(errors.RequiredKey):
            data = {'desc': 'def', 'template': 'ghi'}
            r.decode_post_request(data)
        with pytest.raises(errors.RequiredKey):
            data = {'name': 'abc', 'template': 'ghi'}
            r.decode_post_request(data)
        with pytest.raises(errors.RequiredKey):
            data = {'name': ' ', 'desc': 'def', 'template': 'ghi'}
            r.decode_post_request(data)
        with pytest.raises(errors.RequiredKey):
            data = {'name': 'abc', 'desc': ' ', 'template': 'ghi'}
            r.decode_post_request(data)
        with pytest.raises(errors.RequiredKey):
            data = {'name': 'abc', 'desc': 'def', 'template': ' '}
            r.decode_post_request(data)
        # good input data
        data = {'name': 'abc', 'desc': 'def', 'template': 'ghi'}
        request = r.decode_post_request(data)
        expected = {
            'name': 'abc',
            'desc': 'def',
            'template': 'ghi'
        }
        assert request == expected
def test_rulesnew_post_perform():
    # POST creates exactly one rule with the decoded name/description.
    r_model = rules.Rules(db, sub_id)
    r_model.clear()
    assert r_model.count() == 0
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        request = {'name': 'abc', 'desc': 'def', 'template': 'ghi'}
        r.perform_post_command(request)
        assert r_model.count() == 1
        new_rule = r_model.get_all_rules()[0]
        assert new_rule.get_name() == 'abc'
        assert new_rule.get_desc() == 'def'
def test_rulesnew_post_encode():
    # The POST response wraps the raw result string unchanged.
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesNew()
        assert r.encode_post_response("success") == {'result': 'success'}
        assert r.encode_post_response("failure") == {'result': 'failure'}
# ================= RulesEdit =================
def test_rulesedit_get_decode():
    # GET on RulesEdit needs an integer rule id: a missing 'id' raises
    # RequiredKey, a non-numeric one raises MalformedRequest.
    reset_dummy_rules()
    r_model = rules.Rules(db, sub_id)
    all_rules = r_model.get_all_rules()
    ids = [rule.id for rule in all_rules]
    # valid requests require a rule id ('id') to be specified
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesEdit()
        # bad data input
        with pytest.raises(errors.RequiredKey):
            data = {}
            r.decode_get_request(data)
        with pytest.raises(errors.MalformedRequest):
            data = {'id': 'not a number'}
            r.decode_get_request(data)
        # good input data
        data = {'id': ids[0]}
        request = r.decode_get_request(data)
        expected = {
            'id': ids[0]
        }
        assert request == expected
def test_rulesedit_get_perform():
    # A known id returns the matching rule object; an id that does not
    # exist in the DB raises MalformedRequest.
    reset_dummy_rules()
    r_model = rules.Rules(db, sub_id)
    all_rules = r_model.get_all_rules()
    test_rule = all_rules[0]
    with db_connection.env(mock_input=True, login_active=False, mock_session=True):
        r = RulesEdit()
        request = {'id': test_rule.id}
        response = r.perform_get_command(request)
        assert response == test_rule
        with pytest.raises(errors.MalformedRequest):
            data = {'id': -200}
            r.perform_get_command(data)
def test_rulesedit_get_encode():
reset_dummy_rules()
r_model = rules.Rules(db, sub_id)
all_rules = r_model.get_all_rules()
test_rule = all_rules[0]
with db_connection.env(mock_input=True, login_active=False, mock_session=True):
r = RulesEdit()
encoded = r.encode_get_response(test_rule)
assert set(encoded.keys()) == {'id', 'name', 'desc', 'type', 'active', 'exposed', 'actions'}
assert encoded['name'] == 'comp hosts'
assert encoded['desc'] == 'desc1'
assert set(encoded['actions'].keys()) == {
'alert_active', 'alert_s |
EmreAtes/spack | var/spack/repos/builtin/packages/libiconv/package.py | Python | lgpl-2.1 | 2,156 | 0.000464 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PU | RPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import shutil
class Libiconv(AutotoolsPackage):
    """GNU libiconv provides an implementation of the iconv() function
    and the iconv program for character set conversion."""
    homepage = "https://www.gnu.org/software/libiconv/"
    url = "http://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.15.tar.gz"
    # Checksums are MD5 digests of the release tarballs.
    version('1.15', 'ace8b5f2db42f7b3b3057585e80d9808')
    version('1.14', 'e34509b1623cec449dfeb73d7ce9c6c6')
    # We cannot set up a warning for gets(), since gets() is not part
    # of C11 any more and thus might not exist.
    patch('gets.patch', when='@1.14')
    # NOTE(review): 1.14 apparently does not build with GCC >= 5 even with
    # the patch above — confirm before lifting this conflict.
    conflicts('@1.14', when='%gcc@5:')
    def configure_args(self):
        # Called by Spack during the configure stage. Besides returning the
        # extra ./configure arguments, it also patches config.guess in the
        # libcharset subdirectory (side effect of the copyfile below).
        args = ['--enable-extra-encodings']
        # A hack to patch config.guess in the libcharset sub directory
        shutil.copyfile('./build-aux/config.guess',
                        'libcharset/build-aux/config.guess')
        return args
|
ClockworkOrigins/m2etis | configurator/configurator/NedGenerator.py | Python | apache-2.0 | 6,119 | 0.001471 | __author__ = 'sianwahl'
from string import Template
class NedGenerator:
def __init__(self, number_of_channels):
self.number_of_channels = number_of_channels
def generate(self):
return self._generate_tuplefeeder_ned(), self._generate_m2etis_ned()
def _generate_tuplefeeder_ned(self):
template = """
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see http://www.gnu.org/licenses/.
//
package m2etis.applications.TupleFeeder;
import oversim.common.BaseApp;
import oversim.common.ITier;
simple TupleFeeder extends BaseApp
{
parameters:
@class(TupleFeeder);
int largestKey; // largest key we can pick
int numSubs;
int numPubs;
int numPubSubs;
int numRend;
int channelCount;
double stopAvg;
int waitForSubscribe @unit(s);
int waitForPublish @unit(s);
$channel_specific_parameters
}
module TupleFeederModules like ITier
{
parameters:
@display("i=block/segm");
gates:
input from_lowerTier; // gate from the lower tier
input from_upperTier; // gate from the upper tier
output to_lowerTier; // gate to the lower tier
output to_upperTier; // gate to the upper tier
| input trace_in; // gate for trace file commands
input udpIn;
output udpOut;
input tcpIn;
output tcpOut;
submodules:
tupleFeeder: TupleFeeder;
connections allowunconnected:
from_lowerTier --> tupleFeeder.from_lowerTier;
to_lowerTie | r <-- tupleFeeder.to_lowerTier;
trace_in --> tupleFeeder.trace_in;
udpIn --> tupleFeeder.udpIn;
udpOut <-- tupleFeeder.udpOut;
}
"""
channel_specific_parameters = ""
for i in range(0, self.number_of_channels):
channel_specific_parameters += "int numToSend_" + str(i) + ";\n\t"
channel_specific_parameters += "int burstAmount_" + str(i) + ";\n\t"
channel_specific_parameters += "int burstFrequency_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int burstDuration_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int chanceToUnsubscribe_" + str(i) + ";\n\t"
channel_specific_parameters += "int timeToUnsubscribe_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int timeToSubscribe_" + str(i) + " @unit(s);\n\t"
channel_specific_parameters += "int dropChance_" + str(i) + ";\n\t"
channel_specific_parameters += "bool compensateDrop_" + str(i) + ";\n\t"
channel_specific_parameters += "double fluctuation_" + str(i) + ";\n\t"
template_prepared = Template(template)
result = template_prepared.substitute(
channel_specific_parameters=channel_specific_parameters
)
return result
def _generate_m2etis_ned(self):
template = """
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with this program. If not, see http://www.gnu.org/licenses/.
//
package m2etis.middleware;
import oversim.common.BaseApp;
import oversim.common.ITier;
//
// TODO auto-generated type
//
simple M2etisAdapter extends BaseApp
{
parameters:
@class(M2etisAdapter);
$disable_overlays
int packetSize @unit(B);
int queueSize @unit(B);
int channelCount;
int downstream @unit(bps);
int upstream @unit(bps);
int headerSize @unit(B);
int startRoot;
int endRoot;
int rendezvousNode;
double stopAvg;
double simulationResolution @unit(s);
bool queueDisabled;
}
module M2etisPubSub like ITier
{
gates:
input udpIn; // gate from the UDP layer
output udpOut; // gate to the UDP layer
input from_lowerTier; // gate from the lower tier
input from_upperTier; // gate from the upper tier
output to_lowerTier; // gate to the lower tier
output to_upperTier; // gate to the upper tier
input trace_in; // gate for trace file commands
input tcpIn; // gate from the TCP layer
output tcpOut; // gate to the TCP layer
submodules:
m2etis: M2etisAdapter;
connections allowunconnected:
from_lowerTier --> m2etis.from_lowerTier;
to_lowerTier <-- m2etis.to_lowerTier;
from_upperTier --> m2etis.from_upperTier;
to_upperTier <-- m2etis.to_upperTier;
udpIn --> m2etis.udpIn;
udpOut <-- m2etis.udpOut;
}
"""
disable_overlays = ""
for i in range(0, self.number_of_channels):
disable_overlays += "bool disableOverlay_" + str(i) + ";\n\t"
template_prepared = Template(template)
result = template_prepared.substitute(
disable_overlays=disable_overlays
)
return result
|
tongpa/pypollmanage | pypollmanage/service/questionservice.py | Python | apache-2.0 | 1,641 | 0.014016 | # -*- coding: utf-8 -*-
import json
from tg import request
from tgext.pluggable import app_model
from tgext.pyutilservice import Utility
from surveyobject import QuestionObject
class QuestionService(object):
    """Persists survey questions submitted through the TurboGears request
    cycle (creation/update of a question plus its language row)."""
    def __init__(self):
        self.utility = Utility()
        pass
    def create(self, **question):
        # NOTE(review): the bare `print` statements below look like leftover
        # Python 2 debugging output and write to stdout on every request —
        # confirm they can be removed.
        print question
        #print request.body
        self.questionObject = QuestionObject(**question)
        print "save value:";
        print self.questionObject
        # datagrid is parsed but only printed — presumably for debugging;
        # confirm before deleting the parse.
        datagrid = json.loads(self.questionObject.datagrid )
        print datagrid
        # Attribute the question to the authenticated user on the request.
        self.user = request.identity['user']
        question = self.questionObject.question
        question.user_id = self.user.user_id
        # Strip '"' characters from the question text (rlreplace helper;
        # exact semantics defined in Utility) and normalize an empty id.
        question.question = self.utility.rlreplace('"', '', question.question )
        question.id_question = self.utility.setIfEmpty(question.id_question)
        self.questionProject = app_model.QuestionProject.getId(question.id_question_project)
        # The language row inherits the project's language.
        questionLang = self.questionObject.questionLang
        questionLang.id_language = self.questionProject.id_language
        # Existing question -> update in place; otherwise insert first so the
        # generated id can be attached to the language row.
        if question.id_question :
            question.updateOrdering()
            question.updateall()
            questionLang.saveOrUpdate()
            print "update"
        else:
            question.save()
            questionLang.id_question = question.id_question
            questionLang.saveOrUpdate()
            print "create"
    def delete(self, **question):
        # NOTE(review): no deletion is actually performed here — the method
        # only reports success. Confirm whether this is an unfinished stub.
        self.status = True
        self.message = "success"
        return self.status, self.message
# |
JulyKikuAkita/PythonPrac | cs15211/MinimumWindowSubstring.py | Python | apache-2.0 | 7,107 | 0.001688 | __source__ = 'https://leetcode.com/problems/minimum-window-substring/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/minimum-window-substring.py
# Time: O(n)
# Space: O(k), k is the number of different characters
# Hashtable
#
# Description: Leetcode # 76. Minimum Window Substring
#
# Given a string S and a string T, find the minimum window in S
# which will contain all the characters in T in complexity O(n).
#
# For example,
# S = "ADOBECODEBANC"
# T = "ABC"
# Minimum window is "BANC".
#
# Note:
# If there is no such window in S that covers all characters in T, return the emtpy string "".
#
# If there are multiple such windows, you are guaranteed that there will always be only one unique minimum window in S.
# Companies
# LinkedIn Snapchat Uber Facebook
# Related Topics
# Hash Table Two Pointers String
# Similar Questions
# Substring with Concatenation of All Words Minimum Size Subarray Sum
# Sliding Window Maximum Permutation in String Smallest Range
#
import unittest
import collections
class Solution:
    # @return a string
    def minWindow(self, S, T):
        """Return the smallest substring of S containing every character of
        T (with multiplicity), or "" if no such window exists.

        Sliding window over S: O(len(S) + len(T)) time, O(1) extra space
        (fixed 128-slot ASCII count tables).

        BUG FIX: the original used 52-slot tables indexed by
        ord(c) - ord('a'); uppercase letters produced *negative* indices
        that wrapped around and collided with 'u'..'z', so mixed-case input
        gave wrong answers (e.g. minWindow("A", "u") returned "A").
        Indexing by ord(c) into 128-slot tables removes the collisions.
        """
        if not T:
            # An empty pattern is trivially covered; the original crashed
            # here with an IndexError.
            return ""
        current_count = [0] * 128
        expected_count = [0] * 128
        for char in T:
            expected_count[ord(char)] += 1
        i, count, start, min_width, min_start = 0, 0, 0, float("inf"), 0
        while i < len(S):
            c = ord(S[i])
            current_count[c] += 1
            # Only count occurrences that are still needed.
            if current_count[c] <= expected_count[c]:
                count += 1
            if count == len(T):
                # Shrink from the left while the window remains valid.
                while expected_count[ord(S[start])] == 0 or \
                        current_count[ord(S[start])] > expected_count[ord(S[start])]:
                    current_count[ord(S[start])] -= 1
                    start += 1
                if min_width > i - start + 1:
                    min_width = i - start + 1
                    min_start = start
            i += 1
        if min_width == float("inf"):
            return ""
        return S[min_start:min_width + min_start]
class Solution2:
    def minWindow(self, s, t):
        """
        :type s: str
        :type t: str
        :rtype: str

        Sliding window: `need` holds the outstanding count of each character
        of t (it may go negative for surplus characters inside the current
        window); `cnt` counts how many required characters the window covers.
        When cnt reaches len(t), the left edge advances as far as possible
        and the window is recorded if it is the shortest so far.
        O(len(s) + len(t)) time.

        Fixes over the original: uses range() instead of the Python-2-only
        xrange(), renames the local `dict` so it no longer shadows the
        builtin, drops unused locals, and returns "" for empty input instead
        of raising IndexError. Window logic is unchanged.
        """
        if not s or not t:
            return ""
        res = ""
        need = collections.defaultdict(int)
        for ch in t:
            need[ch] += 1
        cnt = 0
        min_len = float("inf")
        s_idx = 0
        for i in range(len(s)):
            if s[i] in need:
                need[s[i]] -= 1
                if need[s[i]] >= 0:
                    cnt += 1
            while cnt == len(t):
                if s[s_idx] in need:
                    need[s[s_idx]] += 1
                    if need[s[s_idx]] > 0:
                        # s[s_idx] is required: record the window before the
                        # left edge passes it, then it is no longer covered.
                        if min_len > i - s_idx + 1:
                            min_len = i - s_idx + 1
                            res = s[s_idx: i + 1]
                        cnt -= 1
                s_idx += 1
        return res
class TestMethods(unittest.TestCase):
    # Smoke test: both implementations must find "BANC" in the classic
    # LeetCode #76 example.
    def test_Local(self):
        self.assertEqual(1, 1)
        self.assertEqual("BANC", Solution2().minWindow("ADOBECODEBANC", "ABC"))
        self.assertEqual("BANC", Solution().minWindow("ADOBECODEBANC", "ABC"))
if __name__ == '__main__':
    unittest.main()
Java = '''
# Thought: https://leetcode.com/problems/minimum-window-substring/solution/
# 25ms 52.80%
class Solution {
public String minWindow(String s, String t) {
String res = "";
if(s == null || t == null || s.length() == 0 || t.length() == 0) return res;
int minLen = Integer.MAX_VALUE;
Map<Character, Integer> map = new HashMap<>();
for( int i = 0; i < t.length(); i++){
if(!map.containsKey(t.charAt(i))){
map.put(t.charAt(i), 0);
}
map.put(t.charAt(i), map.get(t.charAt(i)) + 1);
}
int cnt = 0;
int prev = 0;
for(int i = 0 ; i < s.length(); i++){
char c = s.charAt(i);
if(map.containsKey(c)){
map.put(c, map.get(c) - 1);
if(map.get(c) >= 0){
cnt += 1;
}
while(cnt == t.length()){
char p = s.charAt(prev);
if(map.containsKey(p)){
map.put(p, map.get(p) + 1);
if(map.get(p) > 0){
if(minLen > i - prev + 1){
minLen = i - prev + 1;
res = s.substring(prev, i + 1);
}
cnt --;
}
}
prev ++;
}
}
}
return res;
}
}
#76.01% 7ms
class Solution {
public String minWindow(String s, String t) {
int lenS = s.length();
int lenT = t.length();
if (lenS == 0 || lenT == 0) {
return "";
}
int[] sCount = new int[128];
int[] tCount = new int[128];
int count = lenT;
int[] result = new int[] {-1, -1};
int start = 0;
for (int i = 0; i < lenT; i++) {
tCount[t.charAt(i)]++;
}
for (int i = 0; i < lenS; i++) {
char c = s.charAt(i);
sCount[c]++;
if (sCount[c] <= tCount[c]) {
count--;
}
if (count == 0) {
while (true) {
char remove = s.charAt(start);
if (sCount[remove] <= tCount[remove]) {
break;
}
sCount[remove]--;
start++;
}
if (result[0] < 0 || result[1] - result[0] > i + 1 - start) {
result[0] = start;
result[1] = i + 1;
}
sCount[s.charAt(start++)]--;
count++;
}
}
return result[0] < 0 ? "" : s.substring(result[0], result[1]);
}
}
# 4ms 96.63%
class Solution {
public String minWindow(String s, String t) {
if (s == null || t == null || s.length() == 0 || t.length() == 0) {
return null;
}
int start = -1;
int end = s.length() + 1;
int left = 0;
int windowSize = 0;
int[] count = new int[256];
for (char c : t.toCharArray()) {
count[c]++;
}
for (int i = 0; i < s.leng | th(); ++i) {
if(--count[s.charAt(i)] >= 0) {
windowSize++;
}
if (windowSize == t.length()) {
while (++count[s.charAt(left)] <= 0) {
left++;
}
if (i - left < end - start) {
start = left;
end = i;
}
left++;
windowSize--;
}
}
| return start == -1 ? "" : s.substring(start, end + 1);
}
}
'''
|
breunigs/livestreamer | src/livestreamer/plugins/filmon_us.py | Python | bsd-2-clause | 3,678 | 0.002719 | import re
import requests
from livestreamer.compat import urlparse
from livestreamer.exceptions import PluginError, NoStreamsError
from livestreamer.plugin import Plugin
from livestreamer.stream import RTMPStream, HTTPStream
from livestreamer.utils import urlget, urlresolve, prepend_www
RTMP_URL = "rtmp://204.107.26.73/battlecam"
RTMP_UPLOAD_URL = "rtmp://204.1 | 07.26.75/streamer"
SWF_URL = "http://www.filmon.us/application/themes/base/flash/broadcast/VideoChatECCDN_debug_withoutCenteredOwner.swf"
SWF_UPLOAD_URL = "http://www.battlecam.com/application/themes/base/flash/MediaPlayer.swf"
class Filmon_us(Plugin):
    """Livestreamer plugin for filmon.us: live broadcasts, uploaded videos
    and archived ("history") videos."""

    @classmethod
    def can_handle_url(cls, url):
        """Accept any URL that mentions the filmon.us domain."""
        return "filmon.us" in url

    def _get_streams(self):
        """Dispatch on the URL shape and return a dict of streams.

        Raises:
            PluginError: if rtmpdump is not available for RTMP playback.
        """
        if not RTMPStream.is_usable(self.session):
            # Message de-garbled from the source ("rtmpdump i | s ...").
            raise PluginError("rtmpdump is not usable and required by Filmon_us plugin")
        streams = {}
        try:
            if "filmon.us/history" in self.url or "filmon.us/video/history/hid" in self.url:
                # archived ("history") video
                streams['default'] = self._get_history()
            elif "filmon.us/video" in self.url:
                # uploaded video
                streams['default'] = self._get_stream_upload()
            else:
                # live video
                streams['default'] = self._get_stream_live()
        except NoStreamsError:
            pass
        return streams

    def _get_history(self):
        """Resolve an archived video to a plain HTTP stream, or None."""
        video_id = self.url.rstrip("/").rpartition("/")[2]
        self.logger.debug("Testing if video exist")
        history_url = 'http://www.filmon.us/video/history/hid/' + video_id
        # The site redirects unknown history ids to '/'.
        if urlresolve(prepend_www(history_url)) == '/':
            raise PluginError("history number " + video_id + " don't exist")
        self.logger.debug("Fetching video URL")
        res = urlget(history_url)
        # Raw string avoids the invalid-escape warning for \w; '\.' makes
        # the extension dot literal (the old pattern used a '.' wildcard).
        match = re.search(r"http://cloud.battlecam.com/([/\w]+)\.flv", res.text)
        if not match:
            return
        url = match.group(0)
        return HTTPStream(self.session, url)

    def _get_stream_upload(self):
        """Resolve an uploaded video to an RTMP stream."""
        video = urlparse(self.url).path
        # Unknown videos redirect to the channel overview page.
        if urlresolve(prepend_www(self.url)) == 'http://www.filmon.us/channels':
            raise PluginError(video + " don't exist")
        playpath = "mp4:resources" + video + '/v_3.mp4'
        rtmp = RTMP_UPLOAD_URL
        parsed = urlparse(rtmp)
        app = parsed.path[1:]
        return RTMPStream(self.session, {
            "rtmp": rtmp,
            "pageUrl": self.url,
            "swfUrl": SWF_UPLOAD_URL,
            "playpath": playpath,
            "app": app,
            "live": True
        })

    def _get_stream_live(self):
        """Resolve a live channel page to an RTMP stream, or None."""
        self.logger.debug("Fetching room_id")
        res = urlget(self.url)
        match = re.search(r"room/id/(\d+)", res.text)
        if not match:
            return
        room_id = match.group(1)
        self.logger.debug("Comparing channel name with URL")
        match = re.search(r"<meta property=\"og:url\" content=\"http://www.filmon.us/(\w+)", res.text)
        if not match:
            return
        channel_name = match.group(1)
        base_name = self.url.rstrip("/").rpartition("/")[2]
        if channel_name != base_name:
            return
        playpath = "mp4:bc_" + room_id
        # (Removed dead `if not playpath: raise NoStreamsError(...)`:
        # playpath is always a non-empty string here.)
        rtmp = RTMP_URL
        parsed = urlparse(rtmp)
        app = parsed.path[1:]
        return RTMPStream(self.session, {
            "rtmp": RTMP_URL,
            "pageUrl": self.url,
            "swfUrl": SWF_URL,
            "playpath": playpath,
            "app": app,
            "live": True
        })
__plugin__ = Filmon_us
|
rec/BiblioPixel | bibliopixel/control/rest/decorator.py | Python | mit | 2,062 | 0.00097 | import flask, functools, traceback, urllib
from .. import editor
# Error-message templates used by the decorators below. `single` fills
# them with str.format(**locals()), so the placeholder names ({address},
# {value}) must match its local variable names exactly.
NO_PROJECT_ERROR = 'No Project is currently loaded'
BAD_ADDRESS_ERROR = 'Bad address {address}'
BAD_GETTER_ERROR = 'Couldn\'t get address {address}'
BAD_SETTER_ERROR = 'Couldn\'t set value {value} at address {address}'
def single(method):
    """Decorator for RestServer methods that take a single address.

    The wrapped method receives an `editor.Editor` for the (unquoted)
    address; a non-None `value` selects the setter path. The response is
    always JSON: {'value': ...} on success, {'error': ...} on failure.

    Fix: re-joined tokens garbled by stray '|' separators in the source
    (`except Except | ion` and the setter-error line).
    """
    @functools.wraps(method)
    def single(self, address, value=None):
        address = urllib.parse.unquote_plus(address)
        try:
            # `error` tracks which message template applies if the *next*
            # step raises. The templates are filled from locals(), so the
            # names `address` and `value` must not be renamed.
            error = NO_PROJECT_ERROR
            if not self.project:
                raise ValueError
            error = BAD_ADDRESS_ERROR
            ed = editor.Editor(address, self.project)
            if value is None:
                error = BAD_GETTER_ERROR
                result = method(self, ed)
            else:
                error = BAD_SETTER_ERROR
                result = method(self, ed, value)
            result = {'value': result}
        except Exception as e:
            traceback.print_exc()
            msg = '%s\n%s' % (error.format(**locals()), e)
            result = {'error': msg}
        return flask.jsonify(result)
    return single
def multi(method):
    """Decorator for RestServer methods that take multiple addresses.

    Each key in the Flask request values is treated as a sub-address under
    the (unquoted) `address` prefix; the wrapped method is called once per
    key and the per-key results/errors are collected into one JSON reply.
    """
    @functools.wraps(method)
    def multi(self, address=''):
        values = flask.request.values
        address = urllib.parse.unquote_plus(address)
        # Treat `address` as a dotted prefix for the per-field addresses.
        if address and values and not address.endswith('.'):
            address += '.'
        result = {}
        for a in values or '':
            try:
                if not self.project:
                    raise ValueError('No Project is currently loaded')
                ed = editor.Editor(address + a, self.project)
                result[address + a] = {'value': method(self, ed, a)}
            except Exception:
                # Was a bare `except:`, which also swallowed SystemExit
                # and KeyboardInterrupt.
                if self.project:
                    traceback.print_exc()
                result[address + a] = {'error': 'Could not multi addr %s' % a}
        return flask.jsonify(result)
    return multi
|
tburrows13/Game-of-Life | tools.py | Python | mit | 771 | 0 | import time
def import_grid(file_to_open):
    """Load a Game-of-Life grid file.

    The first line holds "<iterations> <delay>"; every following line is a
    row of single-digit cells (e.g. "0101").

    Returns:
        (grid, iterations, delay): grid is a list of rows of ints.

    Fixes: removed stray '|' transcription artifacts and renamed the
    with-target so it no longer shadows the builtin `file`.
    """
    grid = []
    print(file_to_open)  # kept: caller-visible trace of the loaded path
    with open(file_to_open) as fh:
        for i, line in enumerate(fh):
            if i == 0:
                # Header line: iteration count and per-step delay.
                iterations = int(line.split(" ")[0])
                delay = float(line.split(" ")[1])
            else:
                # Every remaining character is one cell of row i-1.
                grid.append([int(item) for item in line.strip()])
    return grid, iterations, delay
def save_grid(file, grid):
    """Write *grid* to the path *file*, one row per line.

    Generalized: rows may be strings ("010") or sequences of ints
    ([0, 1, 0]); both serialize identically, which makes this symmetric
    with the int-row grids returned by import_grid(). String rows behave
    exactly as before. (The with-target no longer shadows the parameter
    or the builtin `file`.)
    """
    with open(file, 'w') as fh:
        for row in grid:
            # join() passes str-row characters through unchanged and
            # stringifies int cells.
            fh.write("".join(str(cell) for cell in row) + "\n")
def check_time(prev_time, freq):
    """Return True if more than *freq* seconds have elapsed since *prev_time*.

    Idiom fix: return the comparison directly instead of the verbose
    if/else True/False ladder.
    """
    return time.time() - prev_time > freq
|
pyrocko/pyrocko | src/apps/colosseo.py | Python | gpl-3.0 | 7,525 | 0 | from __future__ import print_function
# http://pyrocko.org - GPLv3
#
# The Pyrocko Developers, 21st Century
# ---|P------/S----------~Lg----------
import sys
import logging
import os.path as op
from optparse import OptionParser
from pyrocko import util, scenario, guts, gf
from pyrocko import __version__
logger = logging.getLogger('pyrocko.apps.colosseo')
km = 1000.
def d2u(d):
    """Return a copy of mapping *d* with '-' replaced by '_' in every key.

    (dash-to-underscore; rewritten as a dict comprehension.)
    """
    return {k.replace('-', '_'): v for k, v in d.items()}
description = '''This is Colosseo, an earthquake scenario generator.
Create seismic waveforms, InSAR and GNSS offsets for a simulated earthquake
scenario.
Colosseo is part of Pyrocko. Version %s.
''' % __version__
subcommand_descriptions = {
'init': 'initialize a new, blank scenario',
'fill': 'fill the scenario with modelled data',
'snuffle': 'open Snuffler to inspect the waveform data',
'map': 'map the scenario arena'
}
subcommand_usages = {
'init': 'init <scenario_dir>',
'fill': 'fill <scenario_dir>',
'snuffle': 'snuffle <scenario_dir>',
'map': '<scenario_dir>',
}
subcommands = subcommand_descriptions.keys()
program_name = 'colosseo'
usage_tdata = d2u(subcommand_descriptions)
usage_tdata['program_name'] = program_name
usage_tdata['description'] = description
usage = '''%(program_name)s <subcommand> [options] [--] <arguments> ...
%(description)s
Subcommands:
init %(init)s
fill %(fill)s
snuffle %(snuffle)s
map %(map)s
To get further help and a list of available options for any subcommand run:
%(program_name)s <subcommand> --help
''' % usage_tdata
def die(message, err='', prelude=''):
    """Abort the program via sys.exit with a formatted failure message.

    Optional *prelude* text goes before, optional *err* detail after the
    "<program> failed: <message>" line.
    """
    lead = prelude + '\n' if prelude else ''
    trail = '\n' + err if err else ''
    sys.exit('%s%s failed: %s%s' % (lead, program_name, message, trail))
def none_or_float(x):
    """Map the literal string 'none' to None; parse anything else as float."""
    return None if x == 'none' else float(x)
def add_common_options(parser):
    """Attach the options shared by every colosseo subcommand to *parser*."""
    log_levels = ('critical', 'error', 'warning', 'info', 'debug')
    parser.add_option(
        '--loglevel',
        action='store',
        dest='loglevel',
        type='choice',
        choices=log_levels,
        default='info',
        help='set logger level to '
             '"critical", "error", "warning", "info", or "debug". '
             'Default is "%default".')
def process_common_options(options):
    """Apply options shared by all subcommands (currently just logging setup)."""
    util.setup_logging(program_name, options.loglevel)
def cl_parse(command, args, setup=None, details=None):
    """Build an OptionParser for *command* and parse *args* with it.

    Parameters
    ----------
    command : str
        Key into the module-level ``subcommand_usages`` /
        ``subcommand_descriptions`` tables.
    args : list of str
        Command-line arguments for the subcommand (subcommand name removed).
    setup : callable, optional
        Called with the parser so callers can add subcommand-specific options.
    details : str, optional
        Extra text appended to the parser description.

    Returns
    -------
    tuple
        ``(parser, options, args)`` after common options are processed.
    """
    usage = subcommand_usages[command]
    descr = subcommand_descriptions[command]
    # A usage entry may be a single string or a list of usage lines.
    if isinstance(usage, str):
        usage = [usage]
    susage = '%s %s' % (program_name, usage[0])
    for s in usage[1:]:
        # Continuation lines are aligned under the first usage line.
        susage += '\n%s%s %s' % (' '*7, program_name, s)
    description = descr[0].upper() + descr[1:] + '.'
    if details:
        description = description + ' %s' % details
    parser = OptionParser(usage=susage, description=description)
    if setup:
        setup(parser)
    add_common_options(parser)
    (options, args) = parser.parse_args(args)
    process_common_options(options)
    return parser, options, args
def get_scenario_yml(path):
    """Return the path of ``<path>/scenario.yml`` if it exists, else False."""
    fn = op.join(path, 'scenario.yml')
    return fn if op.exists(fn) else False
def command_init(args):
    """Handle ``colosseo init``: create a blank scenario directory.

    Parses --force/--location/--radius, then delegates to
    ``scenario.ScenarioGenerator.initialize`` and prepares an empty
    ``gf_stores`` directory.
    """

    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')
        parser.add_option(
            '--location', dest='location', metavar='LAT,LON',
            help='set scenario center location [deg]')
        parser.add_option(
            '--radius', dest='radius', metavar='RADIUS', type=float,
            # BUG FIX: help text previously said "center location",
            # copy-pasted from the option above.
            help='set scenario radius [km]')

    parser, options, args = cl_parse('init', args, setup=setup)
    if len(args) != 1:
        parser.print_help()
        sys.exit(1)
    if options.location:
        try:
            lat, lon = map(float, options.location.split(','))
        except Exception:
            die('expected --location=LAT,LON')
    else:
        lat = lon = None
    if options.radius is not None:
        # Command line takes km; internals use meters.
        radius = options.radius * km
    else:
        radius = None
    project_dir = args[0]
    try:
        scenario.ScenarioGenerator.initialize(
            project_dir, lat, lon, radius, force=options.force)
        gf_stores_path = op.join(project_dir, 'gf_stores')
        util.ensuredir(gf_stores_path)
    except scenario.CannotCreatePath as e:
        die(str(e) + ' Use --force to override.')
    except scenario.ScenarioError as e:
        die(str(e))
def command_fill(args):
    """Handle ``colosseo fill``: synthesize the modelled data for a scenario.

    Loads ``scenario.yml`` from the given (or current) directory, ensures
    the required GF stores exist, generates the data products and renders
    an overview map.
    """
    def setup(parser):
        parser.add_option(
            '--force', dest='force', action='store_true',
            help='overwrite existing files')
    parser, options, args = cl_parse('fill', args, setup=setup)
    # Default to the current directory when no scenario dir was given.
    if len(args) == 0:
        args.append('.')
    fn = get_scenario_yml(args[0])
    if not fn:
        parser.print_help()
        sys.exit(1)
    project_dir = args[0]
    gf_stores_path = op.join(project_dir, 'gf_stores')
    try:
        engine = get_engine([gf_stores_path])
        sc = guts.load(filename=fn)
        sc.init_modelling(engine)
        sc.ensure_gfstores(interactive=True)
        sc.prepare_data(path=project_dir, overwrite=options.force)
        sc.ensure_data(path=project_dir)
        sc.make_map(op.join(project_dir, 'map.pdf'))
    except scenario.CannotCreatePath as e:
        die(str(e) + ' Use --force to override.')
    except scenario.ScenarioError as e:
        die(str(e))
def command_map(args):
    """Handle ``colosseo map``: render the scenario arena to ``map.pdf``.

    Fix: re-joined tokens garbled by stray '|' separators in the source
    (``cl_par | se`` and the split ``sc.make_map`` line).
    """
    parser, options, args = cl_parse('map', args)
    # Default to the current directory when no scenario dir was given.
    if len(args) == 0:
        args.append('.')
    fn = get_scenario_yml(args[0])
    if not fn:
        parser.print_help()
        sys.exit(1)
    project_dir = args[0]
    gf_stores_path = op.join(project_dir, 'gf_stores')
    engine = get_engine([gf_stores_path])
    try:
        sc = guts.load(filename=fn)
        sc.init_modelling(engine)
        sc.make_map(op.join(project_dir, 'map.pdf'))
    except scenario.ScenarioError as e:
        die(str(e))
def command_snuffle(args):
    """Handle ``colosseo snuffle``: open the scenario waveforms in Snuffler."""
    from pyrocko.gui import snuffler
    # BUG FIX: previously parsed with cl_parse('map', ...), which made
    # --help show the 'map' subcommand's usage and description.
    parser, options, args = cl_parse('snuffle', args)
    # Default to the current directory when no scenario dir was given.
    if len(args) == 0:
        args.append('.')
    fn = get_scenario_yml(args[0])
    if not fn:
        parser.print_help()
        sys.exit(1)
    project_dir = args[0]
    gf_stores_path = op.join(project_dir, 'gf_stores')
    engine = get_engine([gf_stores_path])
    sc = guts.load(filename=fn)
    sc.init_modelling(engine)
    return snuffler.snuffle(
        sc.get_pile(),
        stations=sc.get_stations(),
        events=sc.get_events())
def main(args=None):
    """Command-line entry point: dispatch to ``command_<subcommand>``.

    *args* defaults to ``sys.argv[1:]``. ``help``/-h/--help print the
    global usage; ``help <subcommand>`` shows that subcommand's help.
    """
    if args is None:
        args = sys.argv[1:]
    if len(args) < 1:
        sys.exit('Usage: %s' % usage)
    command = args.pop(0)
    if command in subcommands:
        # Handlers are looked up by the command_<name> naming convention.
        globals()['command_' + command](args)
    elif command in ('--help', '-h', 'help'):
        if command == 'help' and args:
            acommand = args[0]
            if acommand in subcommands:
                globals()['command_' + acommand](['--help'])
        sys.exit('Usage: %s' % usage)
    else:
        sys.exit('%s: error: no such subcommand: %s' % (program_name, command))
def get_engine(gf_store_superdirs):
    """Create a `pyrocko.gf.LocalEngine` searching *gf_store_superdirs*.

    ``use_config=True`` also pulls store directories from the user's
    pyrocko configuration; the effective search path is logged.
    """
    engine = gf.LocalEngine(
        store_superdirs=gf_store_superdirs, use_config=True)
    logger.info(
        'Directories to be searched for GF stores:\n%s'
        % '\n'.join(' ' + s for s in engine.store_superdirs))
    return engine
if __name__ == '__main__':
main()
|
ECP-CANDLE/Supervisor | workflows/async-search/python/utils.py | Python | mit | 594 | 0.001684 | from string import Template
import re
import os
import sys
import time
import json
import math
import os
import subprocess
import csv
def saveResults(resultsList, json_fname, csv_fname, ):
    """Persist a list of result dicts as both JSON and CSV.

    Args:
        resultsList: list of dicts sharing the same keys; the first dict's
            keys define the CSV column order.
        json_fname: output path for the JSON dump.
        csv_fname: output path for the CSV dump.

    Fixes: an empty resultsList no longer raises IndexError (the CSV is
    simply skipped), and the CSV file is opened with newline='' as the
    csv module requires (prevents blank rows on Windows).
    """
    print(resultsList)
    print(json.dumps(resultsList, indent=4, sort_keys=True))
    with open(json_fname, 'w') as outfile:
        json.dump(resultsList, outfile, indent=4, sort_keys=True)
    if not resultsList:
        # Nothing to tabulate; previously this crashed on resultsList[0].
        return
    keys = resultsList[0].keys()
    with open(csv_fname, 'w', newline='') as output_file:
        dict_writer = csv.DictWriter(output_file, keys)
        dict_writer.writeheader()
        dict_writer.writerows(resultsList)
|
billiob/papyon | papyon/util/odict.py | Python | gpl-2.0 | 1,321 | 0.006056 | from UserDict import UserDict
class odict(UserDict):
    """Dictionary that preserves key insertion order.

    Python 2-era predecessor of `collections.OrderedDict`: a `UserDict`
    plus a parallel `_keys` list that records first-insertion order.
    `items()`/`values()` return iterators in that order (via `map`).

    Fix: re-joined tokens garbled by stray '|' separators in the source
    (`UserD | ict.clear` and `self._keys[ | -1]`).
    """
    def __init__(self, dict = None):
        # _keys must exist before UserDict.__init__, which may call
        # __setitem__ via update().
        self._keys = []
        UserDict.__init__(self, dict)

    def __delitem__(self, key):
        UserDict.__delitem__(self, key)
        self._keys.remove(key)

    def __setitem__(self, key, item):
        UserDict.__setitem__(self, key, item)
        # Re-assigning an existing key keeps its original position.
        if key not in self._keys: self._keys.append(key)

    def clear(self):
        UserDict.clear(self)
        self._keys = []

    def copy(self):
        dict = UserDict.copy(self)
        dict._keys = self._keys[:]
        return dict

    def items(self):
        return map(lambda key: (key, self[key]), self._keys)

    def keys(self):
        # Return a copy so callers cannot mutate the internal order list.
        return self._keys[:]

    def popitem(self):
        # LIFO: pop the most recently inserted key.
        try:
            key = self._keys[-1]
        except IndexError:
            raise KeyError('dictionary is empty')
        val = self[key]
        del self[key]
        return (key, val)

    def setdefault(self, key, failobj = None):
        if key not in self._keys: self._keys.append(key)
        return UserDict.setdefault(self, key, failobj)

    def update(self, dict):
        UserDict.update(self, dict)
        for key in dict.keys():
            if key not in self._keys: self._keys.append(key)

    def values(self):
        return map(self.get, self._keys)
|
albertfxwang/grizli | grizli/multifit.py | Python | mit | 180,455 | 0.010662 | """Functionality for manipulating multiple grism exposures simultaneously
"""
import os
import time
import glob
from collections import OrderedDict
import multiprocessing as mp
import scipy.ndimage as nd
import numpy as np
import matplotlib.pyplot as plt
from astropy.table import Table
import astropy.io.fits as pyfits
import astropy.wcs as pywcs
import astropy.units as u
## local imports
from . import utils
from . import model
#from . import stack
from .fitting import GroupFitter
from .utils_c import disperse
from .utils_c import interp
from .utils import GRISM_COLORS, GRISM_MAJOR, GRISM_LIMITS, DEFAULT_LINE_LIST
def test():
    """Interactive scratch walkthrough of the GroupFLT / MultiBeam workflow.

    NOTE(review): this is development scratch code, not a runnable test:
    it references undefined names (`ds9`, `i`, `grizlidev.model`, and
    Python-2 `reload`) and hard-coded local file paths. Kept for
    reference only.
    """
    import glob
    from grizlidev import utils
    import grizlidev.multifit
    reload(utils)
    reload(grizlidev.model)
    reload(grizlidev.multifit)
    files=glob.glob('i*flt.fits')
    output_list, filter_list = utils.parse_flt_files(files, uniquename=False)
    # grism_files = filter_list['G141'][164]
    # #grism_files.extend(filter_list['G141'][247])
    #
    # direct_files = filter_list['F140W'][164][:4]
    #direct_files.extend(filter_list['F140W'][247][:4])
    # grp = grizlidev.multifit.GroupFLT(grism_files=grism_files, direct_files=direct_files)
    #
    #
    # grp = grizlidev.multifit.GroupFLT(grism_files=grism_files, direct_files=direct_files, ref_file=ref)
    # ref = 'MACS0416-F140W_drz_sci_filled.fits'
    # seg = 'hff_m0416_v0.1_bkg_detection_seg_grow.fits'
    # catalog = 'hff_m0416_v0.1_f140w.cat'
    #
    # key = 'cl1301-11.3-122.5-g102'
    # seg = 'cl1301-11.3-14-122-f105w_seg.fits'
    # catalog = 'cl1301-11.3-14-122-f105w.cat'
    # #ref = 'cl1301-11.3-14-122-f105w_drz_sci.fits'
    # grism_files = output_list[key]
    # direct_files = output_list[key.replace('f105w','g102')]
    grism_files = filter_list['G141'][1]
    grism_files.extend(filter_list['G141'][33])
    grism_files = glob.glob('*cmb.fits')
    ref = 'F160W_mosaic.fits'
    seg = 'F160W_seg_blot.fits'
    catalog = '/Users/brammer/3DHST/Spectra/Work/3DHST_Detection/GOODS-N_IR.cat'
    direct_files = []
    reload(utils)
    reload(grizlidev.model)
    reload(grizlidev.multifit)
    grp = grizlidev.multifit.GroupFLT(grism_files=grism_files[:8], direct_files=direct_files, ref_file=ref, seg_file=seg, catalog=catalog)
    self = grp
    # Build a {id: {'mag', 'spec'}} request for all bright catalog objects.
    fit_info = {3286: {'mag':-99, 'spec': None},
                3279: {'mag':-99, 'spec': None}}
    fit_info = OrderedDict()
    bright = self.catalog['MAG_AUTO'] < 25
    ids = self.catalog['NUMBER'][bright]
    mags = self.catalog['MAG_AUTO'][bright]
    for id, mag in zip(ids, mags):
        fit_info[id] = {'mag':mag, 'spec': None}
    # Fast?
    #fit_info = {3212: {'mag':-99, 'spec': None}}
    #self.compute_single_model(3212)
    ### parallel
    self.compute_full_model(fit_info, store=False)
    ## Refine
    # Refine contamination models for intermediate-brightness objects.
    bright = (self.catalog['MAG_AUTO'] < 22) & (self.catalog['MAG_AUTO'] > 16)
    ids = self.catalog['NUMBER'][bright]*1
    mags = self.catalog['MAG_AUTO'][bright]*1
    so = np.argsort(mags)
    ids, mags = ids[so], mags[so]
    self.refine_list(ids, mags, ds9=ds9, poly_order=1)
    # bright = (self.catalog['MAG_AUTO'] < 22) & (self.catalog['MAG_AUTO'] > 16)
    # ids = self.catalog['NUMBER'][bright]*1
    # mags = self.catalog['MAG_AUTO'][bright]*1
    # so = np.argsort(mags)
    #
    # self.refine_list(ids, mags, ds9=ds9, poly_order=5)
    beams = self.get_beams(3212)
    ### serial
    t0 = time.time()
    out = _compute_model(0, self.FLTs[i], fit_info, False, False)
    t1 = time.time()
    #print t1-t0
    # Redshift fit for a single object via MultiBeam.
    id = 3219
    fwhm = 1200
    zr = [0.58,2.4]
    beams = grp.get_beams(id, size=30)
    mb = grizlidev.multifit.MultiBeam(beams)
    fit, fig = mb.fit_redshift(fwhm=fwhm, zr=zr, poly_order=3, dz=[0.003, 0.003])
    A, out_coeffs, chi2, modelf = mb.fit_at_z(poly_order=1)
    m2d = mb.reshape_flat(modelf)
def _loadFLT(grism_file, sci_extn, direct_file, pad, ref_file,
               ref_ext, seg_file, verbose, catalog, ix):
    """Helper function for loading `.model.GrismFLT` objects with `multiprocessing`.

    If a previously saved ``*.NN.GrismFLT.fits`` / ``.pkl`` pair exists for
    *grism_file*, the object is restored from that cache; otherwise a new
    `model.GrismFLT` is built from the raw files. The blotted *catalog*
    (if given) is attached, and NIRISS/NIRCAM exposures get their
    coordinate transform applied.

    *ix* is the exposure index in the caller's list (only used by the
    commented-out desynchronization code below).
    """
    import time
    try:
        import cPickle as pickle
    except:
        # Python 3
        import pickle
    ## slight random delay to avoid synchronization problems
    # np.random.seed(ix)
    # sleeptime = ix*1
    # print '%s sleep %.3f %d' %(grism_file, sleeptime, ix)
    # time.sleep(sleeptime)
    #print grism_file, direct_file
    # Cache-file name: '<root>.NN.GrismFLT.fits' for science extension NN.
    new_root = '.{0:02d}.GrismFLT.fits'.format(sci_extn)
    save_file = grism_file.replace('_flt.fits', new_root)
    save_file = save_file.replace('_flc.fits', new_root)
    save_file = save_file.replace('_cmb.fits', new_root)
    save_file = save_file.replace('_rate.fits', new_root)
    if (grism_file.find('_') < 0) & ('GrismFLT' not in grism_file):
        # Unrecognized naming scheme: force a cache miss with a dummy name.
        save_file = 'xxxxxxxxxxxxxxxxxxx'
    if os.path.exists(save_file):
        print('Load {0}!'.format(save_file))
        # Restore the pickled object, then re-attach the FITS data arrays.
        fp = open(save_file.replace('GrismFLT.fits', 'GrismFLT.pkl'), 'rb')
        flt = pickle.load(fp)
        fp.close()
        status = flt.load_from_fits(save_file)
    else:
        flt = model.GrismFLT(grism_file=grism_file, sci_extn=sci_extn,
                         direct_file=direct_file, pad=pad,
                         ref_file=ref_file, ref_ext=ref_ext,
                         seg_file=seg_file, shrink_segimage=True,
                         verbose=verbose)
    if flt.direct.wcs.wcs.has_pc():
        # Normalize PC-matrix WCS headers to the CD convention.
        for obj in [flt.grism, flt.direct]:
            obj.get_wcs()
    if catalog is not None:
        # NOTE(review): assumes `catalog` is an astropy Table with a
        # 'X_WORLD' column when it came from SExtractor — confirm.
        flt.catalog = flt.blot_catalog(catalog,
                                   sextractor=('X_WORLD' in catalog.colnames))
        flt.catalog_file = catalog
    else:
        flt.catalog = None
    if flt.grism.instrument in ['NIRISS', 'NIRCAM']:
        flt.transform_NIRISS()
    return flt #, out_cat
def _fit_at_z(self, zgrid, i, templates, fitter, fit_background, poly_order):
"""
For parallel processing
"""
# self, z=0., templates={}, fitter='nnls',
# fit_background=True, poly_order=0
print(i, zgrid[i])
out = self.fit_at_z(z=zgrid[i], templates=templates,
fitter=fitter, poly_order=poly_order,
fit_background=fit_background)
data = {'out':out, 'i':i}
return data
#A, coeffs[i,:], chi2[i], model_2d = out
def test_parallel():
    """Scratch demo of fitting a redshift grid with a multiprocessing pool.

    NOTE(review): not runnable as-is — `mb`, `self`, and the template
    machinery are not defined at module scope. Kept for reference.
    """
    zgrid = np.linspace(1.1,1.3,10)
    templates = mb.load_templates(fwhm=800)
    fitter = 'nnls'
    fit_background = True
    poly_order = 0
    self.FLTs = []
    t0_pool = time.time()
    # Fan the grid points out to 4 worker processes.
    pool = mp.Pool(processes=4)
    results = [pool.apply_async(_fit_at_z, (mb, zgrid, i, templates, fitter, fit_background, poly_order)) for i in range(len(zgrid))]
    pool.close()
    pool.join()
    chi = zgrid*0.
    # Reassemble results by the 'i' tag each worker returns.
    for res in results:
        data = res.get(timeout=1)
        A, coeffs, chi[data['i']], model_2d = data['out']
        #flt_i.catalog = cat_i
    t1_pool = time.time()
def _compute_model(i, flt, fit_info, is_cgs, store):
"""Helper function for computing model orders.
"""
for id in fit_info:
try:
status = flt.compute_model_orders(id=id, compute_size=True,
mag=fit_info[id]['mag'], in_place=True, store=store,
spectrum_1d = fit_info[id]['spec'], is_cgs=is_cgs,
verbose=False)
except:
print('Failed: {0} {1}'.format(flt.grism.parent_file, id))
continue
print('{0}: _compute_model Done'.format(flt.grism.parent_file))
return i, flt.model, flt.object_dispersers
class GroupFLT():
def __init__(self, grism_files=[], sci_extn=1, direct_files=[],
pad=200, group_name='group',
ref_file=None, ref_ext=0, seg_file=None,
shrink_segimage=True, verbose=True, cpu_count=0,
catalog='', |
not-na/peng3d | docs/pyglet/graphics/vertexdomain.py | Python | gpl-2.0 | 851 | 0.008226 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# vertexdomain.py
#
# Copyright 2016 notna <notna@apparat.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at | your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOU | T ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
|
nburn42/tensorflow | tensorflow/python/ops/linalg_grad.py | Python | apache-2.0 | 14,666 | 0.009 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradients for operators defined in linalg_ops.py.
Useful reference for derivative formulas is
An extended collection of matrix derivative results for forward and reverse
mode algorithmic differentiation by Mike Giles:
http://eprints.maths.ox.ac.uk/1079/1/NA-08-01.pdf
A detailed derivation of formulas for backpropagating through spectral layers
(SVD and Eig) by Ionescu, Vantzos & Sminchisescu:
https://arxiv.org/pdf/1509.07838v4.pdf
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.linalg import linalg_impl as _linalg
@ops.RegisterGradient("MatrixInverse")
def _MatrixInverseGrad(op, grad):
  """Gradient for MatrixInverse.

  Implements dA = -A^{-H} * dC * A^{-H}, reusing the forward op's
  output A^{-1} instead of re-inverting.
  """
  ainv = op.outputs[0]
  return -math_ops.matmul(
      ainv, math_ops.matmul(grad, ainv, adjoint_b=True), adjoint_a=True)
@ops.RegisterGradient("MatrixDeterminant")
def _MatrixDeterminantGrad(op, grad):
  """Gradient for MatrixDeterminant.

  Uses d(det A)/dA = det(A) * A^{-H}; `grad * c` is broadcast over the
  trailing two (matrix) dimensions via the reshape.
  """
  a = op.inputs[0]
  c = op.outputs[0]
  a_adj_inv = linalg_ops.matrix_inverse(a, adjoint=True)
  # Append two singleton dims so the per-matrix scalar grad*det broadcasts
  # against the [..., M, M] inverse.
  multipliers = array_ops.reshape(grad * c,
                                  array_ops.concat([array_ops.shape(c), [1, 1]],
                                                   0))
  return multipliers * a_adj_inv
@ops.RegisterGradient("Cholesky")
def _CholeskyGrad(op, grad):
  """Gradient for Cholesky.

  Given L from the forward op (A = L L^H) and dL, computes
  l^{-H} @ ((l^{H} @ grad) * (tril(ones)-1/2*eye)) @ l^{-1}, then
  symmetrizes, so the returned dA is Hermitian.
  """
  # Gradient is l^{-H} @ ((l^{H} @ grad) * (tril(ones)-1/2*eye)) @ l^{-1}
  l = op.outputs[0]
  num_rows = array_ops.shape(l)[-1]
  batch_shape = array_ops.shape(l)[:-2]
  # L^{-1} via a triangular solve against (batched) identity.
  l_inverse = linalg_ops.matrix_triangular_solve(l,
                                                 linalg_ops.eye(
                                                     num_rows,
                                                     batch_shape=batch_shape,
                                                     dtype=l.dtype))
  middle = math_ops.matmul(l, grad, adjoint_a=True)
  # Halve the diagonal and zero the strict upper triangle:
  # the (tril(ones) - 1/2*eye) mask from the formula above.
  middle = array_ops.matrix_set_diag(middle,
                                     0.5 * array_ops.matrix_diag_part(middle))
  middle = array_ops.matrix_band_part(middle, -1, 0)
  grad_a = math_ops.matmul(
      math_ops.matmul(l_inverse, middle, adjoint_a=True), l_inverse)
  # Symmetrize: dA and its adjoint contribute equally.
  grad_a += _linalg.adjoint(grad_a)
  return grad_a * 0.5
@ops.RegisterGradient("Qr")
def _QrGrad(op, dq, dr):
  """Gradient for Qr.

  Only implemented for real, square-R, statically-shaped inputs
  (nrows >= ncols with full_matrices=False); everything else raises
  NotImplementedError.
  """
  q, r = op.outputs
  if q.dtype.is_complex:
    raise NotImplementedError("QrGrad not implemented for dtype: %s" % q.dtype)
  if (r.shape.ndims is None or r.shape.as_list()[-2] is None or
      r.shape.as_list()[-1] is None):
    raise NotImplementedError("QrGrad not implemented with dynamic shapes.")
  if r.shape[-2].value != r.shape[-1].value:
    raise NotImplementedError("QrGrad not implemented when ncols > nrows "
                              "or full_matrices is true and ncols != nrows.")

  # Antisymmetric parts of Q^T dQ and R dR^T feed the triangular correction.
  qdq = math_ops.matmul(q, dq, adjoint_a=True)
  qdq_ = qdq - _linalg.adjoint(qdq)
  rdr = math_ops.matmul(r, dr, adjoint_b=True)
  rdr_ = rdr - _linalg.adjoint(rdr)
  tril = array_ops.matrix_band_part(qdq_ + rdr_, -1, 0)

  def _TriangularSolve(x, r):
    """Equiv to matmul(x, adjoint(matrix_inverse(r))) if r is upper-tri."""
    return _linalg.adjoint(
        linalg_ops.matrix_triangular_solve(
            r, _linalg.adjoint(x), lower=False, adjoint=False))

  grad_a = math_ops.matmul(q, dr + _TriangularSolve(tril, r))
  grad_b = _TriangularSolve(dq - math_ops.matmul(q, qdq), r)
  return grad_a + grad_b
@ops.RegisterGradient("MatrixSolve")
def _MatrixSolveGrad(op, grad):
  """Gradient for MatrixSolve.

  For C = A^{-1} B: dB = A^{-H} dC and dA = -dB C^H (transposed
  appropriately when the forward op used adjoint=True).
  """
  a = op.inputs[0]
  adjoint_a = op.get_attr("adjoint")
  c = op.outputs[0]
  # Solve with the opposite adjoint setting to apply A^{-H}.
  grad_b = linalg_ops.matrix_solve(a, grad, adjoint=not adjoint_a)
  if adjoint_a:
    grad_a = -math_ops.matmul(c, grad_b, adjoint_b=True)
  else:
    grad_a = -math_ops.matmul(grad_b, c, adjoint_b=True)
  return (grad_a, grad_b)
@ops.RegisterGradient("MatrixSolveLs")
def _MatrixSolveLsGrad(op, grad):
  """Gradients for MatrixSolveLs.

  Dispatches between the over- and underdetermined normal-equation
  backprops based on the matrix shape (statically when known, otherwise
  with a runtime cond). Fix: re-joined tokens garbled by stray '|'
  separators in the source (a docstring line and `linalg_ops`).
  """

  # TODO(rmlarsen): The implementation could be more efficient:
  # a) Output the Cholesky factorization from forward op instead of
  #    recomputing it here.
  # b) Implement a symmetric rank-k update op instead of computing
  #    x*z + transpose(x*z). This pattern occurs other places in TensorFlow.

  def _Overdetermined(op, grad):
    """Gradients for the overdetermined case of MatrixSolveLs.

    This is the backprop for the solution to the normal equations of the first
    kind:
       X = F(A, B) = (A^T * A + lambda * I)^{-1} * A^T * B
    which solve the least squares problem
       min ||A * X - B||_F^2 + lambda ||X||_F^2.
    """
    a = op.inputs[0]
    b = op.inputs[1]
    x = op.outputs[0]
    l2_regularizer = math_ops.cast(op.inputs[2], a.dtype.base_dtype)
    # pylint: disable=protected-access
    chol = linalg_ops._RegularizedGramianCholesky(
        a, l2_regularizer=l2_regularizer, first_kind=True)
    # pylint: enable=protected-access
    # Temporary z = (A^T * A + lambda * I)^{-1} * grad.
    z = linalg_ops.cholesky_solve(chol, grad)
    xzt = math_ops.matmul(x, z, adjoint_b=True)
    zx_sym = xzt + array_ops.matrix_transpose(xzt)
    grad_a = -math_ops.matmul(a, zx_sym) + math_ops.matmul(b, z, adjoint_b=True)
    grad_b = math_ops.matmul(a, z)
    return (grad_a, grad_b, None)

  def _Underdetermined(op, grad):
    """Gradients for the underdetermined case of MatrixSolveLs.

    This is the backprop for the solution to the normal equations of the second
    kind:
      X = F(A, B) = A * (A*A^T + lambda*I)^{-1} * B
    that (for lambda=0) solve the least squares problem
      min ||X||_F subject to A*X = B.
    """
    a = op.inputs[0]
    b = op.inputs[1]
    l2_regularizer = math_ops.cast(op.inputs[2], a.dtype.base_dtype)
    # pylint: disable=protected-access
    chol = linalg_ops._RegularizedGramianCholesky(
        a, l2_regularizer=l2_regularizer, first_kind=False)
    # pylint: enable=protected-access
    grad_b = linalg_ops.cholesky_solve(chol, math_ops.matmul(a, grad))
    # Temporary tmp = (A * A^T + lambda * I)^{-1} * B.
    tmp = linalg_ops.cholesky_solve(chol, b)
    a1 = math_ops.matmul(tmp, a, adjoint_a=True)
    a1 = -math_ops.matmul(grad_b, a1)
    a2 = grad - math_ops.matmul(a, grad_b, adjoint_a=True)
    a2 = math_ops.matmul(tmp, a2, adjoint_b=True)
    grad_a = a1 + a2
    return (grad_a, grad_b, None)

  fast = op.get_attr("fast")
  if fast is False:
    raise ValueError("Gradient not defined for fast=False")
  matrix_shape = op.inputs[0].get_shape()[-2:]
  if matrix_shape.is_fully_defined():
    if matrix_shape[-2] >= matrix_shape[-1]:
      return _Overdetermined(op, grad)
    else:
      return _Underdetermined(op, grad)
  else:
    # We have to defer determining the shape to runtime and use
    # conditional execution of the appropriate graph.
    matrix_shape = array_ops.shape(op.inputs[0])[-2:]
    return control_flow_ops.cond(matrix_shape[-2] >= matrix_shape[-1],
                                 lambda: _Overdetermined(op, grad),
                                 lambda: _Underdetermined(op, grad))
@ops.RegisterGradient("MatrixTriangularSolve")
def _MatrixTriangularSolveGrad(op, grad):
"""Gradient for MatrixTriangularSolve."""
a = op.inputs[0]
adjoint_a = |
jplusplus/dystopia-tracker | app/core/migrations/0001_initial.py | Python | lgpl-3.0 | 9,322 | 0.008904 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Create the Category, Source, Prediction and Realisation tables.

        South schema migration: frozen by design — do not edit the field
        definitions after this migration has been applied anywhere.
        """
        # Adding model 'Category'
        db.create_table(u'core_category', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=75)),
        ))
        db.send_create_signal(u'core', ['Category'])
        # Adding model 'Source'
        db.create_table(u'core_source', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('type', self.gf('django.db.models.fields.CharField')(max_length=20)),
            ('title', self.gf('django.db.models.fields.CharField')(max_length=75)),
            ('author', self.gf('django.db.models.fields.CharField')(max_length=75)),
            ('year_published', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('series_season', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('series_episode', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)),
            ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)),
        ))
        db.send_create_signal(u'core', ['Source'])
        # Adding model 'Prediction'
        db.create_table(u'core_prediction', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('source', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Source'])),
            ('category', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Category'])),
            ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('year_predicted', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('headline_E', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('headline_D', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)),
            ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)),
            ('username', self.gf('django.db.models.fields.CharField')(max_length=75)),
            ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('edition_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('published', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal(u'core', ['Prediction'])
        # Adding model 'Realisation'
        db.create_table(u'core_realisation', (
            (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('prediction', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['core.Prediction'])),
            ('description_E', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('description_D', self.gf('django.db.models.fields.TextField')(max_length=300)),
            ('year_introduced', self.gf('django.db.models.fields.PositiveIntegerField')()),
            ('more_info', self.gf('django.db.models.fields.URLField')(max_length=200, blank=True)),
            ('image', self.gf('django.db.models.fields.files.ImageField')(max_length=100, blank=True)),
            ('image_credit', self.gf('django.db.models.fields.CharField')(max_length=75, blank=True)),
            ('creation_date', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
            ('edition_date', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, blank=True)),
            ('published', self.gf('django.db.models.fields.BooleanField')(default=False)),
        ))
        db.send_create_signal(u'core', ['Realisation'])
def backwards(self, orm):
    """Reverse the migration: drop every table created in forwards()."""
    # Same deletion order as the original migration:
    # Category, Source, Prediction, Realisation.
    for table_name in (u'core_category', u'core_source',
                       u'core_prediction', u'core_realisation'):
        db.delete_table(table_name)
models = {
u'core.category': {
'Meta': {'object_name': 'Category'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharFiel | d', [], {'max_length': '75'})
},
u'core.prediction': {
'Meta': {'object_name': 'Prediction'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Category']"}),
| 'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
'description_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
'edition_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'headline_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
'headline_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'image_credit': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}),
'more_info': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'source': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Source']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '75'}),
'year_predicted': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'core.realisation': {
'Meta': {'object_name': 'Realisation'},
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description_D': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
'description_E': ('django.db.models.fields.TextField', [], {'max_length': '300'}),
'edition_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}),
'image_credit': ('django.db.models.fields.CharField', [], {'max_length': '75', 'blank': 'True'}),
'more_info': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'prediction': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['core.Prediction']"}),
'published': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'year_introduced': ('django.db.models.fields.PositiveIntegerField', [], {})
},
u'core.source': {
'Meta': {'object_name': 'Source'},
'author': ('djan |
raspibo/Livello1 | var/www/cgi-bin/valori2csv_search_date.py | Python | mit | 2,996 | 0.010013 | #!/usr/bin/env python3
"""
The MIT License (MIT)
Copyright (c) 2016 davide
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, dist | ribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in al | l
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
""" Prende i dati dalla chiave Redis (*:Valori) passata come argomento all'avvio,
cerca fra i parametri si start e stop (data minore, data maggiore),
elabora e ricrea il file .csv
"""
import os,time,json,redis,sys
import mjl, mhl, flt # Non servono tutte, ormai le metto d'abitudine ;)
DirBase="/var/www" # Meglio specificare il percorso assoluto
ConfigFile=DirBase+"/conf/config.json"
# Apro il database Redis con l'istruzione della mia libreria
MyDB = flt.OpenDBFile(ConfigFile)
# Controllo se piu` di un argomento o se richiesto l'help
if len(sys.argv) != 4 or sys.argv[1] == "-h":
print ("\n\tUso: %s <RedisKey> <Start> <Stop>" % sys.argv[0])
print ("""
Questo programma prende una chiave Redis contenente i valori (*:Valori),
elabora, e crea il file .csv
""")
exit()
if len(sys.argv) == 4 and MyDB.exists(sys.argv[1]):
# Setto le variabili per comodita` e chiarezza di programma
Key=sys.argv[1]
print ("Key: \t\t\t", Key)
# Ho usato il secondo e terzo valore (sets:NOME:ID), perche potrebbero esserci dei duplicati fra allarmi e grafici e .. altro (se ci sara`)
FileName=DirBase+"/"+Key.split(":")[4]+Key.split(":")[5]+".csv"
if os.path.isfile(FileName):
print ("Deleting: \t\t\"%s\"" % FileName)
os.remove(FileName) # Elimino il file se esiste
IntestazioneCSV="Data"
IntestazioneCSV=IntestazioneCSV+","+Key.split(":")[4] # 4 e` il tipo (temperatura/pir/..)
FileTemp = open(FileName,"w")
FileTemp.write(IntestazioneCSV+"\n") # Scrittura intestazione
for i in range (MyDB.llen(Key)):
ValoreCSV=flt.Decode(MyDB.lindex(Key,i))
if sys.argv[2] < ValoreCSV < sys.argv[3] :
FileTemp.write(ValoreCSV+"\n")
FileTemp.close()
print ("[re]Generated file: \t\"{}\"".format(FileName))
elif not MyDB.exists(sys.argv[1]):
print ("Chiave inesistente", sys.argv[1])
|
lukereding/mateChoiceTracking | low_light_tracker.py | Python | apache-2.0 | 17,260 | 0.025203 | import numpy as np
import cv2, csv, os, re, sys, time, argparse, datetime
'''
started 25 August 2015
31 August 2015:
modifying script so that it queries frames from a video taken with ffmpeg
10 Nov 2015
modifying for use in low light LCD tank (filters over the four flourescent light)
assumes there are four 'parts' to your video of equal length. this only affects some of the stats the program prints at the end
important: the long side of the tank must be perpendicular to the camera view
help menu: python realTimeTracker.py --help
arguments:
--pathToVideo: full or relative path to video file
--videoName: used to save files associated with the trial. required
example of usage: python realTimeTracker.py -i /Users/lukereding/Desktop/Bertha_Scototaxis.mp4 -n Bertha -f 10
'''
# Log the wall-clock time at which the run started.
print time.strftime('%X %x %Z')
# initialize some constants, lists, csv writer
# construct the argument parse and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-i", "--pathToVideo", help = "integer that represents either the relative or full path to the video you want to analyze",nargs='?',default=0)
ap.add_argument("-n", "--videoName", help = "name of the video to be saved",required=True)
ap.add_argument("-f", "--fps", help = "frames per second of the video",required=True)
ap.add_argument("-b", "--bias", help = "proportion of time a fish spends on either the right or lefthand side of the tank to be declared side bias. defaults to 0.75",nargs='?',default=0.75)
args = ap.parse_args()
# print arguments to the screen
print("\n\n\tinput path: {}".format(args.pathToVideo))
print("\tname of trial: {}".format(args.videoName))
print("\tfps of video: {}".format(args.fps))
print("\tbias: {}".format(args.bias))
# Re-parse into a dict so the values can be read with subscript syntax below.
args = vars(ap.parse_args())
fps = args["fps"]
bias = args["bias"]
# NOTE(review): when -b is supplied on the command line argparse leaves it as
# a str (only the default is a float); the comparison below then relies on
# Python 2 mixed-type ordering — confirm, or cast with float(bias).
if bias > 1 or bias < 0:
    sys.exit("bias (-b) must be between 0 and 1")
# calculate the time that the program should start the main loop
start_time = time.time()
# HSV threshold bounds for the (dark) fish: any hue/saturation, value
# (brightness) between 0 and 20.
lower = np.array([0,0,0])
upper = np.array([255,255,20])
counter = 0  # frame counter
# output to csv file where the results will be written
name = args["videoName"]
print "name of csv file: " + str(name) + ".csv"
myfile = open(name+".csv",'wb')
csv_writer = csv.writer(myfile, quoting=csv.QUOTE_NONE)
csv_writer.writerow(("x","y","frame"))
# for drawing the rectangle around the tank at the beginning of the trial
drawing = False # true if mouse is pressed
ix,iy = -1,-1
# print python version
print "python version:\n"
print sys.version
######################
# declare some functions:####
#####################################
def checkSideBias(left, right, neutral, bias):
    """Classify a trial's side preference.

    left/right/neutral -- frame counts spent on each side of the tank
    bias -- proportion of total frames that constitutes a side bias

    Returns "left side bias", "right side bias", or "looks good".
    Left is checked first, so a tie at the threshold reports left.
    """
    threshold = bias * (left + right + neutral)
    if left >= threshold:
        return "left side bias"
    if right >= threshold:
        return "right side bias"
    return "looks good"
def printUsefulStuff(listOfSides,fps,biasProp):
    """Print per-part association-time statistics and bias checks for a trial.

    listOfSides -- per-frame side labels: "left", "right", or "neutral"
    fps         -- frames per second (coerced to int; seconds are computed
                   as frames/fps, integer division under Python 2)
    biasProp    -- proportion threshold forwarded to checkSideBias()

    The frame list is split into four parts by hard-coded fractions
    (0-0.2381, 0.2381-0.4762, 0.5238-0.7619, 0.7619-1.0).  Frames between
    0.4762 and 0.5238 are not counted in any part.  Stimuli are present
    during parts 2 and 3, so the bias and neutral-zone checks use those.
    """
    fps = int(fps)
    # print realized fps for the trial
    print "\ntotal frames: " + str(len(listOfSides))
    # now subset the list of sides into four parts. each will be a quarter of the total length of the list
    # there is probably a better way to do this, but I don't know what it is
    leftPart1 = listOfSides[0:int(len(listOfSides)*0.2381)].count("left")
    rightPart1 = listOfSides[0:int(len(listOfSides)*0.2381)].count("right")
    neutralPart1 = listOfSides[0:int(len(listOfSides)*0.2381)].count("neutral")
    # stimuli here
    leftPart2 = listOfSides[int(len(listOfSides)*0.2381):int(len(listOfSides)*0.4762)].count("left")
    rightPart2 = listOfSides[int(len(listOfSides)*0.2381):int(len(listOfSides)*0.4762)].count("right")
    neutralPart2 = listOfSides[int(len(listOfSides)*0.2381):int(len(listOfSides)*0.4762)].count("neutral")
    # stimuli here
    leftPart3 = listOfSides[int(len(listOfSides)*0.5238):int(len(listOfSides)*0.7619)].count("left")
    rightPart3 = listOfSides[int(len(listOfSides)*0.5238):int(len(listOfSides)*0.7619)].count("right")
    neutralPart3 = listOfSides[int(len(listOfSides)*0.5238):int(len(listOfSides)*0.7619)].count("neutral")
    leftPart4 = listOfSides[int(len(listOfSides)*0.7619):len(listOfSides)].count("left")
    rightPart4 = listOfSides[int(len(listOfSides)*0.7619):len(listOfSides)].count("right")
    neutralPart4 = listOfSides[int(len(listOfSides)*0.7619):len(listOfSides)].count("neutral")
    # print association time stats to the screen for each part
    print "------------------------------\n\n\n\n\n\n\nassociation time statistics for each part of the trial:"
    print "\n\npart 1:\nframes 0 - " + str(int(len(listOfSides)*0.2381))
    print "seconds left: " + str(leftPart1/fps)
    print "seconds right: " + str(rightPart1/fps)
    print "seconds neutral: " + str(neutralPart1/fps) + "\n"
    print checkSideBias(leftPart1,rightPart1,neutralPart1,biasProp)
    # print association time stats to the screen for each part
    print "\n\npart 2:\nframes " + str(int(len(listOfSides)*0.2381)) + " - " + str(int(len(listOfSides)*0.4762))
    print "seconds left: " + str(leftPart2/fps)
    print "seconds right: " + str(rightPart2/fps)
    print "seconds neutral: " + str(neutralPart2/fps) + "\n"
    print checkSideBias(leftPart2,rightPart2,neutralPart2,biasProp)
    # print association time stats to the screen for each part
    print "\n\npart 3:\nframes " + str(int(len(listOfSides)*0.5238)) + " - " + str(int(len(listOfSides)*0.7619))
    print "seconds left: " + str(leftPart3/fps)
    print "seconds right: " + str(rightPart3/fps)
    print "seconds neutral: " + str(neutralPart3/fps) + "\n"
    print checkSideBias(leftPart3,rightPart3,neutralPart3,biasProp)
    # print association time stats to the screen for each part
    print "\n\npart 4:\n" + str(int(len(listOfSides)*0.7619)) + " - " + str(int(len(listOfSides)))
    print "seconds left: " + str(leftPart4/fps)
    print "seconds right: " + str(rightPart4/fps)
    print "seconds neutral: " + str(neutralPart4/fps) + "\n"
    print checkSideBias(leftPart4,rightPart4,neutralPart4,biasProp)
    ## check for side bias in the two parts where stimuli were present:
    print "\n\nchecking side bias for parts 2 and 3, where male stimuli were present:\n\n"
    print "left: " + str((leftPart2+leftPart3)/fps) + " seconds\nright: " + str((rightPart2+rightPart3)/fps) + "seconds\nneutral: " + str((neutralPart2+neutralPart3)/fps) + " seconds"
    bias = checkSideBias(leftPart2+leftPart3,rightPart3+rightPart2,neutralPart3+neutralPart2,biasProp)
    print bias
    # check for time spent in the neutral zone
    print "\nchecking for to see whether the fish spend > 50% of the trial in the neutral part of the tank:\n"
    time_neutral = int((neutralPart2+neutralPart3)/fps)
    print "time in neutral zone during parts 2 and 3: " + str(time_neutral)
    # NOTE(review): 300 s is treated as half of the parts-2+3 period —
    # presumably those two parts total 600 s at this fps; confirm.
    if time_neutral > 300:
        print "female spent more than half the time in the neutral zone. RETEST FEMALE."
    if bias != "looks good":
        print "\tFEMALE MUST BE RE-TESTED. SET ASIDE FEMALE AND RE-TEST AT A LATER DATE"
# set up video writer to save the video
def setupVideoWriter(width, height, videoName):
    """Create an MP4V-codec cv2.VideoWriter that records at 5 fps.

    The output file is <current working directory>/<videoName>.avi.
    Returns (writer, full path of the output file).
    """
    codec = cv2.cv.CV_FOURCC('m', 'p', '4', 'v')
    # String concatenation (not os.path.join) kept on purpose: it preserves
    # the original behavior for every possible videoName value.
    output_path = os.getcwd() + '/' + videoName + ".avi"
    writer = cv2.VideoWriter(output_path, codec, 5.0, (int(width), int(height)))
    return writer, output_path
# converts a frame to HSV, blurs it, masks it to only get the tank by itself
## TO DO: get rid of tank bounds as global variables, include as arguments to this function
def convertToHSV(frame):
    """Blur a BGR frame, convert it to HSV, and black out everything outside the tank.

    Relies on module-level globals: camHeight/camWidth (frame dimensions)
    and lower_bound/top_bound/left_bound/right_bound (tank rectangle,
    assigned elsewhere in the script -- see TO DO above).
    """
    # blur image to make color uniform
    blurred = cv2.blur(frame,(7,7))
    # convert to hsv
    hsv = cv2.cvtColor(blurred, cv2.COLOR_BGR2HSV)
    # apply mask to get rid of stuff outside the tank
    mask = np.zeros((camHeight, camWidth, 3),np.uint8)
    # use rectangle bounds for masking: copy only the tank region of the HSV
    # image into the black canvas, leaving everything else zeroed
    mask[lower_bound:top_bound,left_bound:right_bound] = hsv[lower_bound:top_bound,left_bound:right_bound]
    return mask
# returns centroid from largest contour from a binary image
def returnLargeContour(frame,totalVideoPixels):
potential_centroids = []
# find all contours in the frame
contours = cv2.findContours(frame,cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)[0]
print "n |
bertrandvidal/stuff | djangoprojects/django_rest_framework/tutorial/snippets/urls.py | Python | unlicense | 568 | 0.001761 | from django.conf.urls import url, include
from snippets import views |
from rest_framework.routers import DefaultRouter
# Create a router and register our viewsets with it.
router = DefaultRouter()
router.register(r'snippets', views.SnippetViewSet)
router.register(r'users', views.UserViewSet)
# The API URLs are now determined automatically by the router.
# Additionally, we include the login URLs for the browsable API.
urlpatterns = [
    # All router-generated endpoints are mounted at the site root.
    url(r'^', include(router.urls)),
    # Login URLs for the browsable API, under the 'rest_framework' namespace.
    url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework'))
]
|
dufferzafar/mitmproxy | netlib/http/multipart.py | Python | mit | 898 | 0.001114 | import re
from netlib.http import headers
def decode(hdrs, content):
    """
    Takes a multipart boundary encoded string and returns list of (key, value) tuples.
    """
    content_type = hdrs.get("content-type")
    if not content_type:
        return []
    parsed = headers.parse_content_type(content_type)
    if not parsed:
        return []
    try:
        boundary = parsed[2]["boundary"].encode("ascii")
    except (KeyError, UnicodeError):
        return []
    name_pattern = re.compile(br'\bname="([^"]+)"')
    fields = []
    for segment in content.split(b"--" + boundary):
        lines = segment.splitlines()
        # Skip the preamble/epilogue and the terminating "--" segment.
        if len(lines) > 1 and lines[0][0:2] != b"--":
            name_match = name_pattern.search(lines[1])
            if name_match:
                # The value starts one line after the first blank line that
                # separates the part headers from the part body.
                body_start = 3 + lines[2:].index(b"")
                fields.append((name_match.group(1), b"".join(lines[body_start:])))
    return fields
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.