repo_name (string, 5-100 chars) | path (string, 4-231 chars) | language (string, 1 class) | license (string, 15 classes) | size (int64, 6-947k) | score (float64, 0-0.34) | prefix (string, 0-8.16k chars) | middle (string, 3-512 chars) | suffix (string, 0-8.17k chars) |
|---|---|---|---|---|---|---|---|---|
wzyuliyang/ceph-note | aws2/python/usage_count/putCurrentBucket.py | Python | mit | 656 | 0.012195 | import requests
import logging
from datetime import *
from requests_toolbelt.utils import dump
from awsauth import S3Auth
# host = 'yuliyangdebugwebjewel.tunnel.qydev.com'
host = 'yuliyangdebugweb68.tunnel.qydev.com'
host = '10.254.9.20:7480'
host = '127.0.0.1:7480'
logging.basicConfig(level=logging.DEBUG)
access_key = 'date2'
secret_key = 'date2'
#cmd = '/1034CST'
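# Build a unique bucket name from the current date and time, e.g.
# /2017-01-02.13-45-30-123456CST (colons/dots in the time become dashes).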
cmd = '/%sCST' % (str(datetime.now().date())+"."+str(datetime.now().time()).replace(':','-').replace('.','-'),)
url = 'http://%s%s' % (host,cmd)
response = requests.put(url,auth=S3Auth(access_key, secret_key,service_url=host))
data = dump.dump_all(response)
print(data.decode('utf-8'))
|
RedhawkSDR/framework-codegen | redhawk/packagegen/templates/__init__.py | Python | lgpl-3.0 | 847 | 0.002361 | #
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK code-generator.
#
# REDHAWK code-generator is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK code-generator is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
|
Vayne-Lover/Python | dictionary/test1.py | Python | apache-2.0 | 828 | 0.066425 | #!/usr/local/bin/python
#items=[('a','b'),(1,2)]
#b=dict(items)
#print b
#c=dict(name='c',age=42)
#print c
#print len(c)
#c['sex']='female'
#print c
#del c['age']
#print c
#print 'sex' in c
#c['age']=25
#print c
#print c.clear()
#print c
#x={'name':'a','age':'14'}
#y=x
#print y
#z=y.copy()
#z.clear()
#print x
#print y
#print z
#x={'name':'a','age':14}
#y=x.copy()
#y['age']=25
#print x
#print y
#a={}
#b=a.fromkeys(['name','age'],'(hahaha)')
#print b
#print b.get('name')
#print b.get('hi','N/A')
#c={}
#print c.has_key('name')
#c['name']='Eric'
#print c.has_key('name')
#x={'name':'a','age':'14'}
#print x.items()
#print x.pop('age')
#print x
#y={}
#print y.setdefault('name','N/A')
#print y
#y['name']='Apple'
#y.setdefault('name','N/A')
#print y
x={'a':'1','b':'2','c':'3'}
y={'c':'5'}
x.update(y)
print x
print x.values()
|
abrt/faf | src/pyfaf/storage/migrations/versions/1b264b21ca91_add_semrel_to_build.py | Python | gpl-3.0 | 2,436 | 0.002874 | # Copyright (C) 2014 ABRT Team
# Copyright (C) 2014 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
"""
Add semrel to build
Revision ID: 1b264b21ca91
Revises: 4ff13674a015
Create Date: 2015-03-02 14:59:34.502070
"""
from alembic.op import add_column, get_bind, alter_column, drop_column, create_index
import sqlalchemy as sa
from pyfaf.storage import custom_types #pylint: disable=import-error
# revision identifiers, used by Alembic.
revision = "1b264b21ca91"
down_revision = "4ff13674a015"
metadata = sa.MetaData()
def upgrade() -> None:
add_column("builds", sa.Column("semrel", custom_types.Semver(),
nullable=True))
build = sa.Table("builds", metadata,
sa.Column("id", sa.Integer),
sa.Column("base_package_name", sa.String(length=64)),
sa.Column("projrelease_id" | , sa.Integer),
sa.Column("epoch", sa.Integer),
sa.Column("version", sa.String(length=64)),
sa.Column("release", sa.String(length=64)),
| sa.Column("semver", custom_types.Semver()),
sa.Column("semrel", custom_types.Semver()),
)
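    # Backfill the new column: convert each existing build's release string
    # into a semantic-version value via the database-side to_semver() function.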
for b in get_bind().execute(sa.select([build.c.id, build.c.release])):
bid, brel = b
brel = custom_types.to_semver(brel)
get_bind().execute((build.update() #pylint: disable=no-value-for-parameter
.where(build.c.id == bid)
.values(semrel=sa.func.to_semver(brel))))
alter_column("builds", sa.Column("semrel", custom_types.Semver(),
nullable=False))
create_index("ix_builds_semrel", "builds", ["semrel"])
def downgrade() -> None:
drop_column("builds", "semrel")
|
mark-burnett/filament-dynamics | actin_dynamics/numerical/zero_crossings.py | Python | gpl-3.0 | 1,179 | 0.000848 | # Copyright (C) 2011 Mark Burnett
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy
from . import interpolation
def zero_crossings(x, y, interpolate=True):
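    # numpy.diff(numpy.sign(y)) is nonzero exactly where y changes sign;
    # numpy.where(...)[0] yields the index just before each crossing.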
zero_crossings = numpy.where(numpy.diff(numpy.sign(y)))[0]
if not interpolate:
return [x[i] for i in zero_crossings]
results = []
for i in zero_crossings:
results.append(interpolation.linear_project(y[i], x[i],
y[i+1], x[i+1],
0))
return results
|
jabbalaci/FirefoxChecker | systray.py | Python | mit | 4,260 | 0.000704 | #!/usr/bin/env python2
# encoding: utf-8
"""
A system tray icon indicates whether Firefox is running or not.
I use Firefox and I have lots of tabs open in it (I'm too lazy to delete
the old ones). As a result, when I close Firefox before shutting down
my machine, Firefox needs several seconds to fully close (on one of
my machines it's sometimes 20 seconds). The Firefox window disappears but
the process is still in the memory. If I shut down my machine at this time,
next time I reboot the machine and start Firefox I get a recovery message that
asks if I want to restore the tabs since FF wasn't shut down tidily.
So after closing FF I used to run the command "top" to watch for FF to
fall out of memory. It was boring.
So I wrote this little program. It puts an icon in the system tray and indicates
if FF is running. The icon is colored if FF is running, otherwise it turns
grayscale.
Author: Laszlo Szathmary, alias Jabba Laci, 2016
Email: jabba.laci@gmail.com
GitHub: https://github.com/jabbalaci/FirefoxChecker
"""
from __future__ import (absolute_import, division,
print_function, unicode_literals)
import sys
from time import sleep
import psutil
from PySide import QtGui
from PySide.QtCore import QThread, Signal
import systray_rc
#PROCESS_NAME = 'gedit' # for testing
PROCESS_NAME = 'firefox'
WAIT = 1.0
def is_process_running(name):
"""
Tell if a process is running.
The proc object is cached so it doesn't need to be looked up every time.
"""
if not hasattr(is_process_running, "proc"):
is_process_running.proc = None # it doesn't exist yet, so init it
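    # Fast path: re-check the cached psutil.Process object instead of
    # scanning the whole process table on every call.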
if is_process_running.proc:
if is_process_running.proc.is_running():
return True
else:
is_process_running.proc = None
return False
else:
for p in psutil.process_iter():
if p.name() == name:
is_process_running.proc = p
return True
#
return False
class FirefoxCheckThread(QThread):
changeIcon = Signal(int)
def __init__(self, parent):
super(FirefoxCheckThread, self).__init__(parent)
self.parent = parent
self.go = True
def stop(self):
self.go = False
sleep(WAIT + 0.1)
def run(self):
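        # Poll once per WAIT seconds; emit 0 (colored icon) while the
        # watched process is running, 1 (grayscale icon) otherwise.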
while self.go:
if is_process_running(PROCESS_NAME):
self.changeIcon.emit(0)
else:
self.changeIcon.emit(1)
#
sleep(WAIT)
class Window(QtGui.QDialog):
def __init__(self):
super(Window, self).__init__()
self.collectIcons()
self.createActions()
self.createTrayIcon()
self.currentIcon = None # not yet set
if is_process_running(PROCESS_NAME):
self.setIcon(0)
else:
self.setIcon(1)
self.trayIcon.show()
self.firefoxCheckThread = FirefoxCheckThread(self)
self.firefoxCheckThread.start()
self.firefoxCheckThread.changeIcon.connect(self.setIcon)
def setIcon(self, index):
if index == self.currentIcon:
return
# else
icon = self.icons[index]
self.trayIcon.setIcon(icon)
self.currentIcon = index
def collectIcons(self):
self.icons = []
self.icons.append(QtGui.QIcon(':/images/firefox.svg'))
self.icons.append(QtGui.QIcon(':/images/firefox_bw.svg'))
def createActions(self):
self.quitAction = QtGui.QAction("&Quit", self, triggered=self.myQuit)
def myQuit(self):
self.firefoxCheckThread.stop()
QtGui.qApp.quit()
def createTrayIcon(self):
self.trayIconMenu = QtGui.QMenu(self)
self.trayIconMenu.addAction(self.quitAction)
self.trayIcon = QtGui.QSystemTrayIcon(self)
self.trayIcon.setContextMenu(self.trayIconMenu)
if __name__ == '__main__':
app = QtGui.QApplication(sys.argv)
if not QtGui.QSystemTrayIcon.isSystemTrayAvailable():
QtGui.QMessageBox.critical(None, "Systray",
"I couldn't detect any system tray on this system.")
sys.exit(1)
QtGui.QApplication.setQuitOnLastWindowClosed(False)
window = Window()
sys.exit(app.exec_())
|
vtemian/buffpy | examples/profiles.py | Python | mit | 878 | 0 | from pprint import pprint as pp
from colorama import Fore
from buffpy.api import API
from buffpy.managers.profiles import Profiles
# check http://bufferapp.com/developers/apps to retrieve a token
# or generate one with the example
token = "awesome_token"
# instantiate the api object
api = API(client_id="client_id",
client_secret="clien | t_secret",
access_token=token)
# get all profiles
profiles = Profiles(api=api)
print(profiles.all())
# filter profiles using some criteria
profile = Profiles(api=api).filter(service="twitter")[0]
print(profile)
# get schedules of my twitter profile
profile = Profiles(api=api).filter(service="twitter")[0]
print(profile.schedules)
# update schedules times for my twitter profile
profile = Profiles(api=api).filter(service="twitter")[0]
profile.schedules = {
"days": ["tue", "thu"],
"times": ["13:45"]
}
|
lumig242/Video-Share-System | comments/models.py | Python | mit | 335 | 0.00597 | from django.db import models
from django.contrib.auth.models import User
from video.models import Video
# Create your models here.
class Comment(models.Model):
author = models.ForeignKey(User)
video = models.ForeignKey(Video)
content = models.TextField()
    time = models.DateTimeField(auto_now=True, auto_now_add=True)
|
eduNEXT/edunext-platform | import_shims/studio/contentstore/rest_api/v1/serializers.py | Python | agpl-3.0 | 431 | 0.009281 | """Deprecated import support. Auto-generated by import_shims/generate_shims.sh."""
# pylint: disable=redefined-builtin,wrong-import-position,wildcard-import,useless-suppression,line-too-long
from import_shims.warn import warn_deprecated_import
warn_deprecated_import('contentstore.rest_api.v1.serializers', 'cms.djangoapps.contentstore.rest_api.v1.serializers')
from cms.djangoapps.contentstore.rest_api.v1.serializers import *
|
messagebird/python-rest-api | messagebird/validation.py | Python | bsd-2-clause | 173 | 0 | from messagebird.error import ValidationError
def validate_is_not_blank(value, message):
if value is None or not value.strip():
        raise ValidationError(message)
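# Example (hypothetical values):
#   validate_is_not_blank(body, 'Body must not be an empty string')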
|
cltrudeau/django-yacon | yacon/tests/test_site.py | Python | mit | 4,408 | 0.002722 | from django.test import TestCase
from yacon.models.common import Language
from yacon.models.site import Site, ParsedPath
from yacon.models.hierarchy import BadSlug
# ============================================================================
class SiteTests(TestCase):
def test_hierarchy(self):
british = Language.factory(name='GB English', identifier='en-gb')
french = Language.factory(name='French', identifier='fr')
# create a test site
site = Site.create_site('Test Site', 'foo', [british, french])
self.assertTrue(site)
# test languages were created properly
lang = site.default_language
self.assertEquals(lang, british)
langs = site.get_languages()
self.assertEquals(len(langs), 2)
self.assertTrue(british in langs)
self.assertTrue(french in langs)
langs = site.get_languages('en')
self.assertEquals(len(langs), 1)
self.assertTrue(british in langs)
# test adding and retrieving config
site.add_config('foo', 'bar')
values = site.get_config('foo')
self.assertEquals(values[0], 'bar')
self.assertEquals(len(values), 1)
# add some child nodes
child1 = site.doc_root.create_child('Child1', 'child1', {\
french:('Enfant1', 'enfant1')})
self.assertTrue(child1)
child2 = site.doc_root.create_child('Child2', 'child2', {\
french:('Enfant2', 'enfant2')})
self.assertTrue(child2)
grandchild1 = child1.create_child('Grandchild1', 'grandchild1', {\
french:('Grandenfant1', 'grandenfant1')})
self.assertTrue(grandchild1)
grandchild2 = child1.create_child('Grandchild2', 'grandchild2', {\
french:('Grandenfant2', 'grandenfant2')})
self.assertTrue(grandchild2)
# attempt to add with a bad slug
self.assertRaises(BadSlug, site.doc_root.create_child, name='Child1',
slug='foo bar')
# search for some paths, testing leading and trailing slashes ignored
# properly and that right things are returned
pp = site.parse_path('child1')
self.assertEquals(pp.node, child1)
self.assertEquals(pp.language, british)
pp = site.parse_path('/child1')
self.assertEquals(pp.node, child1)
self.assertEquals(pp.language, british)
pp = site.parse_path('/child1/')
self.assertEquals(pp.node, child1)
self.assertEquals(pp.language, british)
pp = site.parse_path('/child1/grandchild2')
self.assertEquals(pp.node, grandchild2)
self.assertEquals(pp.language, british)
# search for some paths using something besides default lang
pp = site.parse_path('/enfant1/')
self.assertEquals(pp.node, child1)
self.assertEquals(pp.language, french)
pp = site.parse_path('/enfant1/grandenfant2')
        self.assertEquals(pp.node, grandchild2)
self.assertEquals(pp.language, french)
# test path parser with a mismatched path
pp = site.parse_path('/foo')
self.assertEquals(pp.path, '/foo')
self.assertEquals(pp.slugs_in_path, [])
self.assertEquals(pp.slugs_after_item, ['foo'])
self.assertEquals(pp.node, None)
self.assertEquals(pp.page, None)
self.assertEquals(pp.language, None)
        self.assertEquals(pp.item_type, ParsedPath.UNKNOWN)
# test path parser with a good path, including bits past the node
parsed = site.parse_path('/child1/grandchild2/foo/b')
self.assertEquals(parsed.path, '/child1/grandchild2/foo/b')
self.assertEquals(parsed.slugs_in_path, ['child1', 'grandchild2'])
self.assertEquals(parsed.slugs_after_item, ['foo', 'b'])
self.assertEquals(parsed.node, grandchild2)
# test tree printing
test_string = \
"""root (/)
Child1 (child1)
Grandchild1 (grandchild1)
Grandchild2 (grandchild2)
Child2 (child2)"""
string = site.doc_root.tree_to_string()
self.assertEquals(test_string, string)
# test getting the path from a node
test_string = "/child1/"
string = child1.node_to_path()
self.assertEquals(test_string, string)
test_string = "/child1/grandchild2/"
string = grandchild2.node_to_path()
self.assertEquals(test_string, string)
|
frogbywyplay/genbox_xbuilder | xbuilder/consts.py | Python | gpl-2.0 | 1,713 | 0 | #!/usr/bin/python
#
# Copyright (C) 2006-2018 Wyplay, All Rights Reserved.
# This file is part of xbuilder.
#
# xbuilder is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# xbuilder is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; see file COPYING.
# If not, see <http://www.gnu.org/licenses/>.
#
#
import os
XBUILDER_SYS_CFG = '/etc/xbuilder.conf'
XBUILDER_USER_CFG = os.path.expanduser('~/.xbuilder.conf')
XBUILDER_LOGFILE = 'build.log'
XBUILDER_REPORT_FILE = 'report.xml.bz2'
XBUILDER_DEFTYPE = 'beta'
XBUILDER_TARGET_COMMIT = False
# Default values that can be modified in the config file
# Keep only N beta releases; set to 0 to disable
XBUILDER_MAX_BETA_TARGETS = 5
XBUILDER_CLEAN_WORKDIR = True
XBUILDER_TARGET_COMMIT = True
XBUILDER_FEATURES = ''
XBUILDER_WORKDIR = '/usr/targets/xbuilder'
XBUILDER_ARCHIVE_DIR = '/opt/xbuilder'
XBUILDER_COMPRESSION = 'xz'
XBUILDER_MAIL_FROM = 'builder@wyplay.com'
XBUILDER_MAIL_TO = 'integration@wyplay.com'
XBUILDER_MAIL_SMTP = 'localhost'
XBUILDER_MAIL_LOG_SIZE = 20 * 1024
XBUILDER_MAIL_URI = 'http://localhost/genbox-ng/xbuilder'
XBUILDER_NOTIFIER_URI = 'http://localhost:9999/xbuilder'
XBUILDER_TYPES = ['beta', 'release']
XBUILDER_GPG_LOGFILE = 'gpg.log'
XBUILDER_GPG_LOGLEVEL = 20 # logging.INFO
|
lisa-lab/pylearn2 | pylearn2/models/gsn.py | Python | bsd-3-clause | 36,325 | 0.001514 | """
Generative Stochastic Networks
This is described in:
- "Generalized Denoising Auto-Encoders as Generative Models" Bengio, Yao, Alain,
Vincent. arXiv:1305.6663
- "Deep Generative Stochastic Networks Trainable by Backprop" Bengio,
Thibodeau-Laufer. arXiv:1306.1091
There is an example of training both unsupervised and supervised GSNs on MNIST
in pylearn2/scripts/gsn_example.py
"""
__authors__ = "Eric Martin"
__copyright__ = "Copyright 2013, Universite de Montreal"
__license__ = "3-clause BSD"
import copy
import functools
import warnings
import numpy as np
from theano.compat.six.moves import xrange
import theano
T = theano.tensor
from pylearn2.blocks import StackedBlocks
from pylearn2.expr.activations import identity
from pylearn2.models.autoencoder import Autoencoder
from pylearn2.models.model import Model
from pylearn2.utils import safe_zip
# Enforce correct restructured text list format.
# Be sure to re-run docgen.py and make sure there are no warnings if you
# modify the module-level docstring.
assert """:
- """ in __doc__
class GSN(StackedBlocks, Model):
"""
.. todo::
WRITEME
Parameters
----------
autoencoders : list
A list of autoencoder objects. As of now, only the functionality
from the base Autoencoder class is used.
preact_cors : list
A list of length len(autoencoders) + 1 where each element is a
callable (which includes Corruptor objects). The callable at
index i is called before activating the ith layer. Name stands
for "preactivation corruptors".
postact_cors : list
A list of length len(autoencoders) + 1 where each element is a
callable (which includes Corruptor objects). The callable at
index i is called directly after activating the ith layer. Name
stands for "postactivation corruptors". The valid values for this
parameter are the same as that for preact_cors.
layer_samplers: list
Describes how to sample from each layer. Sampling occurs directly
before the post activation corruption is applied. Valid values
for this argument are of the same form as valid parameters for
preact_cor and postact_cor (and if an element in the list is
None, no sampling will be applied at that layer). Note: as of
right now, we've only experimented with sampling at the visible
layer.
Notes
-----
Most of the time it will be much easier to construct a GSN using
GSN.new rather than GSN.__init__. This method exists to make the GSN
class very easy to modify.
The activation function for the visible layer is the "act_dec" function
on the first autoencoder, and the activation function for the i_th
hidden layer is the "act_enc" function on the (i - 1)th autoencoder.
"""
def __init__(self, autoencoders, preact_cors=None, postact_cors=None,
layer_samplers=None):
super(GSN, self).__init__(autoencoders)
# only for convenience
self.aes = self._layers
# easy way to turn off corruption (True => corrupt, False => don't)
self._corrupt_switch = True
# easy way to turn off sampling
self._sample_switch = True
# easy way to not use bias (True => use bias, False => don't)
self._bias_switch = True
# check that autoencoders are the correct sizes by looking at previous
# layer. We can't do this for the first ae, so we skip it.
for i in xrange(1, len(self.aes)):
assert (self.aes[i].weights.get_value().shape[0] ==
self.aes[i - 1].nhid)
# do some type checking and convert None's to identity function
def _make_callable_list(previous):
"""
.. todo::
WRITEME
"""
if len(previous) != self.nlayers:
raise ValueError("Need same number of corruptors/samplers as layers")
if not all(map(lambda x: callable(x) or x is None, previous)):
raise ValueError("All elements must either be None or be a callable")
return map(lambda x: identity if x is None else x, previous)
self._preact_cors = _make_callable_list(preact_cors)
self._postact_cors = _make_callable_list(postact_cors)
self._layer_samplers = _make_callable_list(layer_samplers)
@staticmethod
def _make_aes(layer_sizes, activation_funcs, tied=True):
"""
Creates the Autoencoder objects needed by the GSN.
Parameters
----------
layer_sizes : WRITEME
activation_funcs : WRITEME
tied : WRITEME
"""
aes = []
assert len(activation_funcs) == len(layer_sizes)
for i in xrange(len(layer_sizes) - 1):
# activation for visible layer is aes[0].act_dec
act_enc = activation_funcs[i + 1]
act_dec = act_enc if i != 0 else activation_funcs[0]
aes.append(
Autoencoder(layer_sizes[i], layer_sizes[i + 1],
act_enc, act_dec, tied_weights=tied)
)
return aes
@classmethod
def new(cls,
layer_sizes,
activation_funcs,
pre_corruptors,
post_corruptors,
layer_samplers,
tied=True):
"""
An easy (and recommended) way to initialize a GSN.
Parameters
----------
layer_sizes : list
A list of integers. The i_th element in the list is the size of
the i_th layer of the network, and the network will have
len(layer_sizes) layers.
activation_funcs : list
activation_funcs must be a list of the same length as layer_sizes
where the i_th element is the activation function for the i_th
layer. Each component of the list must refer to an activation
function in such a way that the Autoencoder class recognizes the
function. Valid values include a callable (which takes a symbolic
tensor), a string that refers to a Theano activation function, or
None (which gives the identity function).
preact_corruptors : list
preact_corruptors follows exactly the same format as the
activations_func argument.
postact_corruptors : list
postact_corruptors follows exactly the same format as the
activations_func argument.
layer_samplers : list
layer_samplers follows exactly the same format as the
activations_func argument.
tied : bool
Indicates whether the network should use tied weights.
Notes
-----
The GSN classes applies functions in the following order:
- pre-activation corruption
- activation
- clamping applied
- sampling
- post-activation corruption
All setting and returning of values occurs after applying the
activation function (or clamping if clamping is used) but before
applying sampling.
"""
args = [layer_sizes, pre_corruptors, post_corruptors, layer_samplers]
if not all(isinstance(arg, list) for arg in args):
raise TypeError("All arguments except for tied must be lists")
if not all(len(arg) == len(args[0]) for arg in args):
lengths = map(len, args)
raise ValueError("All list arguments must be of the same length. " +
"Current lengths are %s" % lengths)
aes = cls._make_aes(layer_sizes, activation_funcs, tied=tied)
return cls(aes,
preact_cors=pre_corruptors,
postact_cors=post_corruptors,
layer_samplers=layer_samplers)
@functools.wraps(Model.get_params)
def get_params(self):
"""
.. todo::
WRITEME
"""
params = set()
for ae in self.aes:
params.update(ae.get_params())
return list(params)
@property
def nlayers(self):
"""
|
mrmiguez/citrus_harvest | citrus_harvest/pyoaiharvester/pyoaiharvest.py | Python | mit | 4,976 | 0.002814 | #!/usr/bin/env python2
#created by Mark Phillips - https://github.com/vphill
import sys
import urllib2
import zlib
import time
import re
import xml.dom.pulldom
import operator
import codecs
from optparse import OptionParser
nDataBytes, nRawBytes, nRecoveries, maxRecoveries = 0, 0, 0, 3
def getFile(serverString, command, verbose=1, sleepTime=0):
global nRecoveries, nDataBytes, nRawBytes
if sleepTime:
time.sleep(sleepTime)
remoteAddr = serverString + '?verb=%s' % command
#print remoteAddr #test
if verbose:
print "\r", "getFile ...'%s'" % remoteAddr[-90:]
headers = {'User-Agent': 'OAIHarvester/2.0', 'Accept': 'text/html',
'Accept-Encoding': 'compress, deflate'}
try:
#remoteData=urllib2.urlopen(urllib2.Request(remoteAddr, None, headers)).read()
remoteData = urllib2.urlopen(remoteAddr).read()
except urllib2.HTTPError, exValue:
if exValue.code == 503:
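            # HTTP 503: honor the server's Retry-After header, then reissue
            # the same request after sleeping.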
retryWait = int(exValue.hdrs.get("Retry-After", "-1"))
if retryWait < 0:
return None
print 'Waiting %d seconds' % retryWait
return getFile(serverString, command, 0, retryWait)
print exValue
if nRecoveries < maxRecoveries:
nRecoveries += 1
return getFile(serverString, command, 1, 60)
return
nRawBytes += len(remoteData)
try:
remoteData = zlib.decompressobj().decompress(remoteData)
except:
pass
nDataBytes += len(remoteData)
mo = re.search('<error *code=\"([^"]*)">(.*)</error>', remoteData)
if mo:
print "OAIERROR: code=%s '%s'" % (mo.group(1), mo.group(2))
else:
return remoteData
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-l", "--link", dest="link", help="URL of repository")
parser.add_option("-o", "--filename", dest="filename", help="write repository to file")
parser.add_option("-f", "--from", dest="fromDate", help="harvest records from this date yyyy-mm-dd")
parser.add_option("-u", "--until", dest="until", help="harvest records until this date yyyy-mm-dd")
parser.add_option("-m", "--mdprefix", dest="mdprefix", default="oai_dc", help="use the specified metadata format")
parser.add_option("-s", "--setName", dest="setName", help="harvest the specified set")
(options, args) = parser.parse_args()
if options.link is None or options.filename is None:
parser.print_help()
parser.error("a repository url and output file are required")
if options:
serverString = verbOpts = fromDate = untilDate = mdPrefix = oaiSet = ''
if options.link:
serverString = options.link
if options.filename:
outFileName = options.filename
if options.fromDate:
fromDate = options.fromDate
if options.until:
untilDate = options.until
        if options.mdprefix:
            mdPrefix = options.mdprefix
if options.setName:
oaiSet = options.setName
else:
        parser.print_help()
if not serverString.startswith('http'):
serverString = 'http://' + serverString
print "Writing records to %s from archive %s" % (outFileName, serverString)
ofile = codecs.lookup('utf-8')[-1](file(outFileName, 'wb'))
ofile.write('<repository xmlns="http://www.openarchives.org/OAI/2.0/" \
xmlns:oai="http://www.openarchives.org/OAI/2.0/" \
xmlns:oai_dc="http://www.openarchives.org/OAI/2.0/oai_dc/" \
xmlns:dc="http://purl.org/dc/elements/1.1/" \
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" \
xsi:schemaLocation="http://www.openarchives.org/OAI/2.0/ http://www.openarchives.org/OAI/2.0/OAI-PMH.xsd">\n') # wrap list of records with this
if oaiSet:
verbOpts += '&set=%s' % oaiSet
if fromDate:
verbOpts += '&from=%s' % fromDate
if untilDate:
verbOpts += '&until=%s' % untilDate
if mdPrefix:
verbOpts += '&metadataPrefix=%s' % mdPrefix
else:
verbOpts += '&metadataPrefix=%s' % 'oai_dc'
print "Using url:%s" % serverString + '?verb=ListRecords' + verbOpts
data = getFile(serverString, 'ListRecords' + verbOpts)
recordCount = 0
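    # Page through the repository: keep following the resumptionToken
    # returned by each ListRecords response until the server omits it.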
while data:
events = xml.dom.pulldom.parseString(data)
for (event, node) in events:
if event == "START_ELEMENT":
if node.tagName == 'record' or node.tagName == 'oai:record':
events.expandNode(node)
node.writexml(ofile)
recordCount += 1
mo = re.search('[^ ][oai:]?resumptionToken[^/]*>(.*?)</', data)
if not mo:
break
data = getFile(serverString, "ListRecords&resumptionToken=%s" % mo.group(1))
ofile.write('\n</repository>\n'), ofile.close()
print "\nRead %d bytes (%.2f compression)" % (nDataBytes, float(nDataBytes) / nRawBytes)
print "Wrote out %d records" % recordCount
|
plotly/python-api | packages/python/plotly/plotly/validators/surface/_meta.py | Python | mit | 480 | 0 | import _plotly_utils.basevalidators
class MetaValidator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="meta", parent_name="surface", **kwarg | s):
        super(MetaValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
array_ok=kwargs.pop("array_ok", True),
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "info"),
**kwargs
)
|
Daniel-CA/odoo-addons | product_stock_on_hand/models/product.py | Python | agpl-3.0 | 1,953 | 0 | # -*- coding: utf-8 -*-
# Copyright 2017 Ainara Galdona - AvanzOSC
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api
from openerp.addons import decimal_precision as dp
class ProductTemplate(models.Model):
_inherit = 'product.template'
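    # stock_on_hand totals the unreserved quants sitting in stock locations
    # flagged with stock_on_hand; it is recomputed whenever quants change.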
@api.depends('product_variant_ids', 'product_variant_ids.quant_ids',
'product_variant_ids.quant_ids.qty',
'product_variant_ids.quant_ids.location_id',
'product_variant_ids.quant_ids.location_id.stock_on_hand',
'product_variant_ids.quant_ids.reservation_id')
@api.multi
def _compute_stock_on_hand(self):
for record in self:
record.stock_on_hand = sum(
record.mapped('product_variant_ids.quant_ids').filtered(
lambda x: x.location_id.stock_on_hand and
not x.reservation_id).mapped('qty'))
stock_on_hand = fields.Float(
string="Stock On Hand", store=True, compute="_compute_stock_on_hand",
digits=dp.get_precision('Product Unit of Measure'))
class ProductProduct(models.Model):
_inherit = 'product.product'
@api.depends('quant_ids', 'quant_ids.qty', 'quant_ids.location_id',
'quant_ids.location_id.stock_on_hand',
'quant_ids.reservation_id')
@api.multi
    def _compute_stock_on_hand(self):
for record in self:
record.stock_on_hand = sum(
record.quant_ids.filtered(
lambda x: x.location_id.stock_on_hand and
not x.reservation_id).mapped('qty'))
stock_on_hand = fields.Float(
string="Stock On Hand", store=True, compute="_compute_stock_on_hand",
digits=dp.get_precision('Product Unit of Measure'))
    quant_ids = fields.One2many(comodel_name='stock.quant',
inverse_name='product_id', string='Quants')
|
owtf/owtf | owtf/plugins/web/external/Testing_for_SSL-TLS@OWTF-CM-001.py | Python | bsd-3-clause | 305 | 0 | from owtf.managers.resource import get_resources
from owtf.plugin.helper import plugin_helper
DESCRIPTION = "Plugin to assist manual testing"
def run(PluginInfo):
resource = get_resources("Externa | lSSL")
    Content = plugin_helper.resource_linklist("Online Resources", resource)
return Content
|
xran-deex/Toury-Django | Toury/admin.py | Python | mit | 91 | 0 | from django.contrib import admin
from Toury.models import *
admin.site.register(Marker)
|
NECCSiPortal/NECCSPortal-dashboard | nec_portal/dashboards/admin/history/views.py | Python | apache-2.0 | 8,004 | 0 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""
Views for managing operation logs.
"""
from datetime import date
from datetime import datetime
try:
import urllib2
except Exception:
import urllib.request as urllib2
from django.core.urlresolvers import reverse_lazy
from django.http import Http404
from django.http import HttpResponseForbidden
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy as _
from horizon import forms
from nec_portal.dashboards.admin.history import forms as history_forms
from nec_portal.local import nec_portal_settings
# iframe src for admin/history.
ADMIN_HISTORY_FRAME = getattr(nec_portal_settings, 'ADMIN_HISTORY_FRAME', '')
DATE_FORMAT = '%Y-%m-%d'
DATETIME_FORMAT = '%Y-%m-%d %H:%M:%S'
START_TIME = ' 00:00:00'
END_TIME = ' 23:59:59'
QUERY_ADD_START = ' AND ('
QUERY_ADD_END = ')'
QUERY_START_MILISEC = '.000Z'
QUERY_END_MILISEC = '.999Z'
# Dictionary for query format
QUERY_DICTIONARY = {
'search': '',
'start': '',
'end': '',
}
SESSION_HISTORY_KEY = 'admin_history'
# Dictionary for session
SESSION_DICTIONARY = {
'search': '',
'start': '',
'end': '',
}
class IndexView(forms.ModalFormView):
form_class = history_forms.HistoryForm
form_id = "history_modal"
modal_header = ""
modal_id = "history_modal"
page_title = _("Operation Log")
submit_label = _("Filter")
submit_url = reverse_lazy("horizon:admin:history:index")
template_name = 'admin/history/index.html'
def get_initial(self):
request = self.request
# Initialize value
search_value = ''
default_term = get_default_term()
start_value = default_term[0]
end_value = default_term[1]
referer_url = request.META.get('HTTP_REFERER')
if SESSION_HISTORY_KEY not in request.session.keys():
SESSION_DICTIONARY['search'] = search_value
SESSION_DICTIONARY['start'] = start_value
SESSION_DICTIONARY['end'] = end_value
request.session[SESSION_HISTORY_KEY] = SESSION_DICTIONARY
session = request.session.get(SESSION_HISTORY_KEY, SESSION_DICTIONARY)
if request.method == 'POST':
            # When the request method is POST, values come from the POST data.
if 'search' in request.POST.keys():
search_value = request.POST['search'].encode('utf-8')
else:
search_value = session.get('search', search_value)
if 'start' in request.POST.keys():
start_value = request.POST['start'] + (
START_TIME if str(request.POST['start']) else '')
else:
start_value = session.get('start', start_value)
if 'end' in request.POST.keys():
end_value = request.POST['end'] + (
END_TIME if str(request.POST['end']) else '')
else:
end_value = session.get('end', end_value)
SESSION_DICTIONARY['search'] = search_value
            if start_value == '' or end_value == '':
start_value = default_term[0]
end_value = default_term[1]
SESSION_DICTIONARY['start'] = start_value
            SESSION_DICTIONARY['end'] = end_value
request.session[SESSION_HISTORY_KEY] = SESSION_DICTIONARY
elif referer_url is not None and request.path in referer_url:
            # When the screen is reloaded, values come from the session data.
search_value = session.get('search', search_value)
start_value = session.get('start', start_value)
end_value = session.get('end', end_value)
if (not start_value) or (not end_value):
start_value = default_term[0] + DATETIME_FORMAT
end_value = default_term[1] + DATETIME_FORMAT
return {
'search': search_value,
'start': datetime.strptime(start_value, DATETIME_FORMAT).date(),
'end': datetime.strptime(end_value, DATETIME_FORMAT).date(),
}
def form_valid(self, form):
return form.handle(self.request, form.cleaned_data)
class DetailView(forms.ModalFormView):
form_class = history_forms.HistoryForm
form_id = "history_modal"
modal_header = ""
modal_id = "history_modal"
page_title = _("Operation Log")
submit_label = _("Filter")
submit_url = reverse_lazy("horizon:admin:history:index")
template_name = 'admin/history/index.html'
def dispatch(self, request, *args, **kwargs):
if request.META.get('HTTP_REFERER') is None:
raise Http404
if not request.user.is_authenticated():
return HttpResponseForbidden()
if ADMIN_HISTORY_FRAME == '':
raise Http404
# Initialize dictionary
QUERY_DICTIONARY['search'] = ''
QUERY_DICTIONARY['start'] = ''
QUERY_DICTIONARY['end'] = ''
session = request.session.get(SESSION_HISTORY_KEY,
SESSION_DICTIONARY)
if not session['search'] == '':
QUERY_DICTIONARY['search'] = \
urllib2.quote(
QUERY_ADD_START + session['search'] + QUERY_ADD_END)
if session['start'] == '' or session['end'] == '':
default_term = get_default_term()
start_datetime = datetime.strptime(default_term[0],
DATETIME_FORMAT)
end_datetime = datetime.strptime(default_term[1],
DATETIME_FORMAT)
session['start'] = default_term[0]
session['end'] = default_term[1]
else:
start_datetime = datetime.strptime(
session['start'],
DATETIME_FORMAT
)
end_datetime = datetime.strptime(session['end'], DATETIME_FORMAT)
if start_datetime > end_datetime:
default_term = get_default_term()
start_datetime = datetime.strptime(default_term[0],
DATETIME_FORMAT)
end_datetime = datetime.strptime(default_term[1],
DATETIME_FORMAT)
session['start'] = default_term[0]
session['end'] = default_term[1]
QUERY_DICTIONARY['start'] = \
urllib2.quote(
start_datetime.strftime(DATETIME_FORMAT).replace(' ', 'T')
+ QUERY_START_MILISEC)
QUERY_DICTIONARY['end'] = \
urllib2.quote(
end_datetime.strftime(DATETIME_FORMAT).replace(' ', 'T')
+ QUERY_END_MILISEC)
return redirect(ADMIN_HISTORY_FRAME % QUERY_DICTIONARY)
def char_to_int(value):
try:
# Try whether it can be converted into an integer
return int(value)
except ValueError:
return 0
finally:
pass
def get_default_term():
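    # Build the default filter window: 00:00:00 on the first day of the
    # month DEFAULT_PERIOD months ago through 23:59:59 today.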
today = date.today()
year = today.year
month = today.month
months = char_to_int(getattr(nec_portal_settings, 'DEFAULT_PERIOD', '13'))
while True:
if month - months <= 0:
year -= 1
month += 12
else:
month -= months
break
start_value = datetime.strftime(
datetime(year, month, 1).date(),
DATE_FORMAT) + START_TIME
end_value = datetime.strftime(today, DATE_FORMAT) + END_TIME
return [start_value, end_value]
|
sdh11/gnuradio | gr-fec/python/fec/extended_tagged_decoder.py | Python | gpl-3.0 | 6,542 | 0.000611 | #!/usr/bin/env python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, blocks, digital
from . import fec_python as fec
from .bitflip import read_bitlist
class extended_tagged_decoder(gr.hier_block2):
# solution to log_(1-2*t)(1-2*.0335) = 1/taps where t is thresh (syndrome density)
# for i in numpy.arange(.1, .499, .01):
    #     print(str(log((1-(2 * .035)), (1-(2 * i)))) + ':' + str(i))
garbletable = {
0.310786835319: 0.1,
0.279118162802: 0.11,
0.252699589071: 0.12,
0.230318516016: 0.13,
0.211108735347: 0.14,
0.194434959095: 0.15,
0.179820650401: 0.16,
0.166901324951: 0.17,
0.15539341766: 0.18,
0.145072979886: 0.19,
0.135760766313: 0.2,
0.127311581396: 0.21,
0.119606529806: 0.22,
0.112547286766: 0.23,
0.106051798775: 0.24,
0.10005101381: 0.25,
0.0944863633098: 0.26,
0.0893078003966: 0.27,
0.084472254501: 0.28,
0.0799424008658: 0.29,
0.0756856701944: 0.3,
0.0716734425668: 0.31,
0.0678803831565: 0.32,
0.0642838867856: 0.33,
0.0608636049994: 0.34,
0.0576010337489: 0.35,
0.0544791422522: 0.36,
0.0514820241933: 0.37,
0.0485945507251: 0.38,
0.0458019998183: 0.39,
0.0430896262596: 0.4,
0.0404421166935: 0.41,
0.0378428350972: 0.42,
0.0352726843274: 0.43,
0.0327082350617: 0.44,
0.0301183562535: 0.45,
0.0274574540266: 0.46,
0.0246498236897: 0.47,
0.0215448131298: 0.48,
0.0177274208353: 0.49,
}
def __init__(
self,
decoder_obj_list,
ann=None,
puncpat="11",
integration_period=10000,
flush=None,
rotator=None,
lentagname=None,
mtu=1500,
):
gr.hier_block2.__init__(
self,
"extended_decoder",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(1, 1, gr.sizeof_char),
)
self.blocks = []
self.ann = ann
self.puncpat = puncpat
self.flush = flush
if isinstance(decoder_obj_list, list):
# This block doesn't handle parallelism of > 1
# We could just grab encoder [0][0], but we don't want to encourage
# this.
if isinstance(decoder_obj_list[0], list):
gr.log.info("fec.extended_tagged_decoder: Parallelism must be 1.")
raise AttributeError
decoder_obj = decoder_obj_list[0]
# Otherwise, just take it as is
else:
decoder_obj = decoder_obj_list
# If lentagname is None, fall back to using the non tagged
# stream version
if isinstance(lentagname, str):
if lentagname.lower() == "none":
lentagname = None
message_collector_connected = False
# anything going through the annihilator needs shifted, uchar vals
if (
fec.get_decoder_input_conversion(decoder_obj) == "uchar"
or fec.get_decoder_input_conversion(decoder_obj) == "packed_bits"
):
self.blocks.append(blocks.multiply_const_ff(48.0))
if fec.get_shift(decoder_obj) != 0.0:
self.blocks.append(blocks.add_const_ff(fec.get_shift(decoder_obj)))
elif fec.get_decoder_input_conversion(decoder_obj) == "packed_bits":
self.blocks.append(blocks.add_const_ff(128.0))
if (
fec.get_decoder_input_conversion(decoder_obj) == "uchar"
or fec.get_decoder_input_conversion(decoder_obj) == "packed_bits"
):
self.blocks.append(blocks.float_to_uchar())
const_index = 0 # index that corresponds to mod order for specinvert purposes
if not self.flush:
flush = 10000
else:
flush = self.flush
if self.ann: # ann and puncpat are strings of 0s and 1s
cat = fec.ULLVector()
for i in fec.read_big_bitlist(ann):
cat.append(i)
synd_garble = 0.49
idx_list = sorted(self.garbletable.keys())
for i in idx_list:
if 1.0 / self.ann.count("1") >= i:
synd_garble = self.garbletable[i]
print(
"using syndrom garble threshold "
+ str(synd_garble)
+ "for conv_bit_corr_bb"
)
print("ceiling: .0335 data garble rate")
self.blocks.append(
fec.conv_bit_corr_bb(
cat,
len(puncpat) - puncpat.count("0"),
len(ann),
integration_period,
flush,
synd_garble,
)
)
if self.puncpat != "11":
self.blocks.append(
fec.depuncture_bb(len(puncpat), read_bitlist(puncpat), 0)
)
if fec.get_decoder_input_conversion(decoder_obj) == "packed_bits":
self.blocks.append(blocks.uchar_to_float())
self.blocks.append(blocks.add_const_ff(-128.0))
self.blocks.append(digital.binary_slicer_fb())
self.blocks.append(blocks.unpacked_to_packed_bb(1, 0))
else:
if not lentagname:
self.blocks.append(
fec.decoder(
decoder_obj,
                    fec.get_decoder_input_item_size(decoder_obj),
fec.get_decoder_output_item_size(decoder_obj),
)
)
else:
            self.blocks.append(
fec.tagged_decoder(
decoder_obj,
fec.get_decoder_input_item_size(decoder_obj),
fec.get_decoder_output_item_size(decoder_obj),
lentagname,
mtu,
)
)
if fec.get_decoder_output_conversion(decoder_obj) == "unpack":
self.blocks.append(blocks.packed_to_unpacked_bb(1, gr.GR_MSB_FIRST))
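        # Wire the collected blocks into one chain:
        # self -> blocks[0] -> ... -> blocks[-1] -> self.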
self.connect((self, 0), (self.blocks[0], 0))
self.connect((self.blocks[-1], 0), (self, 0))
for i in range(len(self.blocks) - 1):
self.connect((self.blocks[i], 0), (self.blocks[i + 1], 0))
|
thesealion/writelightly | writelightly/tests/base.py | Python | mit | 4,192 | 0.00334 | import curses
import random
class Screen(object):
def __init__(self, maxy, maxx):
self.setmaxyx(maxy, maxx)
self.pos = (0, 0)
def clear(self, *args):
if not args:
y, x = self.getmaxyx()
self.lines = []
for i in range(y):
self.lines.append(list([' '] * x))
else:
y, x, y0, x0 = args
for i in range(y0, y0 + y):
try:
line = self.lines[i]
except IndexError:
# Following lines are out of range, no need to go over them.
# We don't raise an exception here because it's not
# an error in curses.
break
line[x0:x0 + x] = [' '] * x
def keypad(self, val):
pass
def getmaxyx(self):
return self.maxyx
def addstr(self, y, x, s, a=None):
if not s:
return
line = self.lines[y]
sl = slice(x, x + len(s))
if len(line[sl]) != len(s):
raise CursesError('addstr got a too long string: "%s"' % s)
if line[sl][0] != ' ' or len(set(line[sl])) != 1:
if line[sl] != list(s):
raise CursesError('trying to overwrite "%s" with "%s", y: %d' %
(''.join(line[sl]), s, y))
line[sl] = list(s)
def refresh(self):
pass
def getch(self):
return commands.get()
def get_line(self, ind):
return ''.join(self.lines[ind]).strip()
def move(self, y, x):
self.pos = (y, x)
def getyx(self):
return self.pos
def setmaxyx(self, maxy, maxx):
self.maxyx = (maxy, maxx)
self.clear()
class Window(Screen):
def __init__(self, *args):
if len(args) == 2:
y0, x0 = args
sy, sx = get_screen().getmaxyx()
y, x = sy - y0, sx - x0
elif len(args) == 4:
y, x, y0, x0 = args
else:
raise CursesError('Bad arguments for newwin: %s' % args)
self.maxyx = (y, x)
self.begyx = (y0, x0)
self.pos = (0, 0)
def getbegyx(self):
return self.begyx
def resize(self, y, x):
self.maxyx = (y, x)
def addstr(self, y, x, s, a=None):
y0, x0 = self.getbegyx()
get_screen().addstr(y + y0, x + x0, s, a)
def clear(self):
y0, x0 = self.getbegyx()
y, x = self.getmaxyx()
get_screen().clear(y, x, y0, x0)
class CursesError(Exception):
pass
screen = None
def get_screen():
return screen
def initscr():
global screen
screen = Screen(100, 100)
return screen
def newwin(*args):
return Window(*args)
class CommandsManager(object):
def __init__(self):
self.reset()
def reset(self):
self.commands = []
def add(self, command):
if isinstance(command, list):
self.commands += command
else:
self.commands.append(command)
def get(self):
try:
return self.commands.pop(0)
except IndexError:
raise CursesError('Run out of commands')
commands = CommandsManager()
def keyname(code):
ch = chr(code)
if ch == '\n':
return '^J'
return ch
def patch_curses():
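    # Swap the real curses entry points for the in-memory fakes above so
    # code under test can drive a fake terminal without a real one.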
if getattr(curses, 'patched', False):
return
curses.initscr = initscr
curses.noecho = lambda: None
curses.cbreak = lambda: None
curses.curs_set = lambda val: None
curses.newwin = newwin
curses.nocbreak = lambda: None
curses.echo = lambda: None
curses.endwin = lambda: None
    curses.keyname = keyname
curses.start_color = lambda: None
curses.patched = True
def get_random_lines(num=None, width=None):
letters = [chr(c) for c in range(97, 123)] + [' ']
if not num or not width:
y, x = get_screen().getmaxyx()
if not num:
num = y + 50
if not width:
width = x - 1
def get_line():
line = ''.join([random.choice(letters)
for j in range(random.randint(1, width))]).strip()
return line or get_line()
return [get_line() for i in range(num)]
|
developerQuinnZ/this_will_work | student-work/hobson_lane/exercism/python/isogram/isogram.py | Python | mit | 428 | 0.002336 | def is_isogram(s):
""" Determine if a word or phrase | is an isogram.
An isogram (also known as a "nonpattern word") is a word or phrase without a repeating letter.
Examples of isograms:
- lumberjacks
- background
- downstream
"""
from collections import Counter
s = s.lower().strip()
s = [c for c in s if c.isalpha()]
counts = Counter(s).values()
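    # Every letter must occur exactly once; "or [1]" makes the empty
    # string count as an isogram.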
    return max(counts or [1]) == 1
|
racker/txzookeeper | txzookeeper/retry.py | Python | gpl-3.0 | 10,571 | 0 | #
# Copyright (C) 2011 Canonical Ltd. All Rights Reserved
#
# This file is part of txzookeeper.
#
# Authors:
# Kapil Thangavelu
#
# txzookeeper is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# txzookeeper is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with txzookeeper. If not, see <http://www.gnu.org/licenses/>.
#
"""
A retry client facade that transparently handles transient connection
errors.
"""
import time
import zookeeper
from twisted.internet.defer import inlineCallbacks, returnValue, Deferred
__all__ = ["retry", "RetryClient"]
def is_retryable(e):
"""Determine if an exception signifies a recoverable connection error.
"""
return isinstance(
e,
(zookeeper.ClosingException,
zookeeper.ConnectionLossException,
zookeeper.OperationTimeoutException))
def sleep(delay):
"""Non-blocking sleep.
"""
from twisted.internet import reactor
d = Deferred()
reactor.callLater(delay, d.callback, None)
return d
def get_delay(session_timeout, max_delay=5, session_fraction=30.0):
"""Get retry delay between retrying an operation.
Returns either the specified fraction of a session timeout or the
max delay, whichever is smaller.
    The goal is to give the connection time to auto-heal before
    retrying an operation.
:param session_timeout: The timeout for the session, in milliseconds
:param max_delay: The max delay for a retry, in seconds.
:param session_fraction: The fractional amount of a timeout to wait
"""
retry_delay = session_timeout / (float(session_fraction) * 1000)
return min(retry_delay, max_delay)
def check_retryable(retry_client, max_time, error):
"""Check an error and a client to see if an operation is retryable.
:param retry_client: A txzookeeper client
:param max_time: The max time (epoch tick) that the op is retryable till.
:param error: The client operation exception.
"""
# Only if the error is known.
if not is_retryable(error):
return False
# Only if we've haven't exceeded the max allotted time.
if max_time <= time.time():
return False
# Only if the client hasn't been explicitly closed.
if not retry_client.connected:
return False
# Only if the client is in a recoverable state.
if retry_client.unrecoverable:
return False
return True
@inlineCallbacks
def retry(client, func, *args, **kw):
"""Constructs a retry wrapper around a function that retries invocations.
If the function execution results in an exception due to a transient
connection error, the retry wrapper will reinvoke the operation after
a suitable delay (fractional value of the session timeout).
:param client: A ZookeeperClient instance.
:param func: A callable python object that interacts with
zookeeper, the callable must utilize the same zookeeper
connection as passed in the `client` param. The function
must return a single value (either a deferred or result
value).
"""
while 1:
try:
value = yield func(*args, **kw)
except Exception, e:
# For clients which aren't connected (session timeout == None)
# we raise the errors to the callers
session_timeout = client.session_timeout or 0
# If we keep retrying past the 1.5 * session timeout without
# success just die, the session expiry is fatal.
max_time = session_timeout * 1.5 + time.time()
if not check_retryable(client, max_time, e):
raise
# Give the connection a chance to auto-heal.
yield sleep(get_delay(session_timeout))
continue
returnValue(value)
def retry_watch(client, func, *args, **kw):
"""Contructs a wrapper around a watch callable that retries invocations.
If the callable execution results in an exception due to a transient
connection error, the retry wrapper will reinvoke the operation after
a suitable delay (fractional value of the session timeout).
A watch function must return back a tuple of deferreds
(value_deferred, watch_deferred). No inline callbacks are
performed in here to ensure that callers continue to see a
tuple of results.
The client passed to this retry function must be the same as
the one utilized by the python callable.
:param client: A ZookeeperClient instance.
:param func: A python callable that interacts with zookeeper. If a
function is passed, a txzookeeper client must the first
parameter of this function. The function must return a
tuple of (value_deferred, watch_deferred)
"""
# For clients which aren't connected (session timeout == None)
# we raise the usage errors to the callers
session_timeout = client.session_timeout or 0
# If we keep retrying past the 1.5 * session timeout without
# success just die, the session expiry is fatal.
max_time = session_timeout * 1.5 + time.time()
value_d, watch_d = func(*args, **kw)
def retry_delay(f):
"""Errback, verifes an op is retryable, and delays the next retry.
"""
# Check that operation is retryable.
if not check_retryable(client, max_time, f.value):
return f
# Give the connection a chance to auto-heal
d = sleep(get_delay(session_timeout))
d.addCallback(retry_inner)
return d
def retry_inner(value):
"""Retry operation invoker.
"""
# Invoke the function
retry_value_d, retry_watch_d = func(*args, **kw)
# If we need to retry again.
retry_value_d.addErrback(retry_delay)
# Chain the new watch deferred to the old, presuming its doa
# if the value deferred errored on a connection error.
retry_watch_d.chainDeferred(watch_d)
# Insert back into the callback chain.
return retry_value_d
# Attach the retry
value_d.addErrback(retry_delay)
return value_d, watch_d
def _passproperty(name):
"""Returns a method wrapper that delegates to a client's property.
"""
def wrapper(retry_client):
return getattr(retry_client.client, name)
return property(wrapper)
class RetryClient(object):
"""A ZookeeperClient wrapper that transparently performs retries.
A zookeeper connection can experience transient connection failures
on any operation. As long as the session associated to the connection
is still active on the zookeeper cluster, libzookeeper can reconnect
automatically to the cluster and session and the client is able to
retry.
    Whether a given operation is safe to retry depends on the application
    in question and how it interacts with zookeeper.
    In particular, coordination around sequence nodes can be
    problematic, as the client has no way of knowing if the operation
    succeeded or not without additional application-specific context.
Idempotent operations against the zookeeper tree are generally
safe to retry.
This class provides a simple wrapper around a zookeeper client,
that will automatically perform retries on operations that
interact with the zookeeper tree, in the face of transient errors,
till the session timeout has been reached. All of the attributes
and methods of a zookeeper client are exposed.
All the methods of the client that interact with the zookeeper tree
are retry enabled.
"""
def __init__(self, client):
self.client = client
def add_auth(self, *args, **kw):
return retry(self.clie |
NoBodyCam/TftpPxeBootBareMetal | nova/tests/test_plugin_api_extensions.py | Python | apache-2.0 | 2,869 | 0.000349 | # Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pkg_resources
import nova
from nova.api.openstack.compute import extensions as computeextensions
from nova.api.openstack import extensions
from nova.openstack.common.plugin import plugin
from nova.openstack.common.plugin import pluginmanager
from nova import test
class StubController(object):
def i_am_the_stub(self):
pass
class StubControllerExtension(extensions.ExtensionDescriptor):
"""This is a docstring. We need it."""
name = 'stubextension'
alias = 'stubby'
def get_resources(self):
resources = []
res = extensions.ResourceExtension('testme',
StubController())
resources.append(res)
return resources
service_list = []
class TestPluginClass(plugin.Plugin):
def __init__(self, service_name):
super(TestPluginClass, self).__init__(service_name)
self._add_api_extension_descriptor(StubControllerExtension)
        service_list.append(service_name)
class MockEntrypoint(pkg_resources.EntryPoint):
def load(self):
return TestPluginClass
class APITestCase(test.TestCase):
"""Test case for the plugin api extension interface"""
def test_add_extension(self):
def mock_load(_s):
return TestPluginClass()
def mock_iter_entry_points(_t):
| return [MockEntrypoint("fake", "fake", ["fake"])]
self.stubs.Set(pkg_resources, 'iter_entry_points',
mock_iter_entry_points)
global service_list
service_list = []
# Marking out the default extension paths makes this test MUCH faster.
self.flags(osapi_compute_extension=[])
self.flags(osapi_volume_extension=[])
found = False
mgr = computeextensions.ExtensionManager()
for res in mgr.get_resources():
# We have to use this weird 'dir' check because
# the plugin framework muddies up the classname
# such that 'isinstance' doesn't work right.
if 'i_am_the_stub' in dir(res.controller):
found = True
self.assertTrue(found)
self.assertEqual(len(service_list), 1)
self.assertEqual(service_list[0], 'compute-extensions')
|
angvp/gord | gord/common.py | Python | apache-2.0 | 3,534 | 0.010187 | #!/usr/bin/python2.4
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Common libraries for GORD servers and clients."""
class Network(object):
"""SMS service network parameters.
Attributes:
SERVICE_HOST: String host that server will run on.
SERVICE_PORT: Integer port that server will run on.
AUTH_CLASS: String authentication mechanism to use, a class name.
USE_HTTPS: Boolean whether to use https or not.
PEM_FILENAME: String filename of server PEM file, or None to use default
(SERVER_ROOT)/server.pem.
"""
SERVICE_HOST = 'gordhost.some.com.domain'
SERVICE_PORT = 8188
AUTH_CLASS = 'AuthNone'
USE_HTTPS = False
PEM_FILENAME = None
class AuthConfig(object):
"""Class holding Auth Class configurations.
Attributes:
FAKE_LOGIN_METHODS: List of methods allowed to be called with fake login.
MAX_AUTHENTICATION_ATTEMPTS: Integer max number of authentication attempts
before failing.
"""
FAKE_LOGIN_METHODS = ['HelloWorld', 'Help']
NO_AUTH_METHODS = ['HelloWorldNoAuth']
MAX_AUTHENTICATION_ATTEMPTS = 3
# AuthLdap and derived classes
LDAP_SERVER_URI = 'ldap://ldap'
LDAP_SERVER_START_TLS = True
LDAP_SERVER_BIND = 'uid=%s,ou=People,dc=example,dc=com'
LDAP_SERVER_TLS_REQUIRE_CERT = False
# AuthWindows
WINDOWS_DOMAIN = 'domain'
class Error(Exception):
"""Base exception for common."""
class Fatal(Error):
"""Generic fatal error for sms."""
class NonFatal(Error):
"""Generic non-fatal error for sms."""
class ConnectionError(NonFatal):
"""There was an error opening a connection to the server."""
class TicketExpiredError(NonFatal):
"""The auth ticket has expired."""
class AuthenticationError(Fatal):
"""There was an error authenticating to the ticket service."""
class AccessDenied(Aut | henticationError):
"""T | he auth ticket was valid, but access was denied."""
class InvalidMethod(Fatal):
"""An invalid (non-existent or _private) method was called on GORD."""
class InvalidOption(Fatal):
"""An invalid option was specified to a GORD method."""
class InvalidArgumentsError(Fatal):
"""Invalid arguments were passed."""
class TicketInvalidError(AuthenticationError):
"""The auth ticket is in an invalid format."""
class RPCError(Fatal):
"""If an RPC error occurred during the request, usually recoverable."""
class TransportError(Fatal):
"""An error over whatever transport is being used to connect to SMS."""
class UnknownCollection(TransportError):
"""An unknown collection ID or name was passed."""
class HostNotFound(TransportError):
"""An active host was not found using the passed hostname."""
class HostAlreadyCollectionMember(TransportError):
"""The passed host is already a member of the passed collection."""
class CollectionMembershipError(TransportError):
"""There was an error adding a host to a collection."""
class HostUnreachable(TransportError):
"""The host is not reachable; it either did not resolve or respond to ping."""
|
hyperNURb/ggrc-core | src/tests/ggrc_workflows/notifications/test_recurring_cycles.py | Python | apache-2.0 | 3,679 | 0.008154 | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: miha@reciprocitylabs.com
# Maintained By: miha@reciprocitylabs.com
import random
from tests.ggrc import TestCase
from freezegun import freeze_time
import os
from mock import patch
from ggrc import notification
from ggrc.models import Person
from tests.ggrc_workflows.generator import WorkflowsGenerator
from tests.ggrc.api_helper import Api
from tests.ggrc.generator import ObjectGenerator
from ggrc_workflows import views
if os.environ.get('TRAVIS', False):
random.seed(1) # so we can reproduce the tests if needed
class TestRecurringCycleNotifications(TestCase):
def setUp(self):
TestCase.setUp(self)
self.api = Api()
self.generator = WorkflowsGenerator()
self.object_generator = ObjectGenerator()
_, self.assignee = self.object_generator.generate_person(
user_role="gGRC Admin")
self.create_test_cases()
def tearDown(self):
pass
def test_cycle_starts_in_less_than_X_days(self):
with freeze_time("2015-02-01"):
_, wf = self.generator.generate_workflow(self.quarterly_wf_1)
response, wf = self.generator.activate_workflow(wf)
self.assert200(response)
assignee = Person.query.get(self.assignee.id)
with freeze_time("2015-01-01"):
_, notif_data = notification.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-01-29"):
_, notif_data = notification.get_todays_notifications()
self.assertIn(assignee.email, notif_data)
with freeze_time("2015-02-01"):
_, notif_data = notification.get_todays_notifications()
self.assertIn(assignee.email, notif_data)
# TODO: this should mock google email api.
@patch("ggrc.notification.email.send_email")
def test_marking_sent_notifications(self, mail_mock):
mail_mock.return_value = True
with freeze_time("2015-02-01"):
_, wf = self.generator.generate_workflow(self.quarterly_wf_1)
response, wf = self.generator.activate_workflow(wf)
self.assert200(response)
assignee = Person.query.get(self.assignee.id)
with freeze_time("2015-01-01"):
_, notif_data = notification.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-01-29"):
views.send_todays_digest_notifications()
_, notif_data = notification.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
with freeze_time("2015-02-01"):
_, notif_data = notification.get_todays_notifications()
self.assertNotIn(assignee.email, notif_data)
def create_test_cases(self):
def person_dict(person_id):
return {
"href": "/api/people/%d" % person_id,
"id": person_id,
"type": "Person"
}
self.quarterly_wf_1 = {
"title": "quarterly wf 1",
"description": "",
"owners": [person_dict(self.assignee.id)],
"frequency": "quarterly",
"notify_on_change": True,
"task_groups": [{
"title": "tg_1",
"contact": person_dict(self.assignee.id),
"task_group_tasks": [{
| "contact": person_dict(self.assignee.id),
"description": self.generator.random_str(10 | 0),
"relative_start_day": 5,
"relative_start_month": 2,
"relative_end_day": 25,
"relative_end_month": 2,
},
],
},
]
}
self.all_workflows = [
self.quarterly_wf_1,
]
|
mlundblad/telepathy-gabble | tests/twisted/sasl/saslutil.py | Python | lgpl-2.1 | 5,861 | 0.004949 | # hey, Python: encoding: utf-8
from twisted.words.protocols.jabber.xmlstream import NS_STREAMS
from gabbletest import XmppAuthenticator
from base64 import b64decode, b64encode
from twisted.words.xish import domish
import constants as cs
import ns
from servicetest import (ProxyWrapper, EventPattern, assertEquals,
assertLength, Event)
class SaslChannelWrapper(ProxyWrapper):
def __init__(self, object, default=cs.CHANNEL, interfaces={
"ServerAuthentication" : cs.CHANNEL_TYPE_SERVER_AUTHENTICATION,
"SASLAuthentication" : cs.CHANNEL_IFACE_SASL_AUTH}):
ProxyWrapper.__init__(self, object, default, interfaces)
class SaslEventAuthenticator(XmppAuthenticator):
def __init__(self, jid, mechanisms):
XmppAuthenticator.__init__(self, jid, '')
self._mechanisms = mechanisms
def streamStarted(self, root=None):
if root:
self.xmlstream.sid = root.getAttribute('id')
self.xmlstream.sendHeader()
if self.authenticated:
# Initiator authenticated itself, and has started a new stream.
| features = domish.Element((NS_STREAMS, 'features'))
bind = features.addElement((ns.NS_XMPP_BIND, 'bind'))
self.xmlstream.send(features)
self.xmlstr | eam.addOnetimeObserver(
"/iq/bind[@xmlns='%s']" % ns.NS_XMPP_BIND, self.bindIq)
else:
features = domish.Element((NS_STREAMS, 'features'))
mechanisms = features.addElement((ns.NS_XMPP_SASL, 'mechanisms'))
for mechanism in self._mechanisms:
mechanisms.addElement('mechanism', content=mechanism)
self.xmlstream.send(features)
self.xmlstream.addOnetimeObserver("/auth", self._auth)
self.xmlstream.addObserver("/response", self._response)
self.xmlstream.addObserver("/abort", self._abort)
def failure(self, fail_str):
reply = domish.Element((ns.NS_XMPP_SASL, 'failure'))
reply.addElement(fail_str)
self.xmlstream.send(reply)
self.xmlstream.reset()
def abort(self):
self.failure('abort')
def not_authorized(self):
self.failure('not-authorized')
def success(self, data=None):
reply = domish.Element((ns.NS_XMPP_SASL, 'success'))
if data is not None:
reply.addContent(b64encode(data))
self.xmlstream.send(reply)
        self.authenticated = True
self.xmlstream.reset()
def challenge(self, data):
reply = domish.Element((ns.NS_XMPP_SASL, 'challenge'))
reply.addContent(b64encode(data))
self.xmlstream.send(reply)
def _auth(self, auth):
# Special case in XMPP: '=' means a zero-byte blob, whereas an empty
# or self-terminating XML element means no initial response.
# (RFC 3920 §6.2 (3))
if str(auth) == '':
self._event_func(Event('sasl-auth', authenticator=self,
has_initial_response=False,
initial_response=None,
xml=auth))
        elif str(auth) == '=':
            # per the comment above, '=' is a present-but-empty response
            self._event_func(Event('sasl-auth', authenticator=self,
                                   has_initial_response=True,
                                   initial_response='',
                                   xml=auth))
else:
self._event_func(Event('sasl-auth', authenticator=self,
has_initial_response=True,
initial_response=b64decode(str(auth)),
xml=auth))
def _response(self, response):
self._event_func(Event('sasl-response', authenticator=self,
response=b64decode(str(response)),
xml=response))
def _abort(self, abort):
self._event_func(Event('sasl-abort', authenticator=self,
xml=abort))
def connect_and_get_sasl_channel(q, bus, conn):
conn.Connect()
q.expect('dbus-signal', signal='StatusChanged',
args=[cs.CONN_STATUS_CONNECTING, cs.CSR_REQUESTED])
return expect_sasl_channel(q, bus, conn)
def expect_sasl_channel(q, bus, conn):
old_signal, new_signal = q.expect_many(
EventPattern('dbus-signal', signal='NewChannel',
predicate=lambda e:
e.args[1] == cs.CHANNEL_TYPE_SERVER_AUTHENTICATION),
EventPattern('dbus-signal', signal='NewChannels',
predicate=lambda e:
e.args[0][0][1].get(cs.CHANNEL_TYPE) ==
cs.CHANNEL_TYPE_SERVER_AUTHENTICATION),
)
path, type, handle_type, handle, suppress_handler = old_signal.args
chan = SaslChannelWrapper(bus.get_object(conn.bus_name, path))
assertLength(1, new_signal.args[0])
assertEquals(path, new_signal.args[0][0][0])
props = new_signal.args[0][0][1]
assertEquals(cs.CHANNEL_IFACE_SASL_AUTH, props.get(cs.AUTH_METHOD))
return chan, props
def abort_auth(q, chan, reason, message):
reason_err_map = {
cs.SASL_ABORT_REASON_USER_ABORT : cs.CANCELLED,
cs.SASL_ABORT_REASON_INVALID_CHALLENGE : cs.SERVICE_CONFUSED }
mapped_error = reason_err_map.get(reason, cs.CANCELLED)
chan.SASLAuthentication.AbortSASL(reason, message)
ssc, ce, _ = q.expect_many(
EventPattern(
'dbus-signal', signal='SASLStatusChanged',
interface=cs.CHANNEL_IFACE_SASL_AUTH,
predicate=lambda e: e.args[0] == cs.SASL_STATUS_CLIENT_FAILED),
EventPattern('dbus-signal', signal='ConnectionError'),
EventPattern(
'dbus-signal', signal="StatusChanged",
args=[cs.CONN_STATUS_DISCONNECTED,
cs.CSR_AUTHENTICATION_FAILED]))
assertEquals(cs.SASL_STATUS_CLIENT_FAILED, ssc.args[0])
assertEquals(mapped_error, ssc.args[1])
assertEquals(message, ssc.args[2].get('debug-message')),
assertEquals(mapped_error, ce.args[0])
|
mupen64plus-ae/mupen64plus-ae | ndkLibs/miniupnp/minissdpd/submit_to_minissdpd.py | Python | gpl-3.0 | 1,938 | 0.004644 | #!/usr/bin/env python3
# vim: sw=4 ts=4 expandtab
# (c) 2021 Thomas BERNARD
# Python3 module to submit service to running MiniSSDPd
# MiniSSDPd: See http://miniupnp.free.fr/minissdpd.html
import socket, os
def codelength(s):
""" returns the given string/bytes as bytes, prepended with the 7-bit-encoded length """
# We want bytes
if not isinstance(s, bytes):
# Not bytes. Let's try to convert to bytes, but only plain ASCII
try:
s = str.encode(s, "ascii")
except:
s = b''
l = len(s)
if l == 0:
return b'\x00'
encodedlen = (l & 0x7F).to_bytes(1, 'little')
while l > 0x7F:
l = l >> 7
c = (l & 0x7F) | 0x80
encodedlen = c.to_bytes(1, 'little') + encodedlen
return encodedlen + s
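# Worked examples of the 7-bit length prefix (derived from the loop above):
#   codelength("abc")      -> b'\x03' + b'abc'
#   codelength(b"x" * 200) -> b'\x81\x48' + b"x" * 200   # 200 == (1 << 7) | 0x48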
def submit_to_minissdpd(st, usn, server, url, sockpath="/var/run/minissdpd.sock"):
""" submits the specified service to MiniSSDPD (if running)"""
# First check if sockpath exists i.e. MiniSSDPD is running
if not os.path.exists(sockpath):
return -1, f"Error: {sockpath} does not exist. Is minissdpd running?"
# OK, submit
sock = socket.socket(socket.AF_UNIX, socke | t.SOCK_STREAM)
try:
sock.connect(sockpath)
sock.send(b'\x04' + codelength(st) + codelength(usn) + codelength(server) + codelength(url))
ex | cept socket.error as msg:
print(msg)
return -1, msg
finally:
sock.close()
return 0, "OK"
if __name__ == "__main__":
# Example usage
rc, message = submit_to_minissdpd(
b'urn:schemas-upnp-org:device:InternetGatewayDevice:1',
b'uuid:73616d61-6a6b-7a74-650a-0d24d4a5d636::urn:schemas-upnp-org:device:InternetGatewayDevice:1',
b'MyServer/0.0',
b'http://192.168.0.1:1234/rootDesc.xml',
)
if rc == 0:
print("OK: submitting to MiniSSDPD went well")
else:
print("Not OK. Error message is:", message)
|
jamespcole/home-assistant | homeassistant/components/dlib_face_detect/image_processing.py | Python | apache-2.0 | 2,155 | 0 | """
Component that will help set the Dlib face detect processing.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/image_processing.dlib_face_detect/
"""
import logging
import io
from homeassistant.core import split_entity_id
# pylint: disable=unused-import
from homeassistant.components.image_processing import PLATFORM_SCHEMA # noqa
from homeassistant.components.image_processing import (
ImageProcessingFaceEntity, CONF_SOURCE, CONF_ENTITY_ID, CONF_NAME)
REQUIREMENTS = ['face_recognition==1.2.3']
_LOGGER = logging.getLogger(__name__)
ATTR_LOCATION = 'location'
def setup_platform(hass, config, add_entities | , discovery_info=None):
"""Set up the Dlib Face detection platform."""
entities = []
for camera in config[CONF_SOURCE]:
entities.append(DlibFaceDetectEntity(
camera[CONF_ENTITY_ID], camera.get(CONF_NAME)
))
add_entities(entities)
class DlibFaceDetectEntity(ImageProcessingFaceEntity):
"""Dlib Face API entity for identify."""
def __init__(self, camera_entity, name=None):
| """Initialize Dlib face entity."""
super().__init__()
self._camera = camera_entity
if name:
self._name = name
else:
self._name = "Dlib Face {0}".format(
split_entity_id(camera_entity)[1])
@property
def camera_entity(self):
"""Return camera entity id from process pictures."""
return self._camera
@property
def name(self):
"""Return the name of the entity."""
return self._name
def process_image(self, image):
"""Process image."""
import face_recognition # pylint: disable=import-error
fak_file = io.BytesIO(image)
fak_file.name = 'snapshot.jpg'
fak_file.seek(0)
image = face_recognition.load_image_file(fak_file)
face_locations = face_recognition.face_locations(image)
face_locations = [{ATTR_LOCATION: location}
for location in face_locations]
self.process_faces(face_locations, len(face_locations))
|
sk89q/Plumeria | plumeria/core/scoped_config/__init__.py | Python | mit | 4,341 | 0.002534 | import logging
from plumeria.command import commands, CommandError
from plumeria.core.storage import pool
from plumeria.core.user_prefs import prefs_manager
from plumeria.message import Message
from plumeria.message.mappings import build_mapping
from plumeria.perms import direct_only
from plumeria.core.scoped_config.manager import ScopedConfig, ScopedValue
from plumeria.core.scoped_config.storage import DatabaseConfig
__requires__ = ['plumeria.core.storage']
logger = logging.getLogger(__name__)
scoped_config = ScopedConfig()
def find_preference(name):
try:
return prefs_manager.get_preference(name)
except KeyError:
raise CommandError("No such preference **{}** exists.".format(name))
@commands.create('pref set', 'prefs set', 'pset', cost=4, category='User Preferences')
@direct_only
async def set(message: Message):
"""
Set a user preference for yourself.
    The value of a preference marked private cannot be seen again after it has been set.
Example::
/pset ifttt_maker_key N1waEaZ2rUKxbTMTdf
"""
parts = message.content.split(" ", 1)
if len(parts) != 2:
raise CommandError("<name> <value>")
name, raw_value = parts
pref = find_preference(name)
try:
| await prefs_manager.put(pref, message.author, raw_value)
shown_value = raw_value if not pref.private else "(private)"
return "Set **{}** to '{}' for yourself.".format(name, shown_value)
except (NotImplementedError, ValueError) as e:
raise CommandError("Could not set **{}**: {}". | format(name, str(e)))
@commands.create('pref unset', 'prefs unset', 'punset', cost=4, category='User Preferences')
@direct_only
async def unset(message: Message):
"""
Remove a user preference that you have set.
Example::
/punset ifttt_maker_key
"""
pref = find_preference(message.content.strip())
try:
await prefs_manager.remove(pref, message.author)
return "Removed **{}**.".format(pref.name)
except KeyError:
raise CommandError("You haven't set that preference.")
except NotImplementedError as e:
raise CommandError("Could not delete **{}**: {}".format(pref.name, str(e)))
@commands.create('pref get', 'prefs get', 'pget', cost=4, category='User Preferences')
async def get(message: Message):
"""
Get what you have set for a preference.
The values of private preferences will not be shown.
Example::
/pget some_var
"""
pref = find_preference(message.content.strip())
try:
value = await prefs_manager.get(pref, message.author)
shown_value = value if not pref.private else "(private)"
return shown_value
except KeyError:
raise CommandError("You haven't set that preference.")
except NotImplementedError as e:
raise CommandError("Could not get **{}**: {}".format(pref.name, str(e)))
@commands.create('pref list', 'prefs list', 'prefs', cost=4, category='User Preferences')
async def list(message: Message):
"""
Get a list of preferences that have been set for yourself.
Private preferences will not be shown with their value.
"""
try:
prefs = await prefs_manager.get_all(message.author)
if len(prefs):
items = [(pref.name, value if not pref.private else "(private)") for pref, value in prefs]
return build_mapping(items)
else:
raise CommandError("You have not set any preferences.")
except NotImplementedError as e:
raise CommandError("Could not get your preferences: {}".format(str(e)))
@commands.create('pref defaults', 'prefs defaults', cost=4, category='User Preferences')
async def list_defaults(message: Message):
"""
Get a list of preferences that can be set.
"""
prefs = prefs_manager.get_preferences()
if len(prefs):
items = [(pref.name, '{} (value: {})'.format(pref.comment, pref.fallback)) for pref in prefs]
return build_mapping(items)
else:
raise CommandError("No preferences exist to be set.")
async def setup():
db_config = DatabaseConfig(pool)
await db_config.init()
scoped_config.provider = db_config
commands.add(set)
commands.add(unset)
commands.add(get)
commands.add(list)
commands.add(list_defaults)
|
MD-Studio/MDStudio | core/logger/logger/application.py | Python | apache-2.0 | 4,843 | 0.003097 | # -*- coding: utf-8 -*-
from logger.log_repository import LogRepository
from mdstudio.api.api_result import APIResult
from mdstudio.api.comparison import Comparison
from mdstudio.api.endpoint import endpoint, cursor_endpoint
from mdstudio.api.exception import CallException
from mdstudio.component.impl.core import CoreComponentSession
from mdstudio.deferred.chainable import chainable
from mdstudio.deferred.return_value import return_value
from mdstudio.logging.log_type import LogType
class LoggerComponent(CoreComponentSession):
# type: LogRepository
logs = None
# type: LoggerComponent.ComponentWaiter
db_waiter = None
# type: LoggerComponent.ComponentWaiter
schema_waiter = None
@chainable
def on_run(self):
yield self.call('mdstudio.auth.endpoint.ring0.set-status', {'status': True})
yield super(LoggerComponent, self).on_run()
def pre_init(self):
self.logs = LogRepository(self.db)(self.grouprole_context('mdstudio', 'logger'))
self.db_waiter = self.ComponentWaiter(self, 'db', self.group_context('mdstudio'))
self.schema_waiter = self.ComponentWaiter(self, 'schema', self.group_context('mdstudio'))
self.component_waiters.append(self.db_waiter)
self.component_waiters.append(self.schema_waiter)
super(LoggerComponent, self).pre_init()
@cursor_endpoint('mdstudio.logger.endpoint.get-logs', 'get/logs-request/v1', 'get/logs-response/v1')
def get_logs(self, request, claims, **kwargs):
with self.grouprole_context('mdstudio', 'logger'):
logfilter = {}
            if 'level' in request:
                # The comparison and value come from the request (the filter
                # dict is still being built here); the level name is mapped
                # to its numeric value via _map_level.
                logfilter['level'] = {
                    '${}'.format(Comparison.from_string(request['level']['comparison'])): self._map_level({'level': request['level']['value']})['level']
                }
            if 'source' in request:
                logfilter['source'] = {'$regex': request['source']['pattern']}
if 'options' in request['source']:
logfilter['source']['$options'] = request['source']['options']
if 'time' in request:
logfilter['time'] = {}
if 'since' in request['time']:
logfilter['time']['$gte'] = request['time']['since']
if 'until' in request['time']:
logfilter['time']['$lte'] = request['time']['until']
if 'createdAt' in request:
logfilter['createdAt'] = {}
if 'since' in request['createdAt']:
logfilter['createdAt']['$gte'] = request['createdAt']['since']
if 'until' in request['createdAt']:
logfilter['createdAt']['$lte'] = request['createdAt']['until']
return self.logs.get(logfilter, claims, **kwargs)
@endpoint('push-logs', 'push/logs-request/v1', 'push/logs-response/v1')
@chainable
def push_logs(self, request, claims=None):
try:
res = yield self.logs.insert(self._clean_claims(claims), [self._map_level(l) for l in request['logs']])
except CallException as _:
return_value(APIResult(error='The database is not online, please try again later.'))
else:
return_value({
'inserted': len(res)
})
@endpoint('push-event', 'push/event-request/v1', 'push/event-response/v1')
@chainable
def push_event(self, request, claims=None | ):
try:
event = request['event']
tags = event.pop('tags')
res = yield self.logs.insert(self._clean_claims(claims), [self._map_level(event)], tags)
except CallException as _:
return_value(APIResult(error='The database is not online, please try again later.'))
else:
return_value(len(res) | )
def authorize_request(self, uri, claims):
connection_type = LogType.from_string(claims['logType'])
        if connection_type == LogType.User:
            return 'username' in claims
        elif connection_type == LogType.Group:
            return 'group' in claims
elif connection_type == LogType.GroupRole:
return all(key in claims for key in ['group', 'role'])
return False
@staticmethod
def _map_level(log, from_int=False):
from_map = {
'debug': 0,
'info': 10,
'warn': 20,
'error': 30,
'critical': 40
}
to_map = {
0: 'debug',
10: 'info',
20: 'warn',
30: 'error',
40: 'critical'
}
if from_int:
log['level'] = to_map[log['level']]
else:
log['level'] = from_map[log['level']]
return log
@staticmethod
def _clean_claims(claims):
return claims
|
nandub/yammer | lib/KeyStore.py | Python | gpl-2.0 | 2,728 | 0.025293 | # Copyright 2002, 2004 John T. Reese.
# email: jtr at ofb.net
#
# This file is part of Yammer.
#
# Yammer is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Yammer is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Yammer; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os, string, time
from InstallSettings import settings
import YOS, glob, Gale
from pygale.pygale import *
import pygale.authcache
def havePrivateFor(recp):
return authcache.have_a_privkey([recp])
def privRoot():
return os.environ['HOME'] + "/.gale/auth/private/"
def privPath(recp):
path= privRoot() + recp
if os.path.exists(path):
return path
path= path + ".gpri"
if os.path.exists(path):
return path
return None
def getFullname(keyname):
n, k= lookup_location(keyname, do_akd=False)
if not isinstance(k, (tuple, list)) or not hasattr(k[0], 'comment'):
return '?? %r ??' % k
return k[0].comment()
def userFromKey(key):
key= os.path.basename(key)
if len(key) > 5 and key[-5:] == '.gpri':
key= key[:-5]
return key
# returns the modification time of the private key file for the given key
def getKeyTime(key):
p= privPath(key)
return os.stat(p).st_mtime
def listPrivateUsers():
privs= []
for domain in settings['galeDomains'].split():
privs += glob.glob(privRoot() + "*@" + domain)
privs += glob.glob(privRoot() + "*@" + domain \
+ ".gpri")
return map(userFromKey, privs)
def pubPath(recp):
path= os.environ['HOME'] + "/.gale/auth/local/" + recp
if os.path.exists(path):
return path
path= privRoot() + recp + ".gpub"
if os.path.exists(path):
return path
return None
def gkgen(id, name, source=None):
from YGaleClient import YGaleClient
if string.find(id, '@') == -1:
        # was a string exception, which is invalid from Python 2.6 onwards
        raise ValueError('nodomain')
error= ''
gkf= YOS.psafepopen(['gkgen', id, '/' + name], 'r')
while 1:
line= gkf.readline()
if not line: break
error= error + line
if gkf.close() == | 0:
try:
yg= YGaleClient()
msg= 'new key: %(id)s %(name)s' % locals()
if | source is not None:
msg += ' (via %s)' % source
yg.gsend(id, ['_gale.notice.' + id], msg)
except:
pass
return
else:
return error
|
bobflagg/deepER | notebook/evaluation.py | Python | apache-2.0 | 5,910 | 0.00643 | from collections import OrderedDict
def _update_chunk(candidate, prev, current_tag, current_chunk, current_pos, prediction=False):
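    # Chunks are tracked in current_chunk as [start, end] index pairs: a 'B-'
    # tag closes any still-open chunk and opens a new one, an 'I-' tag opens
    # a chunk only when it begins a sequence, and any other tag closes the
    # chunk that is currently open.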
if candidate == 'B-' + current_tag:
if len(current_chunk) > 0 and len(current_chunk[-1]) == 1:
current_chunk[-1].append(current_pos - 1)
current_chunk.append([current_pos])
elif candidate == 'I-' + current_tag:
if prediction and (current_pos == 0 or current_pos > 0 and prev.split('-', 1)[-1] != current_tag):
current_chunk.append([current_pos])
if not prediction and (current_pos == 0 or current_pos > 0 and prev == 'O'):
current_chunk.append([current_pos])
elif current_pos > 0 and prev.split('-', 1)[-1] == current_tag:
if len(current_chunk) > 0:
current_chunk[-1].append(current_pos - 1)
def _update_last_chunk(current_chunk, current_pos):
if len(current_chunk) > 0 and len(current_chunk[-1]) == 1:
current_chunk[-1].append(current_pos - 1)
def _tag_precision_recall_f1(tp, fp, fn):
precision, recall, f1 = 0, 0, 0
if tp + fp > 0:
precision = tp / (tp + fp) * 100
if tp + fn > 0:
recall = tp / (tp + fn) * 100
if precision + recall > 0:
f1 = 2 * precision * recall / (precision + recall)
return precision, recall, f1
def _aggregate_metrics(results, total_correct):
total_true_entities = 0
total_predicted_entities = 0
total_precision = 0
total_recall = 0
total_f1 = 0
for tag, tag_metrics in results.items():
n_pred = tag_metrics['n_predicted_entities']
n_true = tag_metrics['n_true_entities']
total_true_entities += n_true
total_predicted_entities += n_pred
total_precision += tag_metrics['precision'] * n_pred
total_recall += tag_metrics['recall'] * n_true
accuracy = total_correct / total_true_entities * 100
if total_predicted_entities > 0:
total_precision = total_precision / total_predicted_entities
total_recall = total_recall / total_true_entities
if total_precision + total_recall > 0:
total_f1 = 2 * total_precision * total_recall / (total_precision + total_recall)
return total_true_entities, total_predicted_entities, \
total_precision, total_recall, total_f1, accuracy
def _print_info(n_tokens, total_true_entities, total_predicted_entities, total_correct):
print('processed {len} tokens ' \
'with {tot_true} phrases; ' \
'found: {tot_pred} phrases; ' \
'correct: {tot_cor}.\n'.format(len=n_tokens,
tot_true=total_true_entities,
tot_pred=total_predicted_entities,
tot_cor=total_correct))
def _print_metrics(accuracy, total_precision, total_recall, total_f1):
    print('accuracy: {acc:.2f}%; ' \
          'precision: {tot_prec:.2f}%; ' \
          'recall: {tot_recall:.2f}%; ' \
          'F1: {tot_f1:.2f}\n'.format(acc=accuracy,
| tot_prec=total_precision,
tot_recall=total_recall,
tot_f1=total_f1))
def _print_tag_metrics(tag, tag_results):
print(('\t%12s' % tag) + ': precision: {tot_prec:6.2f}%; ' \
'recall: {tot_recall:6.2f}%; ' \
'F1: {tot_f1:6.2f}; ' \
'predicted: {tot_p | redicted:4d}\n'.format(tot_prec=tag_results['precision'],
tot_recall=tag_results['recall'],
tot_f1=tag_results['f1'],
tot_predicted=tag_results['n_predicted_entities']))
def precision_recall_f1(y_true, y_pred, print_results=True, short_report=False):
# Find all tags
tags = sorted(set(tag[2:] for tag in y_true + y_pred if tag != 'O'))
results = OrderedDict((tag, OrderedDict()) for tag in tags)
n_tokens = len(y_true)
total_correct = 0
# For eval_conll_try we find all chunks in the ground truth and prediction
# For each chunk we store starting and ending indices
for tag in tags:
true_chunk = list()
predicted_chunk = list()
for position in range(n_tokens):
_update_chunk(y_true[position], y_true[position - 1], tag, true_chunk, position)
_update_chunk(y_pred[position], y_pred[position - 1], tag, predicted_chunk, position, True)
_update_last_chunk(true_chunk, position)
_update_last_chunk(predicted_chunk, position)
# Then we find all correctly classified intervals
# True positive results
tp = sum(chunk in predicted_chunk for chunk in true_chunk)
total_correct += tp
# And then just calculate errors of the first and second kind
# False negative
fn = len(true_chunk) - tp
# False positive
fp = len(predicted_chunk) - tp
precision, recall, f1 = _tag_precision_recall_f1(tp, fp, fn)
results[tag]['precision'] = precision
results[tag]['recall'] = recall
results[tag]['f1'] = f1
results[tag]['n_predicted_entities'] = len(predicted_chunk)
results[tag]['n_true_entities'] = len(true_chunk)
total_true_entities, total_predicted_entities, \
total_precision, total_recall, total_f1, accuracy = _aggregate_metrics(results, total_correct)
if print_results:
_print_info(n_tokens, total_true_entities, total_predicted_entities, total_correct)
_print_metrics(accuracy, total_precision, total_recall, total_f1)
if not short_report:
for tag, tag_results in results.items():
_print_tag_metrics(tag, tag_results)
return results
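# A minimal usage sketch with made-up BIO-tagged sequences:
#
#   y_true = ['B-PER', 'I-PER', 'O', 'B-LOC']
#   y_pred = ['B-PER', 'I-PER', 'O', 'B-ORG']
#   results = precision_recall_f1(y_true, y_pred, short_report=True)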
|
DE-IBH/cmk_cisco-dom | perfometer/cisco_dom.py | Python | gpl-2.0 | 1,321 | 0.004542 | #!/usr/bin/python
# -*- encoding: utf-8; py-indent-offset: 4 -*-
# cmk_cisco-dom - check-mk plugin for SNMP-based Cisco Digital-Optical-Monitoring monitoring
#
# Authors:
# Thomas Liske <liske@ibh.de>
#
# Copyright Holder:
# 2015 - 2016 (C) IBH IT-Service GmbH [http://www.ibh.de/]
#
# License:
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOU | T ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this package; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# |
def perfometer_cisco_dom(row, check_command, perf_data):
color = { 0: "#a4f", 1: "#ff2", 2: "#f22", 3: "#fa2" }[row["service_state"]]
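    # DOM power readings are dBm values that are typically negative; the +20
    # offset below shifts them into the positive range expected by the
    # logarithmic perfometer scale.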
return "%.1f dBm" % perf_data[0][1], perfometer_logarithmic(perf_data[0][1] + 20, 20, 2, color)
perfometers["check_mk-cisco_dom"] = perfometer_cisco_dom
|
KHU-YoungBo/pybluez | bluetooth/__init__.py | Python | gpl-2.0 | 5,991 | 0.002337 | import sys
import os
if sys.version < '3':
from .btcommon import *
else:
from bluetooth.btcommon import *
__version__ = 0.22
def _dbg(*args):
return
sys.stderr.write(*args)
sys.stderr.write("\n")
if sys.platform == "win32":
_dbg("trying widcomm")
have_widcomm = False
dll = "wbtapi.dll"
sysroot = os.getenv ("SystemRoot")
if os.path.exists (dll) or \
os.path.exists (os.path.join (sysroot, "system32", dll)) or \
os.path.exists (os.path.join (sysroot, dll)):
try:
from . import widcomm
if widcomm.inquirer.is_device_ready ():
# if the Widcomm stack is active and a Bluetooth device on that
# stack is detected, then use the Widcomm stack
from .widcomm i | mport *
have_widcomm = True
except ImportError:
pass
if not have_widcomm:
# otherwise, fall back to the Microsoft stack
_dbg("Widcomm not ready. falling back to MS stack")
if | sys.version < '3':
from .msbt import *
else:
from bluetooth.msbt import *
elif sys.platform.startswith("linux"):
if sys.version < '3':
from .bluez import *
else:
from bluetooth.bluez import *
elif sys.platform == "darwin":
from .osx import *
else:
raise Exception("This platform (%s) is currently not supported by pybluez." % sys.platform)
discover_devices.__doc__ = \
"""
performs a bluetooth device discovery using the first available bluetooth
resource.
if lookup_names is False, returns a list of bluetooth addresses.
if lookup_names is True, returns a list of (address, name) tuples
lookup_names=False
if set to True, then discover_devices also attempts to lookup the
display name of each detected device.
if lookup_class is True, the class of the device is added to the tuple
"""
lookup_name.__doc__ = \
"""
Tries to determine the friendly name (human readable) of the device with
the specified bluetooth address. Returns the name on success, and None
on failure.
"""
advertise_service.__doc__ = \
"""
Advertises a service with the local SDP server. sock must be a bound,
listening socket. name should be the name of the service, and service_id
(if specified) should be a string of the form
"XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", where each 'X' is a hexadecimal
digit.
    service_classes is a list of service classes to which this service belongs.
Each class service is a 16-bit UUID in the form "XXXX", where each 'X' is a
hexadecimal digit, or a 128-bit UUID in the form
"XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX". There are some constants for
standard services, e.g. SERIAL_PORT_CLASS that equals to "1101". Some class
constants:
SERIAL_PORT_CLASS LAN_ACCESS_CLASS DIALUP_NET_CLASS
HEADSET_CLASS CORDLESS_TELEPHONY_CLASS AUDIO_SOURCE_CLASS
AUDIO_SINK_CLASS PANU_CLASS NAP_CLASS
GN_CLASS
profiles is a list of service profiles that thie service fulfills. Each
profile is a tuple with ( uuid, version). Most standard profiles use
standard classes as UUIDs. PyBluez offers a list of standard profiles,
for example SERIAL_PORT_PROFILE. All standard profiles have the same
name as the classes, except that _CLASS suffix is replaced by _PROFILE.
provider is a text string specifying the provider of the service
description is a text string describing the service
A note on working with Symbian smartphones:
bt_discover in Python for Series 60 will only detect service records
with service class SERIAL_PORT_CLASS and profile SERIAL_PORT_PROFILE
"""
stop_advertising.__doc__ = \
"""
Instructs the local SDP server to stop advertising the service associated
with sock. You should typically call this right before you close sock.
"""
find_service.__doc__ = \
"""
find_service (name = None, uuid = None, address = None)
Searches for SDP services that match the specified criteria and returns
the search results. If no criteria are specified, then returns a list of
all nearby services detected. If more than one is specified, then
the search results will match all the criteria specified. If uuid is
specified, it must be either a 16-bit UUID in the form "XXXX", where each
'X' is a hexadecimal digit, or as a 128-bit UUID in the form
"XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX". A special case of address is
"localhost", which will search for services on the local machine.
The search results will be a list of dictionaries. Each dictionary
represents a search match and will have the following key/value pairs:
host - the bluetooth address of the device advertising the
service
name - the name of the service being advertised
description - a description of the service being advertised
provider - the name of the person/organization providing the service
protocol - either 'RFCOMM', 'L2CAP', None if the protocol was not
specified, or 'UNKNOWN' if the protocol was specified but
unrecognized
port - the L2CAP PSM # if the protocol is 'L2CAP', the RFCOMM
channel # if the protocol is 'RFCOMM', or None if it
wasn't specified
service-classes - a list of service class IDs (UUID strings). possibly
empty
profiles - a list of profiles - (UUID, version) pairs - the
service claims to support. possibly empty.
service-id - the Service ID of the service. None if it wasn't set
See the Bluetooth spec for the difference between
Service ID and Service Class ID List
"""
|
ipfs/py-ipfs-api | test/unit/test_http_httpx.py | Python | mit | 2,058 | 0.026725 | # Only add tests to this file if they really are specific to the behaviour
# of this backend. For cr | oss-backend or `http_common.py` tests use
# `test_http.py` instead.
import http.cookiejar
import math
import pytest
pytest.importorskip("ipfshttpclient.http_httpx")
import ipfshttpclient.http_httpx
cookiejar = http.cookiejar.CookieJar()
@pytest.mark.parametrize("kwargs,expe | cted", [
({}, {}),
({
"auth": ("user", "pass"),
"cookies": cookiejar,
"headers": {"name": "value"},
"params": (("name", "value"),),
"timeout": (math.inf, math.inf),
}, {
"auth": ("user", "pass"),
"cookies": cookiejar,
"headers": {"name": "value"},
"params": [("name", "value")],
"timeout": (None, None, None, None),
}),
({
"auth": ("user", b"pass"),
"cookies": {"name": "value"},
"headers": ((b"name", b"value"),),
"timeout": 34,
}, {
"auth": ("user", b"pass"),
"cookies": {"name": "value"},
"headers": ((b"name", b"value"),),
"timeout": 34,
}),
])
def test_map_args_to_httpx(kwargs, expected):
assert ipfshttpclient.http_httpx.map_args_to_httpx(**kwargs) == expected
@pytest.mark.parametrize("args,kwargs,expected_kwargs,expected_base,expected_laddr", [
(("/dns/localhost/tcp/5001/http", "api/v0"), {}, {
"params": [("stream-channels", "true")],
}, "http://localhost:5001/api/v0/", None),
(("/dns6/ietf.org/tcp/443/https", "/base/"), {
"auth": ("user", "pass"),
"cookies": cookiejar,
"headers": {"name": "value"},
"offline": True,
"timeout": (math.inf, math.inf),
}, {
"auth": ("user", "pass"),
"cookies": cookiejar,
"headers": {"name": "value"},
"params": [("offline", "true"), ("stream-channels", "true")],
"timeout": (None, None, None, None),
}, "https://ietf.org:443/base/", "::"),
])
def test_client_args_to_session_kwargs(args, kwargs, expected_kwargs, expected_base, expected_laddr):
client = ipfshttpclient.http_httpx.ClientSync(*args, **kwargs)
assert client._session_kwargs == expected_kwargs
assert client._session_base == expected_base
assert client._session_laddr == expected_laddr
|
clungzta/rsync_ros | src/rsync_server_node.py | Python | bsd-3-clause | 4,108 | 0.003651 | #!/usr/bin/env python
# Software License Agreement (BSD License)
#
# Copyright (c) 2016, Alex McClung
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of the author may be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import rospy
import roslib; roslib.load_manifest('rsync_ros')
import actionlib
import os
from rsync import Rsync
from rsync_ros.msg import RsyncAction, RsyncResult, RsyncFeedback
class RsyncActionServer:
def __init__(self, name):
self._action_name = name
self.server = actionlib.SimpleActionServer(self._action_name, RsyncAction, self.execute, False)
self.server.start()
rospy.loginfo("Ready to sync files.")
def progress_update_cb(self, line, percent_complete, transfer_rate):
#This is run everytime the progress is published to stdout
#rospy.loginfo('Total transfer percentage: {}'.format(percent_complete))
self.feedback.percent_complete = percent_complete
self.feedback.transfer_rate = transfer_rate
self.server.publish_feedback(self.feedback)
if line:
rospy.loginfo(line)
# check if preempt (cancel action) has been requested by the client
if self.server.is_preempt_requested():
# Get the process id & try to terminate it gracefuly
pid = self.rsync.p.pid
self.rsync.p.terminate()
# Check if the process has really terminated & force kill if not.
try:
os.kill(pid, 0)
self.rsync.p.kill()
print "Forced kill"
except OSError, e:
print "Terminated gracefully"
rospy.loginfo('%s: Preempted' % self._action_name)
self.server.set_preempted() #TO-DO, fix logic error changing states upon preempt request
def execute(self, goal):
self.result = RsyncResult()
self.feedback = RsyncFeedback()
rospy.loginfo("Executing rsync command '%s %s %s'", 'rsync ' + ' '.join(goal.rsync_args) + ' --progress --outbuf=L', goal.source_path, goal.destination_path)
self.rsync = Rsync(goal.rsync_args, goal.source_path, goal.destination_path, progress_callback=self.progress_update_cb)
sel | f.result.sync_success = self.rsync.sync()
if not self.server.is_preempt_requested():
if self.rsync.stderr_block:
rospy.logerr('\n{}'.format(self.rsync.stderr_block))
rospy.loginfo("Rsync command result '%s'", self.result.sync_success)
self.server.set_succeeded(self.result)
if __name__ == "__main__":
try:
rospy.init_node('rsync_ros')
| RsyncActionServer(rospy.get_name())
rospy.spin()
except rospy.ROSInterruptException:
pass
|
katthjul/anifs | setup.py | Python | mpl-2.0 | 212 | 0.004717 | #!/usr/bin/env python
# distutils.core.setup silently ignores install_requires; setuptools is needed
from setuptools import setup
s | etup(name='anifs',
version='0. | 1',
packages=['anifs'],
scripts=['bin/anifs'],
install_requires=[
"adba",
],
)
|
tarikgwa/nfd | newfies/dialer_contact/templatetags/dialer_contact_tags.py | Python | mpl-2.0 | 689 | 0 | #
# Newfies-Dialer License
# http://www.newfies-di | aler.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The primary maintainer of this project is
# Arezqui Belaid <info@star2billing.com>
#
from django.template | .defaultfilters import register
@register.filter(name='contact_status')
def contact_status(value):
"""Contact status
>>> contact_status(1)
'ACTIVE'
>>> contact_status(2)
'INACTIVE'
"""
    return 'ACTIVE' if value == 1 else 'INACTIVE'
|
zhengger/seafile | tests/sync-auto-test/test_cases/test_simple.py | Python | gpl-2.0 | 1,826 | 0.007119 | # | coding: utf-8
import os
import time
from . import test_util
def test_add_file():
test_util.mkfile(1, 'a.md', 'add a file')
test_util.verify_result()
def test_add_file_t():
test_util.mkfile(2, 'l/m/n/test.md', 'add l/m/n/test.md')
test_ | util.verify_result()
def test_add_dir():
test_util.mkdir(1, 'ad')
test_util.verify_result()
def test_add_dir_t():
test_util.mkdir(2, 'tt/ee/st')
test_util.verify_result()
def test_modify_file():
test_util.modfile(1, 'a.md', 'modify a.md')
test_util.verify_result()
def test_rm_file():
test_util.rmfile(1, 'a.md')
test_util.verify_result()
def test_rm_dir():
test_util.rmdir(1, 'ad')
test_util.verify_result()
def test_rename_file():
test_util.mkfile(2, 'b.md', 'add b.md')
time.sleep(1)
test_util.move(2, 'b.md', 'b_bak.md')
test_util.verify_result()
def test_rename_dir():
test_util.mkdir(2, 'ab')
time.sleep(1)
test_util.move(2, 'ab', 'ab_bak')
test_util.verify_result()
def test_each():
test_util.mkdir(1, 'abc1')
test_util.mkfile(1, 'abc1/c.md', 'add abc1/c.md')
time.sleep(1)
test_util.mkdir(2, 'bcd1')
test_util.mkfile(2, 'bcd1/d.md', 'add bcd1/d.md')
test_util.verify_result()
def test_unsync_resync():
test_util.desync_cli1()
test_util.rmdir(1, 'abc1')
test_util.modfile(1, 'bcd1/d.md', 'modify bcd1/d.md to test unsync resync')
test_util.sync_cli1()
test_util.verify_result()
if not os.path.exists(test_util.getpath(1, 'abc1')):
assert False, 'dir abc1 should be recreated when resync'
if len(os.listdir(test_util.getpath(1, 'bcd1'))) != 2:
assert False, 'should generate conflict file for bcd1/d.md when resync'
def test_modify_timestamp():
test_util.touch(1, 'bcd1/d.md')
test_util.verify_result()
|
deljuven/Guozijian | face/ImageDetector.py | Python | gpl-3.0 | 2,562 | 0 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
import time
from io import BytesIO
import requests
from PIL import Image, ImageDraw
from facepp import API, File
API_KEY = '185839fab47af2675b5e458275215a39'
API_SECRET = 'x7EXrX4c5WgIjAQ9un3SgI4-QYTad7Dx'
class ImageDetector:
''' Image Detector class demo'''
file_path = None
api = None
def __init__(self, img_url, base_path):
picture_result = requests.get(img_url)
img = Image.open(BytesIO(picture_result.content))
timestamp = time.strftime("%Y%m%d-%H%M%S", time.localtime(time.time()))
path = base_path + '/' + timestamp + '.png'
img.save(path)
self.file_path = path
self.api = API(API_KEY, API_SECRET)
def detect(self, width=5):
file = File(self.file_path)
middle_result = self.api.detection.detect(img=file)
result = self.api.wait_async(middle_result["session_id"])
img_height = result['result']['img_height']
img_width = result['result']['img_width']
faces = result['result']['face']
face_counts = len(faces)
print 'Detected ' + str(face_counts) + ' face(s)'
print 'Now drawing faces on picture...'
image = Image.open(self.file_path)
draw = ImageDraw.Draw(image)
for face in faces:
center_x = face['position']['center']['x']
center_y = face['position']['center']['y']
height = face['position']['height']
width = face['position']['width']
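            # Face++ reports center/width/height as percentages of the image
            # dimensions, hence the * 0.01 when converting to pixel coordinates.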
x0 = (center_x - 0.5 * width) * img_width * 0.01
y0 = (center_y - 0.5 * height) * img_height * 0.01
x1 = (center_x + 0.5 * width) * img_width * 0.01
y1 = (center_y + 0.5 * height) * img_height * 0.01
# # down
# draw.line([x0, y0, x1, y0], fill='red', width=width)
# # up
# draw.line([x0, y1, x1, y1], fill='red', width=width)
# # left
# draw.line([x0, y0, x0, y1], fill='red', width=width)
# # right
# draw.line([x1, y0, x1, y1], fill='red', width=width)
draw.rectangle([x0, y0, x1, y1], outline=' | red')
del draw
image.save(self.file_path)
# return: face counts
return face_counts
# if __name__ == '__main__':
# try:
# vs = VideoService()
# url = vs.take_picture()
# detector = ImageDetector(url)
# faces = detector.detect()
# detector.save_to_db(faces)
# except VideoException as e:
# | print e.msg
|
jku/telepathy-gabble | tests/twisted/vcard/update-get-failed.py | Python | lgpl-2.1 | 1,108 | 0.004513 |
"""
Test the case where the vCard get made prior to a vCard set fails.
"""
from servicetest import call_async, EventPattern
from gabbletest import (
acknowledge_iq, elem, exec_test, make_result_iq, sync_stream)
import constants as cs
import ns
def test(q, bus, conn, stream):
event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, event.stanza)
# Force Gabble to process the vCard before calling any methods.
sync_stream(q, stream)
handle = conn. | GetSelfHandle()
call_async(q, conn.Avatars, 'SetAvatar', 'william shatner',
| 'image/x-actor-name')
event = q.expect('stream-iq', iq_type='get', to=None,
query_ns='vcard-temp', query_name='vCard')
reply = make_result_iq(stream, event.stanza)
reply['type'] = 'error'
reply.addChild(elem('error')(
elem(ns.STANZA, 'forbidden')(),
elem(ns.STANZA, 'text')(u'zomg whoops')))
stream.send(reply)
event = q.expect('dbus-error', method='SetAvatar', name=cs.NOT_AVAILABLE)
if __name__ == '__main__':
exec_test(test)
|
TheCodingMonkeys/checkin-at-fmi | checkinatfmi_project/university/tests.py | Python | agpl-3.0 | 694 | 0 | """
This file demonstrates writing tests using the unittest module. | These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
from models import Place
class SimpleTest(TestCase):
def test_simple_place_creation(se | lf):
"""
Creates test place
"""
places = Place.objects.filter(name="Test Place")
[place.delete() for place in places]
place = Place()
place.name = "Test Place"
place.capacity = 20
place.save()
        place = Place.objects.filter(name="Test Place").first()
print place
self.assertNotEqual(place, None)
|
googleapis/python-certificate-manager | samples/generated_samples/certificatemanager_v1_generated_certificate_manager_update_certificate_async.py | Python | apache-2.0 | 1,628 | 0.001843 | # -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for UpdateCertificate
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-certificate-manager
# [START certificatemanager_v1_generated_CertificateManager_UpdateCertificate_async]
from google.cloud import certificate_manager_v1
async def sample_update_certificate():
# Create a client
client = certificate_manager_v1.CertificateManagerAsyncClient()
# Initialize request argument(s)
request = c | ertificate_manager_v1.UpdateCertificateRequest(
)
# Make the request
operation = client.update_certificate(request=request)
print("Waiting for operation to complete...")
response = await operation.result()
# Handle the response
print(response)
# [END c | ertificatemanager_v1_generated_CertificateManager_UpdateCertificate_async]
|
gsi-upm/SmartSim | smartbody/data/examples/Tutorials/7_FacialAnimationSpeech.py | Python | apache-2.0 | 2,969 | 0.000337 | print "|--------------------------------------------|"
print "| Starting Tutorial 7 |"
print "|--------------------------------------------|"
print 'media path = ' + scene.getMediaPath()
# Add asset paths
assetManager = scene.getAssetManager()
assetManager.addAssetPath('motion', 'ChrBrad')
assetManager.addAssetPath('mesh', 'mesh')
ass | etManager.addAssetPath('script', 'scripts')
# Load assets based on asset paths
assetManager.loadAssets()
# set the camera
scene.setScale(1.0)
scene.run('default-viewer.py')
# run a Python script file
scene.run('zebra2-map.py')
zebra2Map = scene.getJointMapManager().getJointMap('zebra2')
bradSkeleton = scene.getSkeleton('ChrBrad.sk')
zebra2Map.applySkeleton(bradSkeleton) |
zebra2Map.applyMotionRecurse('ChrBrad')
# Set up Brad
brad = scene.createCharacter('ChrBrad', '')
bradSkeleton = scene.createSkeleton('ChrBrad.sk')
brad.setSkeleton(bradSkeleton)
# Set standard controller
brad.createStandardControllers()
print 'Setting up Brad\'s face definition'
bradFace = scene.createFaceDefinition('ChrBrad')
bradFace.setFaceNeutral('ChrBrad@face_neutral')
bradFace.setAU(1, "left", "ChrBrad@001_inner_brow_raiser_lf")
bradFace.setAU(1, "right", "ChrBrad@001_inner_brow_raiser_rt")
bradFace.setAU(2, "left", "ChrBrad@002_outer_brow_raiser_lf")
bradFace.setAU(2, "right", "ChrBrad@002_outer_brow_raiser_rt")
bradFace.setAU(4, "left", "ChrBrad@004_brow_lowerer_lf")
bradFace.setAU(4, "right", "ChrBrad@004_brow_lowerer_rt")
bradFace.setAU(5, "both", "ChrBrad@005_upper_lid_raiser")
bradFace.setAU(6, "both", "ChrBrad@006_cheek_raiser")
bradFace.setAU(7, "both", "ChrBrad@007_lid_tightener")
bradFace.setAU(10, "both", "ChrBrad@010_upper_lip_raiser")
bradFace.setAU(12, "left", "ChrBrad@012_lip_corner_puller_lf")
bradFace.setAU(12, "right", "ChrBrad@012_lip_corner_puller_rt")
bradFace.setAU(25, "both", "ChrBrad@025_lips_part")
bradFace.setAU(26, "both", "ChrBrad@026_jaw_drop")
bradFace.setAU(45, "left", "ChrBrad@045_blink_lf")
bradFace.setAU(45, "right", "ChrBrad@045_blink_rt")
bradFace.setViseme("open", "ChrBrad@open")
bradFace.setViseme("W", "ChrBrad@W")
bradFace.setViseme("ShCh", "ChrBrad@ShCh")
bradFace.setViseme("PBM", "ChrBrad@PBM")
bradFace.setViseme("FV", "ChrBrad@FV")
bradFace.setViseme("wide", "ChrBrad@wide")
bradFace.setViseme("tBack", "ChrBrad@tBack")
bradFace.setViseme("tRoof", "ChrBrad@tRoof")
bradFace.setViseme("tTeeth", "ChrBrad@tTeeth")
brad.setFaceDefinition(bradFace)
# Deformable mesh
brad.setDoubleAttribute('deformableMeshScale', .01)
brad.setStringAttribute('deformableMesh', 'ChrBrad.dae')
# show the character
brad.setStringAttribute('displayType', 'GPUmesh')
# Starting the simulation
sim.start()
# set posture
bml.execBML('ChrBrad', '<body posture="ChrBrad@Idle01"/>')
#bml.execBML('ChrBrad','<face type="facs" au="1_left" amount="1"/>')
sim.resume()
|
Jolopy/GimpHub | app/Gimp_Plugins_Bak/continious_test.py | Python | gpl-2.0 | 10,833 | 0.004154 | #!/usr/bin/env python
from socketIO_client import SocketIO, BaseNamespace
from gimpfu import *
import os
import time
from threading import Thread
from array import array
import requests
import configparser
import websocket
import _thread
import http.client
class GimpHubImage(object):
def __init__(self, drawable):
self.currentImage = self.get_pix()
self.drawable = drawable
self.update_suspended = False
def set_pix(self, x, y, r, g, b):
pdb.gimp_drawable_set_pixel(self.drawable, y, x, 3, [r, g, b])
def split_img_evenly(self, n):
activeImage, layer, tm, tn = self._get_active_image()
vertical = layer.height / n
srcRgn = layer.get_pixel_rgn(0, 0, layer.width, layer.height,
False, False)
# not done
def get_pix(self):
activeImage, layer, tm, tn = self._get_active_image()
srcRgn = layer.get_pixel_rgn(0, 0, layer.width, layer.height,
False, False)
src_pixels = array("B", srcRgn[0:layer.width, 0:layer.height])
imageArr = []
index = 0
for x in range(layer.width):
row = []
for y in range(layer.height):
row.append(src_pixels[index:index+3])
index += 3
imageArr.append(row)
#print src_pixels
return imageArr
def get_changes(self):
if self.update_suspended:
return []
activeImage, layer, tm, tn = self._get_active_image()
changes = []
srcRgn = layer.get_pixel_rgn(0, 0, layer.width, layer.height,
False, False)
src_pixels = array("B", srcRgn[0:layer.width, 0:layer.height])
verificationArray = []
changes = []
outerIndex = 0
print("---------------------------------------------------")
while True:
if outerIndex % 2 == 0:
changes = []
workingArr = changes
else:
verificationArray = []
workingArr = verificationArray
index = 0
for x in range(layer.width):
#row = []
for y in range(layer.height):
#row.append(src_pixels[index:index + 3])
# Save the value in the channel layers.
# print "(%s, %s) : (%r, %r, %r)" % (x, y, pixelR, pixelG, pixelB)
if self.currentImage[x][y] != src_pixels[index:index + 3]:
workingArr.append((x, y, src_pixels[index],
src_pixels[index+1],
src_pixels[index+2]))
index += 3
outerIndex += 1
if changes == verificationArray:
for change in changes:
self.currentImage[change[0]][change[1]] = array('B', change[2:5])
break
time.sleep(0.2)
return changes
def _get_active_image(self):
activeImage = gimp.image_list()[0]
layer = pdb.gimp_image_get_active_layer(activeImage)
# Calculate the number of tiles.
tn = int(layer.width / 64)
if (layer.width % 64 > 0):
tn += 1
tm = int(layer.height / 64)
if (layer.height % 64 > 0):
tm += 1
return activeImage, layer, tm, tn
def _get_img_pixels(self):
activeImage, layer, tm, tn = self._get_active_image()
imageArr = []
# Iterate over the tiles.
for i in range(tn):
for j in range(tm):
# Get the tiles.
tile = layer.get_tile(False, j, i)
# Iterate over the pixels of each tile.
for x in range(tile.ewidth):
row = []
for y in range(tile.eheight):
# Get the pixel and separate his colors.
pixel = tile[x, y]
                        pixelR = pixel[0] + "\x00\x00"
                        pixelG = "\x00" + pixel[1] + "\x00"
pixelB = "\x00\x00" + pixel[2]
# If the image has an alpha channel (or any other channel) copy his values.
if (len(pixel) > 3):
for k in range(len(pixel) - 3):
pixelR += pixel[k + 3]
pixelG += pixel[k + 3]
pixelB += pixel[k + 3]
# Save the value in the channel layers.
#print "(%s, %s) : (%r, %r, %r)" % (x, y, pixelR, pixelG, pixelB)
row.append([pixelR, pixelG, pixelB])
imageArr.append(row)
#print imageArr
return imageArr
class ChatNamespace(BaseNamespace):
def on_aaa_response(self, *args):
print('on_aaa_response', args)
class GimpHubLive(object):
def __init__(self, drawable, user):
#config = ConfigParser.ConfigParser()
#config.readfp(open(os.path.join(os.path.realpath(__file__), 'gimphub.ini')))
self.drawable = drawable
self.project = 'test2'
#self.user = 'paul@gmail.com'
self.user = user
#self.remote_server = "gimphub.duckdns.org"
self.remote_server = 'localhost'
self.remote_port = '5000'
self.lockfile_path = '/tmp/GHLIVE_LOCK_%s' % self.user
if os.path.exists(self.lockfile_path):
os.remove(self.lockfile_path)
#websocket.enableTrace(True)
self.running = True
self.socketIO = SocketIO(self.remote_server, self.remote_port)
self.socketIO.emit('connect')
self.chatNamespace = self.socketIO.define(ChatNamespace, '/chat')
self.chatNamespace.on('imgupdate', self.on_update)
self.chatNamespace.on('joined', self.on_joined)
self.chatNamespace.on('echo2', self.on_echo)
self.chatNamespace.emit('joined', self.user, self.project)
self.chatNamespace.emit('connect')
Thread(target=self.run_th).start()
time.sleep(2)
self.chatNamespace.emit('echo')
#
def on_update(self, obj):
print("UPDATE")
print(obj['user'] != self.user)
if obj['user'] != self.user and hasattr(self, 'GHIMG'):
self.GHIMG.update_suspended = True
for px in obj['update']:
#print px
self.GHIMG.set_pix(px[0], px[1], px[2], px[3], px[4])
pdb.gimp_drawable_update(self.drawable, 0, 0, self.drawable.width, self.drawable.height)
pdb.gimp_displays_flush()
self.GHIMG.update_suspended = False
def on_echo(self, *args):
print("ECHO")
def on_joined(self, *args):
print("JOINED")
print(args)
def run_th(self):
while True:
self.socketIO.wait(seconds=10)
if self.running is False:
print("SOCKETIO DISCONNECT")
self.socketIO.disconnect()
break
#
# def run_ws_th(self):
# self.ws.run_forever()
def send_update(self, update):
self.chatNamespace.emit('imgpush', update, self.project, self.user)
#
# url = "http://%s:%s/imgupdate" % (self.remote_server, self.remote_port)
# data = {'update': [list(x) for x in update], 'user':self.user, 'project':self.project}
# print data
# r = requests.post(url, data=data)
# print r
#
# def ws_on_message(self, ws, message):
# print message
#
# def ws_on_error(self, ws, error):
# print error
#
# def ws_on_close(self, ws):
# print "### closed ###"
#
# def ws_on_open(self, ws):
# def run(*args):
# ws.send("joined", self.user, self.project)
# while True:
# time.sleep(1)
# if self.running is False:
# ws.close()
# print "thread terminated"
    #                 return None
icarrera/django-imager | imagersite/imager_images/migrations/0005_photo.py | Python | mit | 1,508 | 0.003979 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-13 14:53
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('imager_images', '0004_auto_20160413_1452'),
]
operations = [
migrations.CreateModel(
name='Photo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(null=True, upload_to='photo_files/%Y-%m-%d')),
('title', models.CharField(blank=True, max_length=255)),
('description', models.TextField(blank=True)),
('date_uploaded', models.DateTimeField(auto_now_add=True)),
('date_modified', models.DateTimeField(auto_now=True)),
                ('date_published', models.DateTimeField(blank=True, null=True)),
                ('published', models.CharField(choices=[('private', 'private'), ('shared', 'shared'), ('public', 'public')], default='private', max_length=255)),
('photos', models.ManyToManyField(related_name='photos', to='imager_images.Album')),
('user', models.ForeignKey(default=None, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
Karaage-Cluster/karaage | karaage/management/commands/vacant_projects.py | Python | gpl-3.0 | 1,034 | 0.001934 | import sys
import django.db.transaction
from django.core.management.base import BaseCommand
from karaage.projects.models import Project
class Command(BaseCommand):
help = "return a list of projects with no currently active users"
@django.db.transaction.non_atomic_requests
def handle(self, *args, **options):
badProjects = 0
        for selectedProject in Project.objects.all():
members = selectedProject.group.members.all()
memberCount = 0
invalidCount = 0
for person in members:
if person.is_active and person.login_enabled:
memberCount += 1
else:
invalidCount += 1
if memberCount == 0:
badProjects += 1
                sys.stdout.write("{}: {} locked users".format(selectedProject.pid, invalidCount))
sys.stdout.write("\n")
sys.stdout.write("{} inactive/unpopulated projects\n".format(badProjects))
sys.stdout.write("Done\n")
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/scipy/optimize/_trustregion_ncg.py | Python | mit | 4,646 | 0 | """Newton-CG trust-region optimization."""
from __future__ import division, print_function, absolute_import
import math
import numpy as np
import scipy.linalg
from ._trustregion import (_minimize_trust_region, BaseQuadraticSubproblem)
__all__ = []
def _minimize_trust_ncg(fun, x0, args=(), jac=None, hess=None, hessp=None,
**trust_region_options):
"""
Minimization of scalar function of one or more variables using
the Newton conjugate gradient trust-region algorithm.
Options
-------
initial_trust_radius : float
Initial trust-region radius.
max_trust_radius : float
Maximum value of the trust-region radius. No steps that are longer
than this value will be proposed.
eta : float
Trust region related acceptance stringency for proposed steps.
gtol : float
Gradient norm must be less than `gtol` before successful
termination.
"""
if jac is None:
raise ValueError('Jacobian is required for Newton-CG trust-region '
'minimization')
if hess is None and hessp is None:
raise ValueError('Either the Hessian or the Hessian-vector product '
'is required for Newton-CG trust-region minimization')
return _minimize_trust_region(fun, x0, args=args, jac=jac, hess=hess,
hessp=hessp, subproblem=CGSteihaugSubproblem,
**trust_region_options)
class CGSteihaugSubproblem(BaseQuadraticSubproblem):
"""Quadratic subproblem solved by a conjugate gradient method"""
def solve(self, trust_radius):
"""
Solve the subproblem using a conjugate gradient method.
Parameters
----------
trust_radius : float
We are allowed to wander only this far away from the origin.
Returns
-------
p : ndarray
The proposed step.
hits_boundary : bool
True if the proposed step is on the boundary of the trust region.
Notes
-----
This is algorithm (7.2) of Nocedal and Wright 2nd edition.
Only the function that computes the Hessian-vector product is required.
The Hessian itself is not required, and the Hessian does
not need to be positive semidefinite.
"""
# get the norm of jacobian and define the origin
p_origin = np.zeros_like(self.jac)
# define a default tolerance
tolerance = min(0.5, math.sqrt(self.jac_mag)) * self.jac_mag
        # Stop immediately if the gradient norm at the origin is
        # already smaller than the tolerance.
if self.jac_mag < tolerance:
hits_boundary = False
return p_origin, hits_boundary
# init the state for the first iteration
z = p_origin
r = self.jac
d = -r
# Search for the min of the approximation of the objective function.
while True:
            # do an iteration
Bd = self.hessp(d)
dBd = np.dot(d, Bd)
if dBd <= 0:
# Look at the two boundary points.
# Find both values of t to get the boundary points such that
# ||z + t d|| == trust_radius
# and then choose the one with the predicted min value.
                ta, tb = self.get_boundaries_intersections(z, d, trust_radius)
pa = z + ta * d
pb = z + tb * d
if self(pa) < self(pb):
p_boundary = pa
else:
p_boundary = pb
hits_boundary = True
return p_boundary, hits_boundary
r_squared = np.dot(r, r)
alpha = r_squared / dBd
z_next = z + alpha * d
if scipy.linalg.norm(z_next) >= trust_radius:
# Find t >= 0 to get the boundary point such that
# ||z + t d|| == trust_radius
ta, tb = self.get_boundaries_intersections(z, d, trust_radius)
p_boundary = z + tb * d
hits_boundary = True
return p_boundary, hits_boundary
r_next = r + alpha * Bd
r_next_squared = np.dot(r_next, r_next)
if math.sqrt(r_next_squared) < tolerance:
hits_boundary = False
return z_next, hits_boundary
beta_next = r_next_squared / r_squared
d_next = -r_next + beta_next * d
# update the state for the next iteration
z = z_next
r = r_next
d = d_next
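
# A brief usage sketch (an illustration, not part of this module's public
# surface): the subproblem class above is exercised indirectly through
# ``scipy.optimize.minimize`` with ``method='trust-ncg'``, using the options
# documented in ``_minimize_trust_ncg``.
#
#   from scipy.optimize import minimize, rosen, rosen_der, rosen_hess
#   res = minimize(rosen, [2.0, 2.0], jac=rosen_der, hess=rosen_hess,
#                  method='trust-ncg',
#                  options={'initial_trust_radius': 1.0, 'gtol': 1e-8})
#   print(res.x)  # converges towards [1.0, 1.0]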
pronexo-odoo/odoo-argentina | l10n_ar_receipt/report/receipt_print.py | Python | agpl-3.0 | 1,664 | 0.003005 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#    Copyright (C) 2012 Silvina Faner (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
import netsvc
class report_receipt_print(report_sxw.rml_parse):
_name = 'report.receipt.print'
def __init__(self, cr, uid, name, context):
super(report_receipt_print, self).__init__(cr, uid, name, context)
self.localcontext.update({
'time': time,
'convert': self.convert,
})
    def convert(self, amount, currency):
        return self.pool.get('ir.translation').amount_to_text(amount, 'pe', currency or 'Pesos')
report_sxw.report_sxw(
'report.receipt.print',
'receipt.receipt',
'trunk/receipt_pay/report/receipt_pay_print.rml',
parser=report_receipt_print,header="external"
)
fareskalaboud/pybugger | pybugger/pybugger.py | Python | gpl-3.0 | 4,133 | 0.003629 | import random
from pybugger import myaixterm
color = myaixterm
color.aix_init()
def string_constructor(args, foreground="normal", background="normal"):
if foreground != "rainbow":
foreground = "" if foreground == "normal" else color.aix_fg(foreground)
background = "" if background == "normal" else color.aix_bg(background)
res = foreground + background
for arg in args:
res += arg
res = res + color.aix_normal()
return res
else:
colors = color.get_all_colors()
res = ""
for arg in args:
res += arg
rainbow_string = ""
for character in list(res):
            # the fg/bg helpers were swapped here; use each for its own role
            foreground = color.aix_fg(colors[getRandomKey(colors)])
            background = color.aix_bg(colors[getRandomKey(colors)])
rainbow_string += foreground + background + character
rainbow_string += color.aix_normal()
return rainbow_string
def getRandomKey(dictionary):
return random.sample(list(dictionary), 1).pop()
def default(*args):
"""Format the arguments with a default forgreound and background."""
print(string_constructor(args))
def success(*args):
"""Format the arguments with a green forgreound."""
print(string_constructor(args, "green"))
def mega_success(*args):
"""Format the arguments with a white forgreound and a green background."""
print(string_constructor(args, "white", "green"))
def warning(*args):
"""Format the arguments with a yellow forgreound."""
print(string_constructor(args, "yellow"))
def mega_warning(*args):
"""Format the arguments with a white forgreound and a yellow background."""
print(string_constructor(args, "black", "fullyellow"))
def info(*args):
"""Format the arguments with a cyan forgreound."""
print(string_constructor(args, "cyan"))
def mega_info(*args):
"""Format the arguments with a white forgreound and a cyan background."""
print(string_constructor(args, "white", "cyan"))
def error(*args):
"""Format the arguments with a red forgreound."""
print(string_constructor(args, "brightred"))
def mega_error(*args):
"""Format the arguments with a white forgreound and a red background."""
print(string_constructor(args, "white", "red"))
def randomize(*args):
"""Format the arguments with a random forgreound and background."""
print(string_constructor(args, "rainbow"))
def inverted(*args):
"""Format the arguments with a black foreground and white background."""
print(string_constructor(args, "black", "white"))
def custom(*args, delimiter='', fg="normal", bg="normal"):
"""Format the arguments with a custom foreground and background."""
debug_str = delimiter.join(args)
print(string_constructor(debug_str, fg, bg))
def test():
"""A test method to print out examples."""
print("")
print("pybugger.success(*lyric)")
success("\"We're no strangers to love,")
print("")
print("pybugger.mega_success(*lyric)")
mega_success("You know the rules and so do I")
print("")
print("pybugger.info(*lyric)")
info("A full commitment's what I'm thinking of")
print("")
print("pybugger.mega_info(*lyric)")
mega_info("You wouldn't get this from any other guy")
print("")
print("pybugger.warning(*lyric)")
warning("I just wanna tell you how I'm feeling")
print("")
print("pybugger.mega_warning(*lyric)")
mega_warning("Gotta make you understand,")
print("")
print("pybugger.error(*lyric)")
error("Never gonna give you up")
print("")
print("pybugger.mega_error(*lyric)")
mega_error("Never gonna let you down")
print("")
print("pybugger.randomize(*lyric)")
randomize("Never gonna run around and deser | t you")
print("")
print("pybugger.custom(lyric, \"color119\", \"color93\")")
custom("Never gonna make you cry", "color119", "color93")
print("")
print("pybugger.inverted(*lyric)")
inverted("Never gonna say goodbye.")
print("")
print("pybugger.default(*lyric)") |
default("Never gonna tell a lie and hurt you.\"")
print("")
ael-code/libreant | webant/api/archivant_api.py | Python | agpl-3.0 | 6,972 | 0.001865 | import json
import tempfile
import os
from werkzeug import secure_filename
from webant.util import send_attachment_file, routes_collector
from flask import request, current_app, url_for, jsonify
from archivant import Archivant
from archivant.exceptions import NotFoundException
from util import ApiError, make_success_response
routes = []
route = routes_collector(routes)
@route('/volumes/')
def get_volumes():
q = request.args.get('q', "*:*")
try:
from_ = int(request.args.get('from', 0))
except ValueError:
raise ApiError("Bad Request", 400, details="could not covert 'from' parameter to number")
try:
size = int(request.args.get('size', 10))
except ValueError:
raise ApiError("Bad Request", 400, details="could not covert 'size' parameter to number")
if size > current_app.config.get('MAX_RESULTS_PER_PAGE', 50):
raise ApiError("Request Entity Too Large", 413, details="'size' parameter is too high")
q_res = current_app.archivant._db.get_books_querystring(query=q, from_=from_, size=size)
volumes = map(Archivant.normalize_volume, q_res['hits']['hits'])
next_args = "?q={}&from={}&size={}".format(q, from_ + size, size)
prev_args = "?q={}&from={}&size={}".format(q, from_ - size if ((from_ - size) > -1) else 0, size)
base_url = url_for('.get_volumes', _external=True)
res = {'link_prev': base_url + prev_args,
'link_next': base_url + next_args,
'total': q_res['hits']['total'],
'data': volumes}
return jsonify(res)
@route('/volumes/', methods=['POST'])
def add_volume():
metadata = receive_volume_metadata()
try:
volumeID = current_app.archivant.insert_volume(metadata)
except ValueError, e:
raise ApiError("malformed metadata", 400, details=str(e))
link_self = url_for('.get_volume', volumeID=volumeID, _external=True)
response = jsonify({'data': {'id': volumeID, 'link_self': link_self}})
response.status_code = 201
response.headers['Location'] = link_self
return response
@route('/volumes/<volumeID>', methods=['PUT'])
def update_volume(volumeID):
metadata = receive_volume_metadata()
try:
current_app.archivant.update_volume(volumeID, metadata)
except NotFoundException, e:
raise ApiError("volume not found", 404, details=str(e))
except ValueError, e:
raise ApiError("malformed metadata", 400, details=str(e))
return make_success_response("volume successfully updated", 201)
@route('/volumes/<volumeID>', methods=['GET'])
def get_volume(volumeID):
try:
volume = current_app.archivant.get_volume(volumeID)
except NotFoundException, e:
raise ApiError("volume not found", 404, details=str(e))
return jsonify({'data': volume})
@route('/volumes/<volumeID>', methods=['DELETE'])
def delete_volume(volumeID):
try:
current_app.archivant.delete_volume(volumeID)
except NotFoundException, e:
raise ApiError("volume not found", 404, details=str(e))
return make_success_response("volume has been successfully deleted")
@route('/volumes/<volumeID>/attachments/', methods=['GET'])
def get_attachments(volumeID):
try:
atts = current_app.archivant.get_volume(volumeID)['attachments']
except NotFoundException, e:
raise ApiError("volume not found", 404, details=str(e))
return jsonify({'data': atts})
@route('/volumes/<volumeID>/attachments/', methods=['POST'])
def add_attachments(volumeID):
metadata = receive_metadata(optional=True)
if 'file' not in request.files:
raise ApiError("malformed request", 400, details="file not found under 'file' key")
upFile = request.files['file']
tmpFileFd, tmpFilePath = tempfile.mkstemp()
upFile.save(tmpFilePath)
fileInfo = {}
fileInfo['file'] = tmpFilePath
fileInfo['name'] = secure_filename(upFile.filename)
fileInfo['mime'] = upFile.mimetype
fileInfo['notes'] = metadata.get('notes', '')
# close fileDescriptor
os.close(tmpFileFd)
try:
attachmentID = current_app.archivant.insert_attachments(volumeID, attachments=[fileInfo])[0]
except NotFoundException, e:
        raise ApiError("volume not found", 404, details=str(e))
finally:
# remove temp files
os.remove(fileInfo['file'])
    link_self = url_for('.get_attachment', volumeID=volumeID, attachmentID=attachmentID, _external=True)
response = jsonify({'data': {'id': attachmentID, 'link_self': link_self}})
response.status_code = 201
response.headers['Location'] = link_self
return response
@route('/volumes/<volumeID>/attachments/<attachmentID>', methods=['GET'])
def get_attachment(volumeID, attachmentID):
try:
att = current_app.archivant.get_attachment(volumeID, attachmentID)
except NotFoundException, e:
raise ApiError("attachment not found", 404, details=str(e))
return jsonify({'data': att})
@route('/volumes/<volumeID>/attachments/<attachmentID>', methods=['DELETE'])
def delete_attachment(volumeID, attachmentID):
try:
current_app.archivant.delete_attachments(volumeID, [attachmentID])
except NotFoundException, e:
raise ApiError("attachment not found", 404, details=str(e))
return make_success_response("attachment has been successfully deleted")
@route('/volumes/<volumeID>/attachments/<attachmentID>', methods=['PUT'])
def update_attachment(volumeID, attachmentID):
metadata = receive_metadata()
try:
current_app.archivant.update_attachment(volumeID, attachmentID, metadata)
except ValueError, e:
raise ApiError("malformed request", 400, details=str(e))
return make_success_response("attachment has been successfully updated")
@route('/volumes/<volumeID>/attachments/<attachmentID>/file', methods=['GET'])
def get_file(volumeID, attachmentID):
try:
return send_attachment_file(current_app.archivant, volumeID, attachmentID)
except NotFoundException, e:
raise ApiError("file not found", 404, details=str(e))
def receive_volume_metadata():
metadata = receive_metadata()
# TODO check also for preset consistency?
requiredFields = ['_language']
for requiredField in requiredFields:
if requiredField not in metadata:
raise ApiError("malformed metadata", 400, details="Required field '{}' is missing in metadata".format(requiredField))
return metadata
def receive_metadata(optional=False):
    if optional and 'metadata' not in request.values:
return {}
try:
metadata = json.loads(request.values['metadata'])
except KeyError:
raise ApiError("malformed request", 400, details="missing 'metadata' in request")
except Exception, e:
raise ApiError("malformed metadata", 400, details=str(e))
if not isinstance(metadata, dict):
raise ApiError("malformed metadata", 400, details="metadata value should be a json object")
return metadata
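
# A hypothetical client-side sketch (not part of this module) showing the
# request shape the handlers above expect: metadata travels as a JSON string
# in the 'metadata' form field and uploads go under the 'file' key. The base
# URL below is an assumption.
#
#   import json, requests
#   base = 'http://localhost:5000/volumes/'
#   r = requests.post(base, data={'metadata': json.dumps({'_language': 'en'})})
#   volume_url = r.headers['Location']
#   with open('scan.pdf', 'rb') as f:
#       requests.post(volume_url + '/attachments/',
#                     data={'metadata': json.dumps({'notes': 'first scan'})},
#                     files={'file': f})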
Azure/azure-sdk-for-python | sdk/cognitivelanguage/azure-ai-language-questionanswering/azure/ai/language/questionanswering/projects/aio/_configuration.py | Python | mit | 3,321 | 0.004517 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any
from azure.core.configuration import Configuration
from azure.core.credentials import AzureKeyCredential
from azure.core.pipeline import policies
from .._version import VERSION
class QuestionAnsweringProjectsClientConfiguration(Configuration): # pylint: disable=too-many-instance-attributes
"""Configuration for QuestionAnsweringProjectsClient.
Note that all parameters used to create this instance are saved as instance
attributes.
:param endpoint: Supported Cognitive Services endpoint (e.g.,
https://:code:`<resource-name>`.api.cognitiveservices.azure.com).
:type endpoint: str
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials.AzureKeyCredential
:keyword api_version: Api Version. The default value is "2021-10-01". Note that overriding this
default value may result in unsupported behavior.
:paramtype api_version: str
"""
def __init__(
self,
endpoint: str,
credential: AzureKeyCredential,
**kwargs: Any
) -> None:
super(QuestionAnsweringProjectsClientConfiguration, self).__init__(**kwargs)
api_version = kwargs.pop('api_version', "2021-10-01") # type: str
if endpoint is None:
raise ValueError("Parameter 'endpoint' must not be None.")
if credential is None:
raise ValueError("Parameter 'credential' must not be None.")
self.endpoint = endpoint
self.credential = credential
self.api_version = api_version
        kwargs.setdefault('sdk_moniker', 'ai-language-questionanswering/{}'.format(VERSION))
self._configure(**kwargs)
def _configure(
        self,
**kwargs: Any
) -> None:
self.user_agent_policy = kwargs.get('user_agent_policy') or policies.UserAgentPolicy(**kwargs)
self.headers_policy = kwargs.get('headers_policy') or policies.HeadersPolicy(**kwargs)
self.proxy_policy = kwargs.get('proxy_policy') or policies.ProxyPolicy(**kwargs)
self.logging_policy = kwargs.get('logging_policy') or policies.NetworkTraceLoggingPolicy(**kwargs)
self.http_logging_policy = kwargs.get('http_logging_policy') or policies.HttpLoggingPolicy(**kwargs)
self.retry_policy = kwargs.get('retry_policy') or policies.AsyncRetryPolicy(**kwargs)
self.custom_hook_policy = kwargs.get('custom_hook_policy') or policies.CustomHookPolicy(**kwargs)
self.redirect_policy = kwargs.get('redirect_policy') or policies.AsyncRedirectPolicy(**kwargs)
self.authentication_policy = kwargs.get('authentication_policy')
if self.credential and not self.authentication_policy:
self.authentication_policy = policies.AzureKeyCredentialPolicy(self.credential, "Ocp-Apim-Subscription-Key", **kwargs)
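
# A minimal construction sketch (endpoint and key are placeholders, not real
# values); the key ends up on the Ocp-Apim-Subscription-Key header via the
# AzureKeyCredentialPolicy wired up in _configure above.
#
#   config = QuestionAnsweringProjectsClientConfiguration(
#       endpoint="https://<resource-name>.api.cognitiveservices.azure.com",
#       credential=AzureKeyCredential("<api-key>"),
#   )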
idlesign/django-sitetree | sitetree/management/commands/sitetreedump.py | Python | bsd-3-clause | 2,050 | 0.003902 | from django.core import serializers
from django.core.management.base import BaseCommand, CommandError
from django.db import DEFAULT_DB_ALIAS
from sitetree.utils import get_tree_model, get_tree_item_model
from sitetree.compat import CommandOption, options_getter
MODEL_TREE_CLASS = get_tree_model()
MODEL_TREE_ITEM_CLASS = get_tree_item_model()
get_options = options_getter((
CommandOption(
'--indent', default=None, dest='indent', type=int,
        help='Specifies the indent level to use when pretty-printing output.'),
CommandOption('--items_only', action='store_true', dest='items_only', default=False,
help='Export tree items only.'),
CommandOption('--database', action='store', dest='database', default=DEFAULT_DB_ALIAS,
help='Nominates a specific database to export fixtures from. Defaults to the "default" database.'),
))
class Command(BaseCommand):
option_list = get_options()
help = 'Output sitetrees from database as a fixture in JSON format.'
args = '[tree_alias tree_alias ...]'
def add_arguments(self, parser):
parser.add_argument('args', metavar='tree', nargs='*', help='Tree aliases.', default=[])
get_options(parser.add_argument)
def handle(self, *aliases, **options):
indent = options.get('indent', None)
using = options.get('database', DEFAULT_DB_ALIAS)
items_only = options.get('items_only', False)
objects = []
if aliases:
trees = MODEL_TREE_CLASS._default_manager.using(using).filter(alias__in=aliases)
else:
trees = MODEL_TREE_CLASS._default_manager.using(using).all()
if not items_only:
objects.extend(trees)
for tree in trees:
objects.extend(MODEL_TREE_ITEM_CLASS._default_manager.using(using).filter(tree=tree).order_by('parent'))
try:
return serializers.serialize('json', objects, indent=indent)
except Exception as e:
raise CommandError(f'Unable to serialize sitetree(s): {e}')
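
# A usage sketch (the tree alias 'main' is hypothetical), invoked like any
# Django management command:
#
#   python manage.py sitetreedump main --indent 2 > sitetrees.json
#   python manage.py sitetreedump --items_only > items_only.json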
Outernet-Project/squery-lite | tests/test_squery.py | Python | bsd-3-clause | 15,587 | 0 | import mock
import pytest
from squery_lite import squery as mod
MOD = mod.__name__
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_connection_object_connects(sqlite3):
""" Connection object starts a connection """
conn = mod.Connection('foo.db')
sqlite3.connect.assert_called_once_with(
'foo.db', detect_types=sqlite3.PARSE_DECLTYPES)
assert conn._conn.isolation_level is None
conn._conn.cursor().execute.assert_called_once_with(
'PRAGMA journal_mode=WAL;')
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_connection_repr(*ignored):
""" Connection object has human-readable repr """
conn = mod.Connection('foo.db')
assert repr(conn) == "<Connection path='foo.db'>"
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_connection_object_remebers_dbpath(sqlite3):
""" Connection object can remember the database path """
conn = mod.Connection('foo.db')
assert conn.path == 'foo.db'
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_connection_has_sqlite3_connection_api(sqlite3):
""" Connection object exposes sqlite3.Connection methods and props """
conn = mod.Connection('foo.db')
assert conn.cursor == sqlite3.connect().cursor
assert conn.isolation_level == sqlite3.connect().isolation_level
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_connection_close(sqlite3):
""" Connection object commits before closing """
conn = mod.Connection('foo.db')
conn.close()
assert sqlite3.connect().commit.called
assert sqlite3.connect().close.called
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_can_set_attributes_on_underlying_connection(sqlite3):
""" Attributes set on the Connection instance are mirrored correctly """
conn = mod.Connection('foo.db')
conn.isolation_level = None
assert conn.isolation_level == conn._conn.isolation_level
conn.isolation_level = 'EXCLUSIVE'
assert conn.isolation_level == conn._conn.isolation_level
@mock.patch(MOD + '.sqlite3', autospec=True)
def test_can_clone_connection(sqlite3):
""" Duplicate connection objects can be created with new() method """
conn = mod.Connection('foo.db')
assert sqlite3.connect.call_count == 1
conn2 = conn.new()
assert sqlite3.connect.call_count == 2
assert conn is not conn2
assert conn.path == conn2.path
def test_registering_custom_function():
""" Connection can register custom functions """
def addtwo(s):
return s + 2
conn = mod.Connection(':memory:', funcs=[addtwo])
cur = mod.Cursor(conn)
cur.execute('create table foo(i)')
cur.execute('insert into foo values (1)')
cur.execute('insert into foo values (2)')
cur.execute('insert into foo values (3)')
cur.execute('insert into foo values (4)')
cur.execute('insert into foo values (5)')
cur.execute('select addtwo(i) as a from foo order by i')
assert [r.a for r in cur] == [3, 4, 5, 6, 7]
def test_registering_custom_function_with_method():
""" Connection can register custom functions """
def addtwo(s):
return s + 2
conn = mod.Connection(':memory:')
conn.add_func(addtwo)
cur = mod.Cursor(conn)
cur.execute('create table foo(i)')
cur.execute('insert into foo values (1)')
cur.execute('insert into foo values (2)')
cur.execute('insert into foo values (3)')
cur.execute('insert into foo values (4)')
cur.execute('insert into foo values (5)')
cur.execute('select addtwo(i) as a from foo order by i')
assert [r.a for r in cur] == [3, 4, 5, 6, 7]
def test_registering_custom_callable():
""" Connection can register custom functions as callables """
class AddTwo(object):
def __call__(self, s):
return s + 2
conn = mod.Connection(':memory:', funcs=[AddTwo()])
cur = mod.Cursor(conn)
cur.execute('create table foo(i)')
cur.execute('insert into foo values (1)')
cur.execute('insert into foo values (2)')
cur.execute('insert into foo values (3)')
cur.execute('insert into foo values (4)')
cur.execute('insert into foo values (5)')
cur.execute('select addtwo(i) as a from foo order by i')
assert [r.a for r in cur] == [3, 4, 5, 6, 7]
def test_registering_custom_aggregate():
""" Connection can register custom aggregate """
class Concat(object):
def __init__(self):
self.s = ''
def step(self, s):
self.s += str(s)
def finalize(self):
return self.s
conn = mod.Connection(':memory:', aggregates=[Concat])
cur = mod.Cursor(conn)
cur.execute('create table foo(i)')
cur.execute("insert into foo values ('a')")
cur.execute("insert into foo values ('b')")
| cur.execute("insert into foo values ('c')")
cur.execute("insert into foo values ('d')")
cur.execute("insert into foo values ('e')")
cur.execute("select concat(i) as a from foo order by i")
    assert cur.result.a == 'abcde'
@mock.patch(MOD + '.sqlite3')
def test_db_connect(sqlite3):
mod.Database.connect('foo.db')
sqlite3.connect.assert_called_once_with(
'foo.db', detect_types=sqlite3.PARSE_DECLTYPES)
@mock.patch(MOD + '.sqlite3')
def test_db_uses_dbdict(sqlite3):
""" The database will use a dbdict_factory for all rows """
conn = mod.Database.connect('foo.db')
assert conn.row_factory == mod.Row
@mock.patch(MOD + '.sqlite3')
def test_init_db_with_connection(*ignored):
""" Database object is initialized with a connection """
conn = mock.Mock()
db = mod.Database(conn)
assert db.conn == conn
@mock.patch(MOD + '.sqlite3')
def test_get_cursor(*ignored):
""" Obtaining curor should return connection's cursor object """
db = mod.Database(mock.Mock())
cur = db.cursor()
assert cur.cursor == db.conn.cursor.return_value
@mock.patch(MOD + '.sqlite3')
def test_get_curor_only_retrieved_once(sqlite3):
""" Cursor is retrieved every time """
db = mod.Database(mock.Mock())
db.cursor()
db.cursor()
assert db.conn.cursor.call_count == 2
@mock.patch(MOD + '.sqlite3')
def test_convert_sqlbuilder_class_to_repr(*ignored):
""" When sqlbuilder object is passed as query, it's converted to repr """
@mod.convert_query
def with_query(self, q):
return q
self = mock.Mock() # because convert_query is a method deco
select = mock.Mock(spec=mod.Select)
select.serialize.return_value = 'SELECT * FROM foo;'
sql = with_query(self, select)
assert sql == select.serialize.return_value
@mock.patch(MOD + '.sqlite3')
def test_convert_string_query(*ignored):
""" When raw SQL sting is passed, it's not conveted """
@mod.convert_query
def with_query(self, q):
return q
s = 'foobar'
self = mock.Mock() # because convert_query is a method deco
sql = with_query(self, s)
assert s is sql
@mock.patch(MOD + '.sqlite3')
@mock.patch(MOD + '.Cursor')
def test_query(*ignored):
""" query() should execute a database query """
db = mod.Database(mock.Mock())
cursor = db.query('SELECT * FROM foo;')
cursor.query.assert_called_once_with('SELECT * FROM foo;')
@mock.patch(MOD + '.sqlite3')
def test_query_execute(*ignored):
""" query() should execute a database query """
db = mod.Database(mock.Mock())
cursor = db.query('SELECT * FROM foo;')
cursor.cursor.execute.assert_called_once_with('SELECT * FROM foo;', {})
@mock.patch(MOD + '.sqlite3')
@mock.patch(MOD + '.Cursor')
def test_query_params(*ignored):
""" Query converts positional arguments to params list """
db = mod.Database(mock.Mock())
cursor = db.query('SELECT * FROM foo WHERE bar = ?;', 12)
cursor.query.assert_called_once_with(
'SELECT * FROM foo WHERE bar = ?;', 12)
@mock.patch(MOD + '.sqlite3')
def test_query_params_execute(*ignored):
""" Query converts positional arguments to params list """
db = mod.Database(mock.Mock())
cursor = db.query('SELECT * FROM foo WHERE bar = ?;', 12)
cursor.cursor.execute.assert_called_once_with(
'SELECT * FROM foo WHERE bar = ?;', (12,))
@mock.patch(MOD + '.sqlite3')
@mock.patch(MOD + '.Cursor')
VRSandeep/icrs | website/urls.py | Python | mit | 191 | 0 | from django.conf.urls import url
urlpatterns = [
url(r'^$', views.home),
url(r'^interviewer/$', views.interviewer),
url(r'^candidate/$', views. | candidate),
]
|
all-of-us/raw-data-repository | rdr_service/lib_fhir/fhirclient_1_0_6/models/identifier.py | Python | bsd-3-clause | 2,304 | 0.009549 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 1.0.2.7202 (http://hl7.org/fhir/StructureDefinition/Identifier) on 2016-06-23.
# 2016, SMART Health IT.
from . import element
class Identifier(element.Element):
""" An identifier intended for computation.
A technical identifier - identifies some entity uniquely and unambiguously.
"""
resource_name = "Identifier"
def __init__(self, jsondict=None, strict=True):
""" Initialize all valid properties.
:raises: FHIRValidationError on validation errors, unless strict is False
:param dict jsondict: A JSON dictionary to use for initialization
:param bool strict: If True (the default), invalid variables will raise a TypeError
"""
self.assigner = None
""" Organization that issued id (may be just text).
Type `FHIRReference` referencing `Organization` (represented as `dict` in JSON). """
self.period = None
""" Time period when id is/was valid for use.
Type `Period` (represented as `dict` in JSON). """
self.system = None
""" The namespace for the identifier.
Type `str`. """
self.type = None
""" Description of identifier.
        Type `CodeableConcept` (represented as `dict` in JSON). """
self.use = None
""" usual | official | temp | secondary (If known).
Type `str`. """
        self.value = None
""" The value that is unique.
Type `str`. """
super(Identifier, self).__init__(jsondict=jsondict, strict=strict)
def elementProperties(self):
js = super(Identifier, self).elementProperties()
js.extend([
("assigner", "assigner", fhirreference.FHIRReference, False, None, False),
("period", "period", period.Period, False, None, False),
("system", "system", str, False, None, False),
("type", "type", codeableconcept.CodeableConcept, False, None, False),
("use", "use", str, False, None, False),
("value", "value", str, False, None, False),
])
return js
from . import codeableconcept
from . import fhirreference
from . import period
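
# A minimal construction sketch (the field values are invented examples):
# fhirclient models accept a JSON dictionary keyed by the element properties
# declared above.
#
#   ident = Identifier({
#       "use": "official",
#       "system": "urn:example:mrn",
#       "value": "12345",
#   })
#   print(ident.as_json())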
galad-loth/DescHash | DeepHash/common/data.py | Python | apache-2.0 | 3,986 | 0.035625 | import numpy as npy
import struct
import os
from scipy import io as scio
from mxnet import io as mxio
def ReadFvecs(dataPath, dataFile, start=0, end=-1):
filePath=os.path.join(dataPath,dataFile)
with open(filePath,mode="rb") as fid:
buf=fid.read(4)
dimFeat=struct.unpack("i", buf[:4])
numVecByte=(dimFeat[0]+1)*4
fid.seek(0,2)
numVec=fid.tell()/numVecByte
if end<0:
end=numVec
        if (start<0 or start>numVec or end>numVec or start>end):
print("Start/End index is out of the data range")
numDataEntry=(end-start)*(dimFeat[0]+1)
numReadByte=numDataEntry*4
fid.seek(start*numVecByte,0)
buf=fid.read(numReadByte)
data=npy.array(struct.unpack("f"*numDataEntry, buf[:numReadByte]))
        data=data.reshape((end-start, dimFeat[0]+1))  # reshape to the vectors actually read, not the whole file
data=data[:,1:]
return data
def ReadIvecs(dataPath, dataFile, start=0, end=-1):
    filePath=os.path.join(dataPath,dataFile)
with open(filePath,mode="rb") as fid:
buf=fid.read(4)
dimFeat=struct.unpack("i", buf[:4])
numVecByte=(dimFeat[0]+1)*4
fid.seek(0,2)
numVec=fid.tell()/numVecByte
if end<0:
end=numVec
if (start<0 or start>numVec or end>numVec or start>end):
print("Start/End index is out of the data range")
numDataEntry=(end-start)*(dimFeat[0]+1)
numReadByte=numDataEntry*4
fid.seek(start*numVecByte,0)
buf=fid.read(numReadByte)
data=npy.array(struct.unpack("i"*numDataEntry, buf[:numReadByte]))
        data=data.reshape((end-start, dimFeat[0]+1))  # reshape to the vectors actually read, not the whole file
data=data[:,1:]
return data
def ReadCIFAR10Gist(dataPath):
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_test_batch.mat"))
testData=dataTemp["gistFeat"]
testLabel=npy.ravel(dataTemp["labels"])
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_train_batch1.mat"))
trainData1=dataTemp["gistFeat"]
trainLabel1=npy.ravel(dataTemp["labels"])
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_train_batch2.mat"))
trainData2=dataTemp["gistFeat"]
trainLabel2=npy.ravel(dataTemp["labels"])
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_train_batch3.mat"))
trainData3=dataTemp["gistFeat"]
trainLabel3=npy.ravel(dataTemp["labels"])
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_train_batch4.mat"))
trainData4=dataTemp["gistFeat"]
trainLabel4=npy.ravel(dataTemp["labels"])
dataTemp=scio.loadmat(os.path.join(dataPath,"cifar10_train_batch5.mat"))
trainData5=dataTemp["gistFeat"]
trainLabel5=npy.ravel(dataTemp["labels"])
trainData=npy.concatenate((trainData1,trainData2,trainData3,trainData4,trainData5),axis=0)
trainLabel=npy.concatenate((trainLabel1,trainLabel2,trainLabel3,trainLabel4,trainLabel5))
return (trainData, trainLabel,testData,testLabel)
def SiftSmallIter(dataPath, trainNum, valNum, batchSize):
data=ReadFvecs(dataPath,"siftsmall_learn.fvecs")
data=data.astype(npy.float32)*0.01
ndata=data.shape[0]
ntrain=npy.minimum(trainNum,20000)
nval=npy.minimum(valNum,5000)
idxRand=npy.arange(ndata)
npy.random.shuffle(idxRand)
trainIter=mxio.NDArrayIter(
data=data[idxRand[:ntrain],:],
batch_size=batchSize,
shuffle=True,
last_batch_handle="discard")
valIter=mxio.NDArrayIter(
data=data[idxRand[ntrain:ntrain+nval],:],
batch_size=batchSize,
shuffle=False,
last_batch_handle="discard")
return (trainIter, valIter)
if __name__=="__main__":
dataPath="E:\\DevProj\\Datasets\\SIFT1M\\siftsmall"
trainIter, valIter=SiftSmallIter(dataPath,21000,4000,50)
mmpagani/oq-hazardlib | openquake/hazardlib/tests/gsim/utils_test.py | Python | agpl-3.0 | 2,240 | 0 | # The Hazard Library
# Copyright (C) 2014, GEM Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
import numpy
from openquake.hazardlib.gsim.utils import (
mblg_to_mw_johnston_96, mblg_to_mw_atkinson_boore_87, clip_mean
)
from openquake.hazardlib.imt import PGA, SA
class MblgToMwTestCase(unittest.TestCase):
def test_mblg_to_mw_johnston_96(self):
mblg = 5
mw = mblg_to_mw_johnston_96(mblg)
self.assertAlmostEqual(mw, 4.6725)
def test_mblg_to_mw_atkinson_boore_87(self):
mblg = 5
mw = mblg_to_mw_atkinson_boore_87(mblg)
self.assertAlmostEqual(mw, 4.5050)
class ClipMeanTestCase(unittest.TestCase):
def test_clip_mean(self):
mean = numpy.array([0.1, 0.2, 0.6, 1.2])
imt = PGA()
clipped_mean = clip_mean(imt, mean)
numpy.testing.assert_allclose(
[0.1, 0.2, 0.405, 0.405], clipped_mean
)
mean = numpy.array([0.1, 0.2, 0.6, 1.2])
imt = SA(period=0.1, damping=5.)
clipped_mean = clip_mean(imt, mean)
numpy.testing.assert_allclose(
[0.1, 0.2, 0.6, 1.099], clipped_mean
)
mean = numpy.array([0.1, 0.2, 0.6, 1.2])
imt = SA(period=0.6, damping=5.)
clipped_mean = clip_mean(imt, mean)
numpy.testing.assert_allclose(
[0.1, 0.2, 0.6, 1.2], clipped_mean
)
mean = numpy.array([0.1, 0.2, 0.6, 1.2])
imt = SA(period=0.01, damping=5.)
clipped_mean = clip_mean(imt, mean)
numpy.testing.assert_allclose(
[0.1, 0.2, 0.6, 1.2], clipped_mean
)
mastizada/kuma | vendor/packages/feedparser/docs/add_custom_css.py | Python | mpl-2.0 | 125 | 0 | # Makes Sphinx cr | eate a <link> to feedparser.css in the HTML output
def setup(app):
    app.add_stylesheet('feedparser.css')
RyanDJLee/pyta | examples/invalid_range_index_example.py | Python | gpl-3.0 | 197 | 0 | for i in range(0):
i += 1
for j in range(0, 1, 3):
j += 1
for k in range(9, 1, -9):
k += 1
for n in range(0, 1.1):  # Error on this line
n += 1
for m in range(4, 5):
m += 1
Br1an6/ACS_Netplumber_Implementation | hassel-c/net_plumbing/examples/stanford/generate_rules_json_file.py | Python | gpl-2.0 | 3,170 | 0.026814 | '''
Created on Sep 15, 2012
@author: peyman kazemian
'''
from examples_utils.network_loader import load_network
from config_parser.cisco_router_parser import cisco_router
from utils.wildcard import wildcard_create_bit_repeat
from utils.wildcard_utils import set_header_field
from headerspace.hs import headerspace
from time import time
import json
from headerspace.applications import find_reachability,print_paths
in_path = "stanford_json_rules/tf_rules"
out_path = "stanford_json_rules"
PORT_TYPE_MULTIPLIER = 10000
SWITCH_ID_MULTIPLIER = 100000
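# Note on the numbering scheme below (an informal reading of the code, kept
# as orientation rather than specification): each router owns a block of
# SWITCH_ID_MULTIPLIER port IDs, and adding PORT_TYPE_MULTIPLIER (or twice
# it) to a physical port ID shifts it to the output- or mid-stage copy of
# that port, so one physical router expands into in/mid/out pipeline tables.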
rtr_names = ["bbra_rtr",
"bbrb_rtr",
"boza_ | rtr",
"bozb_rtr",
"coza_rtr",
"cozb_rtr",
"goza_rtr",
"gozb_rtr",
"poza_rtr",
"pozb_rtr",
"roza_rtr",
"rozb_ | rtr",
"soza_rtr",
"sozb_rtr",
"yoza_rtr",
"yozb_rtr",
]
table_id = 0
topo = json.load(open(in_path+"/"+"topology.tf.json"))
topology = {"topology":[]}
for rule in topo["rules"]:
in_ports = rule["in_ports"]
out_ports = rule["out_ports"]
for in_port in in_ports:
for out_port in out_ports:
topology["topology"].append({"src":in_port,"dst":out_port})
for rtr_name in rtr_names:
tf = json.load(open(in_path+"/"+rtr_name+".tf.json"))
table_id += 1
tf_in = {"rules":[], "ports":[], "id":table_id*10}
tf_mid = {"rules":[], "ports":[], "id":table_id*10+1}
tf_out = {"rules":[], "ports":[], "id":table_id*10+2}
topology["topology"].append({"src":table_id * SWITCH_ID_MULTIPLIER, "dst":table_id * SWITCH_ID_MULTIPLIER + 2 * PORT_TYPE_MULTIPLIER})
rtr_ports = set()
for rule in tf["rules"]:
rule.pop("line")
rule.pop("file")
rule.pop("influence_on")
rule.pop("affected_by")
rule.pop("inverse_match")
rule.pop("inverse_rewrite")
rule.pop("id")
if (rule["in_ports"][0] % SWITCH_ID_MULTIPLIER == 0):
mid_port = table_id * SWITCH_ID_MULTIPLIER + 2 * PORT_TYPE_MULTIPLIER
rule["in_ports"] = [mid_port]
tf_mid["rules"].insert(0,rule)
elif (rule["in_ports"][0] % SWITCH_ID_MULTIPLIER < PORT_TYPE_MULTIPLIER):
#input rules
for elem in rule["in_ports"]:
rtr_ports.add(elem)
tf_in["rules"].insert(0,rule)
else:
# output rules
rule_in_ports = []
for p in rule["in_ports"]:
rule_in_ports.append(p+PORT_TYPE_MULTIPLIER)
rule["in_ports"] = rule_in_ports
tf_out["rules"].insert(0,rule)
tf_in["ports"] = list(rtr_ports)
tf_out["ports"] = list(rtr_ports)
for port in rtr_ports:
topology["topology"].append({"src":port+PORT_TYPE_MULTIPLIER, "dst":port+2*PORT_TYPE_MULTIPLIER})
f_in = open(out_path+"/"+rtr_name+".in.rules.json",'w')
f_mid = open(out_path+"/"+rtr_name+".mid.rules.json",'w')
f_out = open(out_path+"/"+rtr_name+".out.rules.json",'w')
f_in.write(json.dumps(tf_in, indent=1))
f_mid.write(json.dumps(tf_mid, indent=1))
f_out.write(json.dumps(tf_out, indent=1))
f_in.close()
f_mid.close()
f_out.close()
f_topo = open(out_path+"/topology.json",'w')
f_topo.write(json.dumps(topology, indent=1))
dwhickox/NCHS-Programming-1-Python-Programs | Chap 5/MoviesProj.py | Python | mit | 954 | 0.01782 | #D | avid Hickox
#May 2 17
#Chap 5 test EC
#sorts a list of movies and actors
#variables
# movie, Stores the movie data
# actors, stores the actor data
print("Welcome to the (enter name here bumbblefack) Program\n")
movie = []
actors = []
for i in range(5):
movie.append((input("Movie "+str(i+1)+"? ")).title())
actors.append((input("Who stars in "+ | movie[i]+"? ")).title())
rng = len(movie)
sw = 1
# Bubble sort both lists in parallel so each movie keeps its actor:
# keep sweeping until a full pass makes no swaps.
while sw == 1:
    sw = 0
    for i in range(rng):
        if movie[i]<movie[i-1] and i != 0:
            sw = 1
            # swap the adjacent movie/actor pairs
            temp = movie[i]
            tempa = actors[i]
            movie[i] = movie[i-1]
            actors[i] = actors[i-1]
            movie[i-1] = temp
            actors[i-1] = tempa
    rng -= 1
print("\nMovies\t\tActor")
for i in range(len(movie)):
if len(movie[i])>7:
print(movie[i],actors[i],sep = "\t")
else:
print(movie[i],actors[i],sep = "\t\t")
input("\nPress Enter to Exit")
koder-ua/nailgun-fcert | nailgun/nailgun/test/unit/test_deployment_serializer.py | Python | apache-2.0 | 4,224 | 0 | # Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from nailgun.errors import errors
from nailgun.test.base import BaseUnitTest
from nailgun.orchestrator import deployment_serializers as ds
from nailgun.orchestrator import priority_serializers as ps
class TestCreateSerializer(BaseUnitTest):
"""Test cases for `create_seriali | zer` function.
"""
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_retreiving_ha_for_5_0(self, _):
cluster = mock.MagicMock(is_ha_mode=True)
self.assertTrue(
isinstance(
ds.create_serializer(cluster),
                ds.DeploymentHASerializer))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_retreiving_multinode_for_5_0(self, _):
cluster = mock.MagicMock(is_ha_mode=False)
self.assertTrue(
isinstance(
ds.create_serializer(cluster),
ds.DeploymentMultinodeSerializer))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.1')
def test_retreiving_ha_for_5_1(self, _):
cluster = mock.MagicMock(is_ha_mode=True)
self.assertTrue(
isinstance(
ds.create_serializer(cluster), ds.DeploymentHASerializer51))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.1')
def test_retreiving_multinode_for_5_1(self, _):
cluster = mock.MagicMock(is_ha_mode=False)
self.assertTrue(
isinstance(
ds.create_serializer(cluster),
ds.DeploymentMultinodeSerializer51))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='9999.0')
def test_unsupported_serializer(self, _):
cluster = mock.MagicMock(is_ha_mode=True)
self.assertRaises(
errors.UnsupportedSerializer, ds.create_serializer, cluster)
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_regular_priority_serializer_ha(self, _):
cluster = mock.MagicMock(is_ha_mode=True, pending_release_id=None)
prio = ds.create_serializer(cluster).priority
self.assertTrue(isinstance(prio, ps.PriorityHASerializer50))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_regular_priority_serializer_mn(self, _):
cluster = mock.MagicMock(is_ha_mode=False, pending_release_id=None)
prio = ds.create_serializer(cluster).priority
self.assertTrue(isinstance(prio, ps.PriorityMultinodeSerializer50))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_patching_priority_serializer_ha(self, _):
cluster = mock.MagicMock(is_ha_mode=True, pending_release_id=42)
prio = ds.create_serializer(cluster).priority
self.assertTrue(isinstance(prio, ps.PriorityHASerializerPatching))
@mock.patch(
'nailgun.orchestrator.deployment_serializers.extract_env_version',
return_value='5.0')
def test_patching_priority_serializer_mn(self, _):
cluster = mock.MagicMock(is_ha_mode=False, pending_release_id=42)
prio = ds.create_serializer(cluster).priority
self.assertTrue(
isinstance(prio, ps.PriorityMultinodeSerializerPatching))
openbroadcaster/obplayer | obplayer.py | Python | agpl-3.0 | 887 | 0.003382 | #!/usr/bin/python3
# -*- coding: utf-8 -*-
"""
Copyright 2012-2015 OpenBroadcaster, Inc.
This file is part of OpenBroadcaster Player.
OpenBroadcaster Player is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
OpenBroadcaster Player is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with OpenBroadcaster Player. If not, see <http://www.gnu.org/licenses/>.
"""
import obplayer
obplayer.main()
davidliwei/mageck | mageck/mageckCount.py | Python | bsd-3-clause | 14,299 | 0.051542 | #!/usr/bin/env python
""" MAGeCK count module
Copyright (c) 2014 Wei Li, Han Xu, Xiaole Liu lab
This code is free software; you can redistribute it and/or modify it
under the terms of the BSD License (see the file COPYING included with
the distribution).
@status: experimental
@version: $Revision$
@author: Wei Li
@contact: li.david.wei AT gmail.com
"""
from __future__ import print_function
import sys;
import argparse;
import math;
import logging;
from testVisualCount import *;
def mageckcount_parseargs():
"""
Parse arguments. Only used when mageckCount.py is executed directly.
"""
parser=argparse.ArgumentParser(description='Collecting read counts for multiple samples.');
parser.add_argument('-l','--list-seq',required=True,help='A file containing the list of sgRNA names, their sequences and associated genes. Support file format: csv and txt.');
parser.add_argument('--sample-label',default='',help='Sample labels, separated by comma (,). Must be equal to the number of samples provided. Default "sample1,sample2,...".');
parser.add_argument('-n','--output-prefix',default='sample1',help='The prefix of the output file(s). Default sample1.');
parser.add_argument('--trim-5',type=int,default=0,help='Length of trimming the 5\' of the reads. Default 0');
parser.add_argument('--sgrna-len',type=int,default=20,help='Length of the sgRNA. Default 20');
parser.add_argument('--count-n',action='store_true',help='Count sgRNAs with Ns. By default, sgRNAs containing N will be discarded.');
parser.add_argument('--fastq',nargs='+',help='Sample fastq files, separated by space; use comma (,) to indicate technical replicates of the same sample. For example, "--fastq sample1_replicate1.fastq,sample1_replicate2.fastq sample2_replicate1.fastq,sample2_replicate2.fastq" indicates two samples with 2 technical replicates for each sample.');
args=parser.parse_args();
return args;
def mageckcount_checkargs(args):
"""
Check args
"""
if args.sample_label!='':
nlabel=args.sample_label.split(',');
#nfq=args.fastq.split(',');
nfq=(args.fastq);
if len(nlabel)!=len(nfq):
logging.error('The number of labels ('+str(nlabel)+') must be equal to the number of fastq files provided.');
sys.exit(-1);
return 0;
def normalizeCounts(ctable,method='median'):
"""
Normalize read counts
Return value: {sgRNA:[read counts]}
"""
# sums
if len(ctable)==0:
return ctable.copy();
n=len(ctable[ctable.keys()[0]]); # samples
m=len(ctable); # sgRNAs
# calculate the sum
sumsample=[0]*n;
for (k,v) in ctable.iteritems():
sumsample=[sumsample[i]+v[i] for i in range(n)];
logging.info('Total read counts of each sample: '+' '.join([str(x) for x in sumsample]));
logging.debug('Normalization method: '+method);
# normalizing factor
avgsample=sum(sumsample)/float(n);
samplefactor=[avgsample/k for k in sumsample];
logging.debug('Initial (total) size factor: '+' '.join([str(x) for x in samplefactor]));
if method=='median':
# calculate the average
# meanval={k:(sum(v)*1.0/n) for (k,v) in ctable.iteritems() if sum(v)>0}; # mean
meanval={k:math.exp( (sum( [ math.log(v2+1.0) for v2 in v])*1.0/n) ) for (k,v) in ctable.iteritems() if sum(v)>0}; # geometric mean
meanval={k:(lambda x: x if x>0 else 1)(v) for (k,v) in meanval.iteritems()};
#samplefactor=[0]*n;
usetotalnorm=False;
medianfactor=[x for x in samplefactor];
for ni in range(n):
meanfactor=[ v[ni]/meanval[k] for (k,v) in ctable.iteritems() if k in meanval];
#print(str(sorted(meanfactor)))
xfactor=sorted(meanfactor)[len(meanfactor)//2]; # corrected
if xfactor>0.0:
medianfactor[ni]=1.0/xfactor;
#logging.debug('xfactor:'+str(xfactor));
else:
logging.warning('Sample '+str(ni)+' has zero median count, so median normalization is not possible. Switch to total read count normalization.');
usetotalnorm=True;
# normalizing factor
if usetotalnorm:
pass;
else:
samplefactor=medianfactor;
logging.debug('Median factor: '+' '.join([str(x) for x in samplefactor]));
elif method=='none':
samplefactor=[1]*n;
logging.debug('Final factor: '+' '.join([str(x) for x in samplefactor]));
# normalize the table
ntable={ k: [ samplefactor[i]*v[i] for i in range(n)] for (k,v) in ctable.iteritems()};
return ntable;
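# An illustrative call (toy counts, not real screen data): with two samples
# whose totals differ tenfold, median normalization rescales each column by
# the median ratio to the per-sgRNA geometric means, leaving the columns on
# comparable scales.
#
#   example={'sg1':[10,100],'sg2':[20,200],'sg3':[30,300]};
#   normed=normalizeCounts(example,method='median');
#   # normed['sg1'] now holds roughly equal values for both samples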
def mageckcount_processonefile(filename,args,ctab,genedict,datastat):
'''
Go through one fastq file
Parameters
----------
filename
Fastq filename to be sequence
args
Arguments
ctab
A dictionary of sgRNA sequence and count
genedict
{sequence:(sgRNA_id,gene_id)} dictionary
datastat
Statistics of datasets ({key:value})
Return value
----------
'''
# ctab={};
nline=0;
logging.info('Parsing file '+filename+'...');
nreadcount=0;
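  # FASTQ records span 4 lines (read name, sequence, '+', quality string);
  # the nline%4==2 test below therefore selects only the sequence lines.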
for line in open(filename):
nline=nline+1;
if nline%1000000==1:
logging.info('Processing '+str(round(nline/1000000))+ 'M lines..');
if nline%4 == 2:
nreadcount+=1;
fseq=line.strip();
if args.trim_5 >0:
fseq=fseq[args.trim_5:];
if len(fseq)<args.sgrna_len:
continue;
fseq=fseq[:args.sgrna_len];
if fseq.count('N')>0 and args.count_n==False:
continue;
if fseq not in ctab:
ctab[fseq]=0;
ctab[fseq]=ctab[fseq]+1;
# statistics
datastat['reads']=nreadcount;
# check if a library is provided
if len(genedict)==0:
datastat['mappedreads']=0;
datastat['zerosgrnas']=0;
else:
nmapped=0;
for (k,v) in ctab.iteritems():
if k in genedict:
nmapped+=v;
nzerosg=0;
for (k,v) in genedict.iteritems():
if k not in ctab:
nzerosg+=1;
logging.info('mapped:'+str(nmapped));
datastat['mappedreads']=nmapped;
datastat['zerosgrnas']=nzerosg;
#return ctab;
return 0;
def mageckcount_mergedict(dict0,dict1):
'''
Merge all items in dict1 to dict0.
'''
nsample=0;
if len(dict0)>0:
nsample=len(dict0[dict0.keys()[0]]);
for (k,v) in dict0.iteritems():
if k in dict1:
v+=[dict1[k]];
else:
v+=[0];
for (k,v) in dict1.iteritems():
if k not in dict0:
if nsample>0:
dict0[k]=[0]*nsample;
else:
dict0[k]=[];
dict0[k]+=[v];
# return dict0;
def mageckcount_printdict(dict0,args,ofile,sgdict,datastat,sep='\t'):
'''
Write the table count to file
'''
allfastq=args.fastq;
nsample=len(allfastq);
slabel=[datastat[f.split(',')[0]]['label'] for f in allfastq];
# print header
print('sgRNA'+sep+'Gene'+sep+sep.join(slabel),file=ofile);
# print items
if len(sgdict)==0:
for (k,v) in dict0.iteritems():
      print(k+sep+'None'+sep+sep.join([str(x) for x in v]),file=ofile);
else:
for (k,v) in dict0.iteritems():
if k not in sgdict: # only print those in the genedict
continue;
sx=sgdict[k];
print(sep.join([sx[0],sx[1]])+sep+sep.join([str(x) for x in v]),file=ofile);
# print the remaining counts, fill with 0
for (k,v) in sgdict.iteritems():
if k not in dict0:
print(sep.join([v[0],v[1]])+sep+sep.join(["0"]*nsample),file=ofile);
def mageck_printdict(dict0,args,sgdict,sampledict,sampleids):
"""Write the normalized read counts to file
Parameters
----------
dict0 : dict
a {sgRNA: [read counts]} structure
args : class
a argparse class
sgdict: dict
a {sgrna:gene} dictionary
sampledict: dict
a {sample name: index} dict
sampleids: list
a list of sample index. Should include control+treatment
"""
# print header
# print items
dfmt="{:.5g}"
ofile=open(args.output_prefix+'.normalized.txt','w');
# headers
mapres_list=['']*len(sampledict);
for (k,v) in sampledict.iteritems():
mapres_list[v]=k;
if len(sampledict)>0:
cntheader=[mapres_list[x] for x in sampleids]
else:
cntheader=None;
logging.info('Writing normalized read counts to '+args.output_prefix+'.normalized.txt');
if cntheader !=None:
print('sgRNA\tGene\t'+'\t'.join(cntheader),file=ofile);
if len(sgdict)==0:
for (k,v) in dict0.iteritems():
|
thiagopa/thiagopagonha | blog/urls.py | Python | bsd-3-clause | 647 | 0.010819 | from django.conf.urls.defaults import *
from django.views.generic import list_detail, date_based
from blog.models import *
from blog.views import *
urlpatterns = patterns('blog.views',
(r"^preview/(\d+)$", "preview"),
(r"^publish/(\d+)$", "publish"),
    (r"^(?P<slug>[a-zA-Z0-9-]+)/$", "post"),
(r"^notify$", "send_mail"),
(r"^archive$", "archive"),
(r"^category/(\d+)/$", "category"),
#url(r'^category/(\d+)/$', blog_posts_by_category, name="blog_posts_by_category"),
#url(r'^search/$', blog_post_search, name="blog_post_search"),
    url(r'^(?P<template>\w+)/$', static_page, name="static_page"),
(r"", "main"),
)
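# Ordering note: Django tries these patterns top-down, so the catch-all
# (r"", "main") entry must remain last or it would shadow every route above it.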
|
wltrimbl/google-python-exercises3 | basic/solution/mimic.py | Python | apache-2.0 | 3,158 | 0.00095 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Translated to python3 wltrimbl 2016
"""Mimic pyquick exercise -- optional extra exercise.
Google's Python Class
Read in the file specified on the command line.
Do a simple split() on whitespace to obtain all the words in the file.
Rather than read the file line by line, it's easier to read
it into one giant string and split it once.
Build a "mimic" dict that maps each word that appears in the file
to a list of all the words that immediately follow that word in the file.
The list of words can be in any order and should include
duplicates. So for example the key "and" might have the list
["then", "best", "then", "after", ...] listing
all the words which came after "and" in the text.
We'll say that the empty string is what comes before
the first word in the file.
With the mimic dict, it's fairly easy to emit random
text that mimics the original. Print a word, then look
up what words might come next and pick one at random as
the next word.
Use the empty string as the first word to prime things.
If we ever get stuck with a word that is not in the dict,
go back to the empty string to keep things moving.
Note: the standard python module 'random' includes a
random.choice(list) method which picks a random element
from a non-empty list.
For fun, feed your program to itself as input.
Could work on getting it to put in linebreaks around 70
columns, so the output looks better.
"""
import random
import sys
def mimic_dict(filename):
"""Returns mimic dict mapping each word to list of words which follow it."""
# +++your code here+++
# LAB(begin solution)
mimic_dict = {}
f = open(filename, 'r')
text = f.read()
f.close()
words = text.split()
prev = ''
for word in words:
if not prev in mimic_dict:
mimic_dict[prev] = [word]
else:
mimic_dict[prev].append(word)
# Could write as: mimic_dict[prev] = mimic_dict.get(prev, []) + [word]
# It's one line, but not totally satisfying.
prev = word
return mimic_dict
# LAB(replace solution)
# return
# LAB(end solution)
def print_mimic(mimic_dict, word):
"""Given mimic dict and start word, prints 200 random words."""
# +++your code here+++
# LAB(begin solution)
for unused_i in range(200):
print(word, end=' ')
nexts = mimic_dict.get(word) # Returns None if not found
if not nexts:
nexts = mimic_dict[''] # Fallback to '' if not found
word = random.choice(nexts)
# The 'unused_' prefix turns off the lint warning about the unused variable.
# LAB(replace solution)
# return
# LAB(end solution)
# Provided main(), calls mimic_dict() and mimic()
def main():
if len(sys.argv) != 2:
print('usage: ./mimic.py file-to-read')
sys.exit(1)
dict = mimic_dict(sys.argv[1])
print_mimic(dict, '')
if __name__ == '__main__':
main()
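# Worked example (hypothetical input): for a file containing "a b a c",
# mimic_dict returns {'': ['a'], 'a': ['b', 'c'], 'b': ['a']}, and
# print_mimic walks that dict, falling back to the '' key whenever it reaches
# a word with no recorded successors (such as 'c' here).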
|
amedina14/uip-iq17-pc3 | clase 7/Documentacion/tests/TestITBMS.py | Python | mit | 231 | 0.017316 | import unittest
from app.itbms import calcular_itbms
class TestITBMS(unittest.TestCase):
def test_calcular_itbms(self):
self.assertEqual(calcular_itbms(1.0),0.07)
if __name__=='__main__':
    unittest.main()
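# The expected value encodes the 7% ITBMS rate (Panama's sales tax):
# calcular_itbms(1.0) == 1.0 * 0.07 == 0.07.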
|
ankur-gupta91/horizon-net-ip | openstack_dashboard/test/helpers.py | Python | apache-2.0 | 25,725 | 0 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import copy
from functools import wraps # noqa
import os
import unittest
import django
from django.conf import settings
from django.contrib.messages.storage import default_storage # noqa
from django.core.handlers import wsgi
from django.core import urlresolvers
from django.test.client import RequestFactory # noqa
from django.test import utils as django_test_utils
from ceilometerclient.v2 import client as ceilometer_client
from cinderclient import client as cinder_client
import glanceclient
from heatclient import client as heat_client
import httplib2
from importlib import import_module
from keystoneclient.v2_0 import client as keystone_client
import mock
from mox3 import mox
from neutronclient.v2_0 import client as neutron_client
from novaclient.v2 import client as nova_client
from openstack_auth import user
from openstack_auth import utils
import six
from six import moves
from swiftclient import client as swift_client
from horizon import base
from horizon import conf
from horizon.test import helpers as horizon_helpers
from openstack_dashboard import api
from openstack_dashboard import context_processors
from openstack_dashboard.test.test_data import utils as test_utils
# Makes output of failing mox tests much easier to read.
wsgi.WSGIRequest.__repr__ = lambda self: "<class 'django.http.HttpRequest'>"
def create_stubs(stubs_to_create=None):
"""decorator to simplify setting up multiple stubs at once via mox
:param stubs_to_create: methods to stub in one or more modules
:type stubs_to_create: dict
The keys are python paths to the module containing the methods to mock.
To mock a method in openstack_dashboard/api/nova.py, the key is::
api.nova
The values are either a tuple or list of methods to mock in the module
indicated by the key.
For example::
('server_list',)
-or-
('flavor_list', 'server_list',)
-or-
['flavor_list', 'server_list']
Additionally, multiple modules can be mocked at once::
{
api.nova: ('flavor_list', 'server_list'),
api.glance: ('image_list_detailed',),
        }
"""
if stubs_to_create is None:
stubs_to_create = {}
if not isinstance(stubs_to_create, dict):
        raise TypeError("create_stub must be passed a dict, but a %s was "
                        "given." % type(stubs_to_create).__name__)
def inner_stub_out(fn):
@wraps(fn)
def instance_stub_out(self, *args, **kwargs):
for key in stubs_to_create:
if not (isinstance(stubs_to_create[key], tuple) or
isinstance(stubs_to_create[key], list)):
raise TypeError("The values of the create_stub "
"dict must be lists or tuples, but "
"is a %s."
% type(stubs_to_create[key]).__name__)
for value in stubs_to_create[key]:
self.mox.StubOutWithMock(key, value)
return fn(self, *args, **kwargs)
return instance_stub_out
return inner_stub_out
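# Hypothetical usage sketch for the decorator above, mirroring the
# docstring's dict format:
#
#   @create_stubs({api.nova: ('server_list',),
#                  api.glance: ('image_list_detailed',)})
#   def test_index(self):
#       ...  # record/replay the stubbed calls with self.mox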
class RequestFactoryWithMessages(RequestFactory):
def get(self, *args, **kwargs):
req = super(RequestFactoryWithMessages, self).get(*args, **kwargs)
req.user = utils.get_user(req)
req.session = []
req._messages = default_storage(req)
return req
def post(self, *args, **kwargs):
req = super(RequestFactoryWithMessages, self).post(*args, **kwargs)
req.user = utils.get_user(req)
req.session = []
req._messages = default_storage(req)
return req
@unittest.skipIf(os.environ.get('SKIP_UNITTESTS', False),
"The SKIP_UNITTESTS env variable is set.")
class TestCase(horizon_helpers.TestCase):
"""Specialized base test case class for Horizon.
It gives access to numerous additional features:
* A full suite of test data through various attached objects and
managers (e.g. ``self.servers``, ``self.user``, etc.). See the
docs for
:class:`~openstack_dashboard.test.test_data.utils.TestData`
for more information.
* The ``mox`` mocking framework via ``self.mox``.
* A set of request context data via ``self.context``.
* A ``RequestFactory`` class which supports Django's ``contrib.messages``
framework via ``self.factory``.
* A ready-to-go request object via ``self.request``.
* The ability to override specific time data controls for easier testing.
* Several handy additional assertion methods.
"""
def setUp(self):
def fake_conn_request(*args, **kwargs):
raise Exception("An external URI request tried to escape through "
"an httplib2 client. Args: %s, kwargs: %s"
% (args, kwargs))
self._real_conn_request = httplib2.Http._conn_request
httplib2.Http._conn_request = fake_conn_request
self._real_context_processor = context_processors.openstack
context_processors.openstack = lambda request: self.context
self.patchers = {}
self.add_panel_mocks()
super(TestCase, self).setUp()
def _setup_test_data(self):
super(TestCase, self)._setup_test_data()
test_utils.load_test_data(self)
self.context = {'authorized_tenants': self.tenants.list()}
def _setup_factory(self):
# For some magical reason we need a copy of this here.
self.factory = RequestFactoryWithMessages()
def _setup_user(self):
self._real_get_user = utils.get_user
tenants = self.context['authorized_tenants']
self.setActiveUser(id=self.user.id,
token=self.token,
username=self.user.name,
domain_id=self.domain.id,
user_domain_name=self.domain.name,
tenant_id=self.tenant.id,
service_catalog=self.service_catalog,
authorized_tenants=tenants)
def _setup_request(self):
super(TestCase, self)._setup_request()
self.request.session['token'] = self.token.id
def add_panel_mocks(self):
"""Global mocks on panels that get called on all views."""
self.patchers['aggregates'] = mock.patch(
'openstack_dashboard.dashboards.admin'
'.aggregates.panel.Aggregates.can_access',
mock.Mock(return_value=True))
self.patchers['aggregates'].start()
def tearDown(self):
httplib2.Http._conn_request = self._real_conn_request
context_processors.openstack = self._real_context_processor
utils.get_user = self._real_get_user
mock.patch.stopall()
super(TestCase, self).tearDown()
def setActiveUser(self, id=None, token=None, username=None, tenant_id=None,
service_catalog=None, tenant_name=None, roles=None,
authorized_tenants=None, enabled=True, domain_id=None,
user_domain_name=None):
def get_user(request):
return user.User(id=id,
token=token,
user=username,
domain_id=domain_id,
|
santazhang/BitTorrent-4.0.0-GPL | BitTorrent/btformats.py | Python | gpl-3.0 | 5,378 | 0.007066 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Written by Bram Cohen
import re
from BitTorrent import BTFailure
allowed_path_re = re.compile(r'^[^/\\.~][^/\\]*$')
ints = (long, int)
def check_info(info, check_paths=True):
if type(info) != dict:
raise BTFailure, 'bad metainfo - not a dictionary'
pieces = info.get('pieces')
if type(pieces) != str or len(pieces) % 20 != 0:
raise BTFailure, 'bad metainfo - bad pieces key'
piecelength = info.get('piece length')
if type(piecelength) not in ints or piecelength <= 0:
raise BTFailure, 'bad metainfo - illegal piece length'
name = info.get('name')
if type(name) != str:
raise BTFailure, 'bad metainfo - bad name'
if not allowed_path_re.match(name):
raise BTFailure, 'name %s disallowed for security reasons' % name
if info.has_key('files') == info.has_key('length'):
raise BTFailure, 'single/multiple file mix'
if info.has_key('length'):
length = info.get('length')
if type(length) not in ints or length < 0:
raise BTFailure, 'bad metainfo - bad length'
else:
files = info.get('files')
if type(files) != list:
raise BTFailure, 'bad metainfo - "files" is not a list of files'
for f in files:
if type(f) != dict:
raise BTFailure, 'bad metainfo - bad file value'
length = f.get('length')
if type(length) not in ints or length < 0:
raise BTFailure, 'bad metainfo - bad length'
path = f.get('path')
if type(path) != list or path == []:
raise BTFailure, 'bad metainfo - bad path'
for p in path:
if type(p) != str:
raise BTFailure, 'bad metainfo - bad path dir'
if check_paths and not allowed_path_re.match(p):
raise BTFailure, 'path %s disallowed for security reasons' % p
f = ['/'.join(x['path']) for x in files]
f.sort()
i = iter(f)
try:
name2 = i.next()
while True:
name1 = name2
name2 = i.next()
if name2.startswith(name1):
if name1 == name2:
raise BTFailure, 'bad metainfo - duplicate path'
elif name2[len(name1)] == '/':
raise BTFailure('bad metainfo - name used as both '
'file and subdirectory name')
except StopIteration:
pass
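# Illustrative sketch (hypothetical values): a minimal single-file 'info'
# dict that passes check_info -- 'pieces' must be a string whose length is a
# multiple of 20, one SHA1 digest per piece:
#
#   info = {'pieces': '\x00' * 20, 'piece length': 262144,
#           'name': 'example.bin', 'length': 100}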
def check_message(message, check_paths=True):
if type(message) != dict:
raise BTFailure, 'bad metainfo - wrong object type'
check_info(message.get('info'), check_paths)
if type(message.get('announce')) != str:
raise BTFailure, 'bad metainfo - no announce URL string'
def check_peers(message):
if type(message) != dict:
raise BTFailure
if message.has_key('failure reason'):
if type(message['failure reason']) != str:
raise BTFailure, 'non-text failure reason'
return
if message.has_key('warning message'):
if type(message['warning message']) != str:
raise BTFailure, 'non-text warning message'
peers = message.get('peers')
if type(peers) == list:
for p in peers:
if type(p) != dict:
raise BTFailure, 'invalid entry in peer list'
if type(p.get('ip')) != str:
raise BTFailure, 'invalid entry in peer list'
port = p.get('port')
            if type(port) not in ints or port <= 0:
raise BTFailure, 'invalid entry in peer list'
if p.has_key('peer id'):
peerid = p.get('peer id')
if type(peerid) != str or len(peerid) != 20:
raise BTFailure, 'invalid entry in peer list'
elif type(peers) != str or len(peers) % 6 != 0:
raise BTFailure, 'invalid peer list'
interval = message.get('interval', 1)
if type(interval) not in ints or interval <= 0:
        raise BTFailure, 'invalid announce interval'
minint = message.get('min interval', 1)
if type(minint) not in ints or minint <= 0:
raise BTFailure, 'invalid min announce interval'
if type(message.get('tracker id', '')) != str:
raise BTFailure, 'invalid tracker id'
npeers = message.get('num peers', 0)
if type(npeers) not in ints or npeers < 0:
        raise BTFailure, 'invalid peer count'
dpeers = message.get('done peers', 0)
if type(dpeers) not in ints or dpeers < 0:
raise BTFailure, 'invalid seed count'
last = message.get('last', 0)
if type(last) not in ints or last < 0:
raise BTFailure, 'invalid "last" entry'
|
Akasurde/bodhi | bodhi/services/overrides.py | Python | gpl-2.0 | 8,799 | 0.001023 | # This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import math
from cornice import Service
from pyramid.exceptions import HTTPNotFound
from sqlalchemy import func, distinct
from sqlalchemy.sql import or_
from bodhi import log
from bodhi.models import Build, BuildrootOverride, Package, Release, User
import bodhi.schemas
import bodhi.services.errors
from bodhi.validators import (
validate_override_builds,
validate_expiration_date,
validate_packages,
validate_releases,
validate_username,
)
override = Service(name='override', path='/overrides/{nvr}',
description='Buildroot Overrides',
cors_origins=bodhi.security.cors_origins_ro)
overrides = Service(name='overrides', path='/overrides/',
description='Buildroot Overrides',
# Note, this 'rw' is not a typo. the @comments service has
# a ``post`` section at the bottom.
cors_origins=bodhi.security.cors_origins_rw)
@override.get(accept=("application/json", "text/json"), renderer="json",
error_handler=bodhi.services.errors.json_handler)
@override.get(accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler)
@override.get(accept=("text/html"), renderer="override.html",
error_handler=bodhi.services.errors.html_handler)
def get_override(request):
db = request.db
nvr = request.matchdict.get('nvr')
build = Build.get(nvr, db)
if not build:
request.errors.add('url', 'nvr', 'No such build')
request.errors.status = HTTPNotFound.code
return
if not build.override:
request.errors.add('url', 'nvr',
'No buildroot override for this build')
request.errors.status = HTTPNotFound.code
return
return dict(override=build.override)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=("application/json", "text/json"), renderer="json",
error_handler=bodhi.services.errors.json_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=('application/atom+xml'), renderer='rss',
error_handler=bodhi.services.errors.html_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
@overrides.get(schema=bodhi.schemas.ListOverrideSchema,
accept=('text/html'), renderer='overrides.html',
error_handler=bodhi.services.errors.html_handler,
validators=(validate_packages, validate_releases,
validate_username)
)
def query_overrides(request):
db = request.db
data = request.validated
query = db.query(BuildrootOverride)
expired = data.get('expired')
if expired is not None:
if expired:
query = query.filter(BuildrootOverride.expired_date!=None)
else:
query = query.filter(BuildrootOverride.expired_date==None)
packages = data.get('packages')
if packages is not None:
query = query.join(BuildrootOverride.build).join(Build.package)
        query = query.filter(or_(*[Package.name==pkg.name for pkg in packages]))
releases = data.get('releases')
if releases is not None:
query = query.join(BuildrootOverride.build).join(Build.release)
query = query.filter(or_(*[Release.name==r.name for r in releases]))
like = data.get('like')
if like is not None:
query = query.join(BuildrootOverride.build)
query = query.filter(or_(*[
Build.nvr.like('%%%s%%' % like)
]))
    submitter = data.get('user')
if submitter is not None:
query = query.filter(BuildrootOverride.submitter==submitter)
query = query.order_by(BuildrootOverride.submission_date.desc())
# We can't use ``query.count()`` here because it is naive with respect to
# all the joins that we're doing above.
count_query = query.with_labels().statement\
.with_only_columns([func.count(distinct(BuildrootOverride.id))])\
.order_by(None)
total = db.execute(count_query).scalar()
page = data.get('page')
rows_per_page = data.get('rows_per_page')
pages = int(math.ceil(total / float(rows_per_page)))
query = query.offset(rows_per_page * (page - 1)).limit(rows_per_page)
return dict(
overrides=query.all(),
page=page,
pages=pages,
rows_per_page=rows_per_page,
total=total,
chrome=data.get('chrome'),
display_user=data.get('display_user'),
)
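# Pagination sketch: with total=25 matching overrides and rows_per_page=10,
# pages evaluates to ceil(25/10) == 3, and page 3 is served with offset 20,
# limit 10.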
@overrides.post(schema=bodhi.schemas.SaveOverrideSchema,
acl=bodhi.security.packagers_allowed_acl,
accept=("application/json", "text/json"), renderer='json',
error_handler=bodhi.services.errors.json_handler,
validators=(
validate_override_builds,
validate_expiration_date,
))
@overrides.post(schema=bodhi.schemas.SaveOverrideSchema,
acl=bodhi.security.packagers_allowed_acl,
accept=("application/javascript"), renderer="jsonp",
error_handler=bodhi.services.errors.jsonp_handler,
validators=(
validate_override_builds,
validate_expiration_date,
))
def save_override(request):
"""Save a buildroot override
This entails either creating a new buildroot override, or editing an
existing one. To edit an existing buildroot override, the buildroot
override's original id needs to be specified in the ``edited`` parameter.
"""
data = request.validated
edited = data.pop("edited")
caveats = []
try:
submitter = User.get(request.user.name, request.db)
if edited is None:
builds = data['builds']
overrides = []
if len(builds) > 1:
caveats.append({
'name': 'nvrs',
'description': 'Your override submission was '
'split into %i.' % len(builds)
})
for build in builds:
log.info("Creating a new buildroot override: %s" % build.nvr)
overrides.append(BuildrootOverride.new(
request,
build=build,
submitter=submitter,
notes=data['notes'],
expiration_date=data['expiration_date'],
))
if len(builds) > 1:
result = dict(overrides=overrides)
else:
result = overrides[0]
else:
log.info("Editing buildroot override: %s" % edited)
edited = Build.get(edited, request.db)
if edited is None:
request.errors.add('body', 'edited', 'No such build')
return
result = BuildrootOverride.edit(
|
dmlc/tvm | python/tvm/meta_schedule/testing/relay_workload.py | Python | apache-2.0 | 6,298 | 0.001905 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Workloads in Relay IR"""
from enum import Enum
from typing import Dict, Tuple
import tvm.relay.testing # pylint: disable=unused-import
from tvm import relay
from tvm.ir import IRModule
from tvm.runtime import NDArray
# Model types supported in Torchvision
class MODEL_TYPE(Enum): # pylint: disable=invalid-name
IMAGE_CLASSIFICATION = (1,)
VIDEO_CLASSIFICATION = (2,)
SEGMENTATION = (3,)
OBJECT_DETECTION = (4,)
TEXT_CLASSIFICATION = (5,)
# Specify the type of each model
MODEL_TYPES = {
"resnet18": MODEL_TYPE.IMAGE_CLASSIFICATION,
"mobilenet_v2": MODEL_TYPE.IMAGE_CLASSIFICATION,
"bert_base": MODEL_TYPE.TEXT_CLASSIFICATION,
}
def get_torch_model(
model_name: str,
input_shape: Tuple[int, ...],
output_shape: Tuple[int, int], # pylint: disable=unused-argument
dtype: str = "float32",
) -> Tuple[IRModule, Dict[str, NDArray]]:
"""Load model from torch model zoo
Parameters
----------
model_name : str
The name of the model to load
input_shape: Tuple[int, ...]
Tuple for input shape
output_shape: Tuple[int, int]
Tuple for output shape
dtype: str
Tensor data type
"""
assert dtype == "float32"
import torch # type: ignore # pylint: disable=import-error,import-outside-toplevel
from torchvision import models # type: ignore # pylint: disable=import-error,import-outside-toplevel
import transformers # type: ignore # pylint: disable=import-error,import-outside-toplevel
import os # type: ignore # pylint: disable=import-error,import-outside-toplevel
def do_trace(model, inp):
model.eval()
model_trace = torch.jit.trace(model, inp)
model_trace.eval()
return model_trace
# Load model from torchvision
if MODEL_TYPES[model_name] == MODEL_TYPE.TEXT_CLASSIFICATION:
os.environ["TOKENIZERS_PARALLELISM"] = "false"
model = transformers.BertModel(
transformers.BertConfig(
num_hidden_layers=12,
hidden_size=768,
intermediate_size=3072,
num_attention_heads=12,
return_dict=False,
)
)
model.eval()
input_data = torch.randint(10000, input_shape)
shape_list = [("input_ids", input_shape)]
scripted_model = torch.jit.trace(model, [input_data], strict=False)
elif MODEL_TYPES[model_name] == MODEL_TYPE.IMAGE_CLASSIFICATION:
model = getattr(models, model_name)()
# Setup input
input_data = torch.randn(input_shape).type(torch.float32)
shape_list = [("input0", input_shape)]
# Get trace. Depending on the model type, wrapper may be necessary.
scripted_model = do_trace(model, input_data)
else:
raise ValueError("Unsupported model in Torch model zoo.")
# Convert torch model to relay module
mod, params = relay.frontend.from_pytorch(scripted_model, shape_list)
return mod, params
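# Hypothetical usage sketch (requires torch and torchvision at runtime):
#
#   mod, params = get_torch_model("resnet18", (1, 3, 224, 224), (1, 1000))
#
# "resnet18" is registered above as an image-classification model, so the
# input is traced through torchvision and converted with
# relay.frontend.from_pytorch.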
def get_network(
name: str,
batch_size: int,
layout: str = "NHWC",
dtype: str = "float32",
) -> Tuple[IRModule, Dict[str, NDArray], Tuple[int, int, int, int], Tuple[int, int]]:
"""Get the symbol definition and random weight of a network"""
# meta-schedule prefers NHWC layout
if layout == "NHWC":
image_shape = (224, 224, 3)
elif layout == "NCHW":
image_shape = (3, 224, 224)
else:
raise ValueError("Invalid layout: " + layout)
input_shape: Tuple[int, int, int, int] = (batch_size,) + image_shape
output_shape: Tuple[int, int] = (batch_size, 1000)
if name.startswith("resnet-"):
n_layer = int(name.split("-")[1])
mod, params = relay.testing.resnet.get_workload(
num_layers=n_layer,
batch_size=batch_size,
layout=layout,
dtype=dtype,
image_shape=image_shape,
)
elif name.startswith("resnet3d-"):
n_layer = int(name.split("-")[1])
mod, params = relay.testing.resnet.get_workload(
num_layers=n_layer,
            batch_size=batch_size,
layout=layout,
dtype=dtype,
            image_shape=image_shape,
)
elif name == "mobilenet":
mod, params = relay.testing.mobilenet.get_workload(
batch_size=batch_size, layout=layout, dtype=dtype, image_shape=image_shape
)
elif name == "squeezenet_v1.1":
assert layout == "NCHW", "squeezenet_v1.1 only supports NCHW layout"
mod, params = relay.testing.squeezenet.get_workload(
version="1.1",
batch_size=batch_size,
dtype=dtype,
image_shape=image_shape,
)
elif name == "inception_v3":
input_shape = (batch_size, 3, 299, 299) if layout == "NCHW" else (batch_size, 299, 299, 3)
mod, params = relay.testing.inception_v3.get_workload(batch_size=batch_size, dtype=dtype)
elif name == "mxnet":
from mxnet.gluon.model_zoo.vision import get_model # type: ignore # pylint: disable=import-outside-toplevel
assert layout == "NCHW"
block = get_model("resnet50_v1", pretrained=True)
mod, params = relay.frontend.from_mxnet(block, shape={"data": input_shape}, dtype=dtype)
net = mod["main"]
net = relay.Function(
net.params, relay.nn.softmax(net.body), None, net.type_params, net.attrs
)
mod = IRModule.from_expr(net)
return mod, params, input_shape, output_shape
|
amlyj/pythonStudy | 2.7/standard_library/i18n/i18n.py | Python | mit | 693 | 0.003241 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time    : 17-9-28 5:22 PM
# @Author : Tom.Lee
# @CopyRight : 2016-2017 OpenBridge by yihecloud
# @File    : test2.py
# @Product : PyCharm
# @Docs :
# @Source :
import gettext
domain = 'test'
locale_dir = 'locale/'
# This call automatically installs the _() function into Python's built-in namespace; it must be used
gettext.install(domain, locale_dir)
zh_trans = gettext.translation(domain, locale_dir, languages=['zh_CN'])
# en_trans = gettext.translation(domain, locale_dir, languages=['en_US'])
# Chinese version
zh_trans.install()
# "_()" shows up as undefined in the IDE; it can be ignored
_ = _
# print _("Hello world!")
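# Catalog layout note: with domain 'test' and locale_dir 'locale/', the
# zh_CN lookup above expects a compiled catalog at
# locale/zh_CN/LC_MESSAGES/test.mo (built from a .po file with msgfmt).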
|
PGer/incubator-hawq | tools/legacy/gpmlib.py | Python | apache-2.0 | 35,426 | 0.008553 | from __future__ import with_statement
import os, sys
progname = os.path.split(sys.argv[0])[-1]
if sys.version_info < (2, 5, 0):
sys.exit(
'''Error: %s is supported on Python versions 2.5.0 or greater
Please upgrade python installed on this machine.''' % progname)
#turning off Deprecation warnings (for now)
import warnings
warnings.simplefilter('ignore', DeprecationWarning)
import platform, gplib, socket, random, popen2
import threading
from time import localtime, strftime
import pg8000
import pysync
##################
log = {}
log['verbose'] = True
log['module'] = ''
log['host'] = socket.gethostname().split('.')[0]
log['user'] = os.environ.get('USER') or os.environ.get('LOGNAME')
log['file'] = None
##################
def log_set_module(module):
global log
log['module'] = module
##################
def log_set_verbose(verbose):
global log
log['verbose'] = verbose
##################
def log_set_file(file):
global log
log['file'] = file
##################
def log_info(msg, tofile=False):
global log
logs = '%s:%s:%s:%s-[INFO]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['verbose'] and not tofile:
print logs
else:
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
##################
def log_error(msg, tofile=False):
global log
logs = '%s:%s:%s:%s-[ERROR]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['verbose'] and not tofile:
print logs
else:
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
##################
def log_warn(msg, tofile=False):
global log
logs = '%s:%s:%s:%s-[WARN]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['verbose'] and not tofile:
print logs
else:
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
##################
def log_fatal(msg, tofile=False):
global log
logs = '%s:%s:%s:%s-[FATAL]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['verbose'] and not tofile:
print logs
else:
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
##################
def error(msg):
global log
logs = '%s:%s:%s:%s-[ERROR]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
print logs
print '%s:%s:%s:%s-[ERROR]:- Program aborted.' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'])
sys.exit(1)
##################
def fatal(msg):
global log
logs = '%s:%s:%s:%s-[FATAL]:- %s' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'], msg)
if log['file']:
os.system('%s "%s" >> %s' % (ENV.ECHO, logs, log['file']))
print logs
print '%s:%s:%s:%s-[FATAL]:- Program aborted.' % (strftime('%Y%m%d:%H:%M:%S', localtime()), log['module'], log['host'], log['user'])
sys.exit(2)
#############
def findCmdInPath_noerror(cmd):
CMDPATH = ('/usr/kerberos/bin', '/usr/sfw/bin', '/opt/sfw/bin', '/usr/local/bin', '/bin',
'/usr/bin', '/sbin', '/usr/sbin', '/usr/ucb', '/sw/bin', '/opt/Navisphere/bin')
for p in CMDPATH:
f = os.path.join(p, cmd)
if os.path.exists(f):
return f
return ''
def findCmdInPath(cmd):
cmd = findCmdInPath_noerror(cmd)
if cmd == '':
fatal('Command %s not found' % cmd)
return cmd
#############
def makeCommand(cmd):
GPHOME=os.environ.get('GPHOME')
    LIB_PATH=os.environ.get(ENV.LIB_TYPE)
if not LIB_PATH:
LIB_PATH='%s/lib:%s/ext/python/lib:.' % (GPHOME, GPHOME)
PATH=os.environ.get('PATH')
if not PATH:
PATH='%s/bin:%s/ext/python/bin:.' % (GPHOME, GPHOME)
    PYTHONPATH=os.environ.get('PYTHONPATH')
if not PYTHONPATH:
PYTHONPATH="%(gphome)s/lib/python" % {'gphome':GPHOME}
return ('GPHOME=%s && export GPHOME '
'&& PATH=%s && export PATH '
'&& %s=%s && export %s '
'&& PYTHONPATH=%s && export PYTHONPATH '
'&& %s'
% (GPHOME,
PATH,
ENV.LIB_TYPE,
LIB_PATH,
ENV.LIB_TYPE,
PYTHONPATH,
cmd))
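# Sketch of the produced string (paths abbreviated): makeCommand('gpstate')
# returns a bourne-shell one-liner of the form
#   GPHOME=... && export GPHOME && PATH=... && export PATH && ... && gpstate
# so the wrapped command always runs with the Greenplum environment exported.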
#############
def run2(cmd, on_error_warn=False, setpid_callback=None):
p = None
ok = False
out = []
try:
p = popen2.Popen3(cmd, capturestderr=True)
if setpid_callback:
setpid_callback(p.pid)
e = p.wait()
for line in p.fromchild:
out.append(line)
ok = not e
if not ok and on_error_warn:
log_warn('-----------------------------------------------------')
log_warn('Command Failed: %s' % cmd)
log_warn('Exit status: %d' % os.WEXITSTATUS(e))
if len(out) > 0:
log_warn('Standard output:')
for l in out:
log_warn('\t %s' % l.strip())
else:
log_warn('Standard output: None')
err = []
for line in p.childerr:
err.append(line)
if len(err) > 0:
log_warn('Standard error:')
for l in err:
log_warn('\t %s' % l.strip())
else:
log_warn('Standard error: None')
log_warn('-----------------------------------------------------')
finally:
if p:
if p.fromchild:
p.fromchild.close()
if p.childerr:
p.childerr.close()
return (ok, out)
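# Usage sketch: (ok, out) = run_warn('ls /tmp') -- ok is True when the child
# exits with status 0, out collects its stdout lines, and failure details are
# logged through log_warn.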
def run(cmd):
return run2(cmd, False)
def run_warn(cmd, setpid_callback=None):
return run2(cmd, on_error_warn=True,setpid_callback=setpid_callback)
#############
def file_exists(file, host=None):
if not host or host == 'localhost':
return os.path.isfile(file)
else:
(ok, out) = run('%s test -f %s && test -r %s' % (gplib.ssh_prefix(host=host), file, file))
return ok
#############
def directory_exists(dir, host=None):
if not host or host == 'localhost':
return os.path.isdir(dir)
else:
(ok, out) = run('%s test -d %s' % (gplib.ssh_prefix(host=host), dir))
return ok
#############
def directory_writable(dir, host=None):
f = None
file = os.path.join(dir, 'tmp_file_test')
if not host or host == 'localhost':
try:
try:
f = open(file, 'w')
f.close()
except IOError, e:
fatal('write file %s error' % file)
finally:
f.close()
os.remove(file)
else:
gphome = os.environ.get('GPHOME')
cmd = makeCommand('''python -c \\"import sys, os; sys.path.extend(['%s', '%s']); import gpmlib; gpmlib.directory_writable('%s')\\"''' %
(os.path.join(gphome, 'bin', 'lib'), os.path.join(gphome, 'lib', 'python'), dir))
(ok, out) = run('''%s "%s"''' % (gplib.ssh_prefix(host=host), cmd))
if not ok:
fatal('write file %s error' % file)
return True
#############
class Env:
def __init__(self):
self.GPHOME = None
self.USER = None
# mirror type
self.MIRROR_NULL_TYPE = 0
self.MIRROR_SINGLE_HOME_GROUP_TYPE = 1
self.MIRROR_SINGLE_HOME_SPREAD_TYPE = 2
self.MIRROR_MULTI_HOME_GROUP_TYPE = 3
self.MIRROR_MULTI_HOME_SPREAD_TYPE = 4
self.DBNAME = 'template1'
self.GP_PG_VIEW = '''(SELECT l.dbid, l.isprimary, l.content, l."valid",
l.definedprimary FROM gp_pgdatabase() l(dbid smallint,
isprimary boolean, content s |
lsk112233/Clone-test-repo | minutes/management/commands/move_meeting_notes.py | Python | apache-2.0 | 1,113 | 0.000898 | import datetime
import re
from django.core.management.base import BaseCommand
from pages.models import Page
from ...models import Minutes
class Command(BaseCommand):
""" Move meeting notes from Pages to Minutes app """
def parse_date_from_path(self, path):
# Build our date from the URL
path_parts = path.split('/')
date = path_parts[-1]
m = re.match(r'^(\d\d\d\d)-(\d\d)-(\d\d)', date)
d = datetime.date(
int(m.group(1)),
int(m.group(2)),
int(m.group(3)),
)
return d
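    # Example: the page path 'psf/records/board/minutes/2013-01-01' yields
    # datetime.date(2013, 1, 1) -- only the trailing YYYY-MM-DD component of
    # the path is parsed.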
def handle(self, *args, **kwargs):
meeting_pages = Page.objects.filter(path__startswith='psf/records/board/minutes/')
for p in meeting_pages:
date = self.parse_date_from_path(p.path)
try:
m = Minutes.objects.get(date=date)
            except Minutes.DoesNotExist:
m = Minutes(date=date)
m.content = p.content
            m.content_markup_type = p.content_markup_type
m.is_published = True
m.save()
p.delete()
|
TEAM-HRA/hra_suite | HRAMath/src/hra_math/time_domain/poincare_plot/poincare_plot.py | Python | lgpl-3.0 | 29,136 | 0.008649 | '''
Created on 27-07-2012
@author: jurek
'''
from hra_math.utils.utils import print_import_error
try:
import os
import argparse
import glob
from hra_core.datetime_utils import invocation_time
from hra_core.misc import Separator
from hra_core.introspection import print_private_properties
from hra_core.introspection import copy_object
from hra_core.collections_utils import commas
from hra_core.collections_utils import nvl
from hra_core.io_utils import join_files
from hra_core.io_utils import as_path
from hra_core.io_utils import create_dir
from hra_math.utils.io_utils import shuffle_file
from hra_math.model.data_vector_file_data_source \
import DataVectorFileDataSource
from hra_math.model.parameters.poincare_plot_parameters \
import PoincarePlotParameters
from hra_math.model.parameters.poincare_plot_parameters \
import COMMON_PARAMETERS_GROUP
from hra_math.model.parameters.poincare_plot_parameters \
import MOVIE_PARAMETERS_GROUP
from hra_math.model.parameters.poincare_plot_parameters \
import STATISTICS_PARAMETERS_GROUP
from hra_math.statistics.statistics import get_statistics_names
from hra_math.statistics.statistics import ALL_STATISTICS
from hra_math.statistics.summary_statistics \
import get_summary_statistics_names
from hra_math.statistics.summary_statistics \
import ALL_SUMMARY_STATISTICS
from hra_math.time_domain.poincare_plot.filters.filter_utils \
import get_filters_short_names
from hra_math.time_domain.poincare_plot.poincare_plot_generator \
import PoincarePlotGenerator
from hra_math.time_domain.poincare_plot.poincare_plot_generator \
import CSVStartProgressGenerator
from hra_math.time_domain.poincare_plot.poincare_plot_generator \
import CSVProgressHandlerGenerator
from hra_math.time_domain.poincare_plot.poincare_plot_generator \
import MovieStartProgressGenerator
except ImportError as error:
print_import_error(__name__, error)
DEFAULT_OUTCOME_DIRECTORY = os.path.join(os.getcwd(), 'pp_outcomes')
def getSeparatorLabels():
"""
to get default separator label names
"""
return commas(Separator.getSeparatorsLabels())
class PoincarePlotManager(object):
def __init__(self):
self.__p__ = PoincarePlotParameters()
self.__progress_mark__ = None
self.__pp_generator__ = None
def __getattr__(self, name):
"""
if attribute starts and ends with two underscores
belongs to self object, otherwise to self.__p__ member
"""
if name.startswith('__') and name.endswith('__'):
return self.__dict__[name]
else:
return getattr(self.__dict__['__p__'], name)
def __setattr__(self, name, value):
"""
if attribute starts and ends with two underscores
sets for self object otherwise for self.__p__ member
"""
if name.startswith('__') and name.endswith('__'):
self.__dict__[name] = value
else:
setattr(self.__dict__['__p__'], name, value)
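    # Attribute-routing sketch: names wrapped in double underscores stay on
    # the manager itself, everything else is proxied to the parameter object,
    # e.g. manager.data_file = 'a.csv' actually sets data_file on the wrapped
    # PoincarePlotParameters instance (__p__).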
def generate(self):
if self.__p__.parameters_info_count == 0:
self.getParser()
self.__p__.prepareParameters()
self.__save_members__()
self.__pp_generator__ = PoincarePlotGenerator(parameters=self.__p__)
message = self.__p__.validateParameters()
if message:
print(message)
return
self.info(valued_only=True)
self.__process__(self.__process_file__)
def generate_movie(self):
if self.__p__.parameters_info_count == 0:
self.getParser()
self.__p__.prepareParameters()
self.__save_members__()
self.__pp_generator__ = PoincarePlotGenerator(parameters=self.__p__)
message = self.__p__.validateParameters()
if message:
print(message)
return
self.info(valued_only=True)
self.__process__(self.__process_file_for_movie__)
def __process__(self, _file_handler, disp=True, **params):
"""
the method which starts to generate Poincare Plot parameters
"""
sign_multiplicator = 80
file_counter = 0
if disp:
print('*' * sign_multiplicator)
if self.group_data_filename and self.data_file == None:
#create a group data input file
outfilename = as_path(self.output_dir,
"grp_" + self.group_data_filename)
joined = join_files(self.__data_filenames__(),
headers_count=self.headers_count,
outfilename=outfilename)
if joined:
                self.data_file = outfilename
if disp:
print('Using group data file: ' + self.data_file)
if self.data_file: # data_file parameter is superior to data_dir parameter @IgnorePep8
_data_file = self.__shuffle_file__(self.data_file)
if os.path.exists(_data_file) == False:
if disp:
print('The file: ' + _data_file + " doesn't exist")
else:
file_counter = 1
                if self.__p__.check_data_indexes(_data_file, disp):
_file_handler(_data_file, disp=disp, **params)
else:
for _file in self.__data_filenames__():
if os.path.isfile(_file):
file_counter = file_counter + 1
if disp:
print('=' * sign_multiplicator)
if not self.__p__.check_data_indexes(_file, disp):
continue
if _file_handler(_file, disp=disp, **params) == False:
break
if disp:
print('Processing finished')
if file_counter == 0:
print('No files to process [' + self.data_dir
+ self.extension + ']')
else:
print('Number of processed files: ' + str(file_counter))
@invocation_time
def __process_file__(self, _file, disp=False):
if disp:
if self.shuffle_data:
print('Processing shuffled file: ' + str(_file) + '\n')
else:
print('Processing file: ' + str(_file) + '\n')
file_data_source = DataVectorFileDataSource(_file=_file,
signal_index=self.signal_index,
annotation_index=self.annotation_index,
time_index=self.time_index,
headers_count=self.headers_count,
time_format=self.time_format,
separator=self.separator)
data_vector = file_data_source.getDataVector()
if data_vector.is_empty:
if disp:
print('No signal data or all data is skipped, check signal or time data columns !') # @IgnorePep8
return True
(ok, message) = self.__pp_generator__.precheck(reference_filename=_file) # @IgnorePep8
if ok == False:
if disp:
print('\n' + message)
return True
start_progress = CSVStartProgressGenerator()
start_progress.progress_mark = self.progress_mark
start_progress.info_handler = self.info_handler
start_progress.shuffle_data = self.shuffle_data
return self.__pp_generator__.generate_CSV(data_vector, _file,
start_progress=start_progress,
progress_handler=CSVProgressHandlerGenerator())
@invocation_time
def __process_file_for_movie__(self, _file, disp=False):
file_data_source = DataVectorFileDataSource(_file=_file,
signal_index=self.signal_index,
annotation_index=self.annotation_index,
time_index=self.time_index,
head |
alexis-roche/nipy | nipy/algorithms/clustering/bgmm.py | Python | bsd-3-clause | 36,777 | 0.000353 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Bayesian Gaussian Mixture Model Classes:
contains the basic fields and methods of Bayesian GMMs
the high-level functions are/should be bound in C
The base class BGMM relies on an implementation that performs Gibbs sampling
A derived class VBGMM uses Variational Bayes inference instead
A third class is introduced to take advantage of the old C-bindings,
but it is limited to diagonal covariance models
Author : Bertrand Thirion, 2008-2011
"""
from __future__ import print_function
from __future__ import absolute_import
import numpy as np
import numpy.random as nr
from scipy.linalg import inv, cholesky, eigvalsh
from scipy.special import gammaln
import math
from .utils import kmeans
from .gmm import GMM
##################################################################
# ancillary functions ############################################
##################################################################
def detsh(H):
"""
Routine for the computation of determinants of symmetric positive
matrices
Parameters
----------
H array of shape(n,n)
        the input matrix, assumed symmetric and positive
Returns
-------
dh: float, the determinant
"""
return np.prod(eigvalsh(H))
def dirichlet_eval(w, alpha):
"""
Evaluate the probability of a certain discrete draw w
from the Dirichlet density with parameters alpha
Parameters
----------
w: array of shape (n)
alpha: array of shape (n)
"""
if np.shape(w) != np.shape(alpha):
raise ValueError("incompatible dimensions")
loge = np.sum((alpha-1) * np.log(w))
logb = np.sum(gammaln(alpha)) - gammaln(alpha.sum())
loge -= logb
return np.exp(loge)
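# Density evaluated above, in log form (B is the multivariate Beta function):
#   log p(w) = sum_i (alpha_i - 1) * log(w_i) - log B(alpha)
#   log B(alpha) = sum_i gammaln(alpha_i) - gammaln(sum_i alpha_i)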
def generate_normals(m, P):
""" Generate a Gaussian sample with mean m and precision P
Parameters
----------
m array of shape n: the mean vector
P array of shape (n,n): the precision matrix
Returns
-------
ng : array of shape(n): a draw from the gaussian density
"""
icp = inv(cholesky(P))
ng = nr.randn(m.shape[0])
ng = np.dot(ng, icp)
ng += m
return ng
def generate_Wishart(n, V):
"""
Generate a sample from Wishart density
Parameters
----------
n: float,
the number of degrees of freedom of the Wishart density
V: array of shape (n,n)
the scale matrix of the Wishart density
Returns
-------
W: array of shape (n,n)
the draw from Wishart density
"""
icv = cholesky(V)
p = V.shape[0]
A = nr.randn(p, p)
for i in range(p):
A[i, i:] = 0
A[i, i] = np.sqrt(nr.chisquare(n - i))
R = np.dot(icv, A)
W = np.dot(R, R.T)
return W
def wishart_eval(n, V, W, dV=None, dW=None, piV=None):
"""Evaluation of the probability of W under Wishart(n,V)
Parameters
----------
n: float,
the number of degrees of freedom (dofs)
V: array of shape (n,n)
the scale matrix of the Wishart density
W: array of shape (n,n)
the sample to be evaluated
dV: float, optional,
determinant of V
dW: float, optional,
determinant of W
piV: array of shape (n,n), optional
inverse of V
Returns
-------
(float) the density
"""
# check that shape(V)==shape(W)
p = V.shape[0]
if dV is None:
dV = detsh(V)
if dW is None:
dW = detsh(W)
if piV is None:
piV = inv(V)
ldW = math.log(dW) * (n - p - 1) / 2
ltr = - np.trace(np.dot(piV, W)) / 2
la = (n * p * math.log(2) + math.log(dV) * n) / 2
lg = math.log(math.pi) * p * (p - 1) / 4
lg += gammaln(np.arange(n - p + 1, n + 1).astype(np.float) / 2).sum()
lt = ldW + ltr - la - lg
return math.exp(lt)
def normal_eval(mu, P, x, dP=None):
""" Probability of x under normal(mu, inv(P))
Parameters
----------
mu: array of shape (n),
the mean parameter
P: array of shape (n, n),
the precision matrix
x: array of shape (n),
the data to be evaluated
Returns
-------
(float) the density
"""
dim = P.shape[0]
if dP is None:
dP = detsh(P)
w0 = math.log(dP) - dim * math.log(2 * math.pi)
w0 /= 2
dx = mu - x
q = np.dot(np.dot(P, dx), dx)
w = w0 - q / 2
like = math.exp(w)
return like
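# Formula note: normal_eval returns exp(w) with
#   w = (log det(P) - dim * log(2*pi)) / 2 - (mu - x)' P (mu - x) / 2,
# i.e. the multivariate normal density in its precision parameterization.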
def generate_perm(k, nperm=100):
"""
returns an array of shape(nbperm, k) representing
the permutations of k elements
Parameters
----------
k, int the number of elements to be permuted
nperm=100 the maximal number of permutations
if gamma(k+1)>nperm: only nperm random draws are generated
Returns
-------
    p: array of shape(nperm,k): each row is a permutation of the k elements
"""
from scipy.special import gamma
if k == 1:
return np.reshape(np.array([0]), (1, 1)).astype(np.int)
if gamma(k + 1) < nperm:
# exhaustive permutations
aux = generate_perm(k - 1)
n = aux.shape[0]
perm = np.zeros((n * k, k)).astype(np.int)
for i in range(k):
perm[i * n:(i + 1) * n, :i] = aux[:, :i]
perm[i * n:(i + 1) * n, i] = k-1
perm[i * n:(i + 1) * n, i + 1:] = aux[:, i:]
else:
from numpy.random import rand
perm = np.zeros((nperm, k)).astype(np.int)
for i in range(nperm):
p = np.argsort(rand(k))
perm[i] = p
return perm
def multinomial(probabilities):
"""
    Generate samples from a multinomial distribution
Parameters
----------
probabilities: array of shape (nelements, nclasses):
        likelihood of each element belonging to each class
        each row is assumed to sum to 1
        One sample is drawn from each row
Returns
-------
z array of shape (nelements): the draws,
that take values in [0..nclasses-1]
"""
nvox = probabilities.shape[0]
nclasses = probabilities.shape[1]
cuml = np.zeros((nvox, nclasses + 1))
cuml[:, 1:] = np.cumsum(probabilities, 1)
aux = np.random.rand(nvox, 1)
z = np.argmax(aux < cuml, 1)-1
return z
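# Sampling sketch: each row is compared against a single uniform draw via its
# cumulative probabilities, so with probabilities=[[0.2, 0.8]] the returned
# label is 0 with probability 0.2 and 1 with probability 0.8.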
def dkl_gaussian(m1, P1, m2, P2):
"""
    Returns the KL divergence between Gaussian densities
Parameters
----------
m1: array of shape (n),
the mean parameter of the first density
P1: array of shape(n,n),
the precision parameters of the first density
m2: array of shape (n),
the mean parameter of the second density
    P2: array of shape(n,n),
the precision parameters of the second density
"""
tiny = 1.e-15
dim = np.size(m1)
if m1.shape != m2.shape:
raise ValueError("incompatible dimensions for m1 and m2")
if P1.shape != P2.shape:
raise ValueError("incompatible dimensions for P1 and P2")
    if P1.shape[0] != dim:
raise ValueError("incompatible dimensions for m1 and P1")
d1 = max(detsh(P1), tiny)
d2 = max(detsh(P2), tiny)
dkl = np.log(d1 / d2) + np.trace(np.dot(P2, inv(P1))) - dim
dkl += np.dot(np.dot((m1 - m2).T, P2), (m1 - m2))
dkl /= 2
return dkl
def dkl_wishart(a1, B1, a2, B2):
"""
    returns the KL divergence between two Wishart distributions of
parameters (a1,B1) and (a2,B2),
Parameters
----------
a1: Float,
degrees of freedom of the first density
B1: array of shape(n,n),
scale matrix of the first density
a2: Float,
degrees of freedom of the second density
B2: array of shape(n,n),
scale matrix of the second density
Returns
-------
dkl: float, the Kullback-Leibler divergence
"""
from scipy.special import psi, gammaln
tiny = 1.e-15
if B1.shape != B2.shape:
raise ValueError("incompatible dimensions for B1 and B2")
dim = B1.shape[0]
d1 = max(detsh(B1), tiny)
d2 = max(detsh(B2), tiny)
lgc = dim * (dim - 1) * math.log(np.pi) / 4
lg1 = lgc
lg2 = lgc
lw1 = - math.log(d1) + dim * math.log(2)
lw2 = - math.log(d2) + |
lowks/SDST | setup.py | Python | mit | 788 | 0.005076 | from setuptools import setup, find_packages
import sys, os, glob
version = '0.7.1'
setup(name='seqtools',
version=version,
description="",
long_description="""\
""",
      classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='Sean Davis',
author_email='seandavi@gmail.com',
url='https://github.com/seandavi/seqtools',
license='MIT',
scripts=glob.glob('scripts/*'),
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
include_package_data=True,
zip_safe=False,
install_requires=[
'PyVCF>=0.6.5',
'pylev'
#'python-Levenshtein'
],
entry_points="""
# -*- Entry points: -*-
""",
)
|
npuichigo/ttsflow | third_party/tensorflow/tensorflow/contrib/framework/python/ops/ops.py | Python | apache-2.0 | 2,599 | 0.002309 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and functions used to construct graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
__all__ = ['get_graph_from_inputs',
'get_name_scope']
def get_graph_from_inputs(op_input_list, graph=None):
"""Returns the appropriate graph to use for the given inputs.
1. If `graph` is provided, we validate that all inputs in `op_input_list` are
from the same graph.
2. Otherwise, we attempt to select a graph from the first Operation- or
Tensor-valued input in `op_input_list`, and validate that all other
such inputs are in the same graph.
3. If the graph was not specified and it could not be inferred from
`op_input_list`, we attempt to use the default graph.
Args:
op_input_list: A list of inputs to an operation, which may include `Tensor`,
`Operation`, and other objects that may be converted to a graph element.
graph: (Optional) The explicit graph to use.
Raises:
TypeError: If `op_input_list` is not a list or tuple, or if graph is not a
Graph.
ValueError: If a graph is explicitly passed and not all inputs are from it,
or if the inputs are from multiple graphs, or we could not find a graph
and there was no default graph.
Returns:
The appropriate graph to use for the given inputs.
"""
# pylint: disable=protected-access
return ops._get_graph_from_inputs(op_input_list, graph)
def get_name_scope():
"""Returns the current name scope of the default graph.
For example:
```python
with tf.name_scope('scope1'):
with tf.name_scope('scope2'):
print(tf.contrib.framework.get_name_scope())
```
would print the string `scope1/scope2`.
Returns:
A string representing the current name scope.
"""
return ops.get_default_graph().get_name_scope()
|
NinjaMSP/crossbar | sitecustomize.py | Python | agpl-3.0 | 85 | 0 | try:
    import coverage
coverage.process_startup()
except ImportError:
pass
|
MathewWi/cube64-dx | notes/read-sram.py | Python | gpl-2.0 | 447 | 0.004474 | #!/usr/bin/env python
#
# Read the memory pak's 32k SRAM to a binary file specified on the command line.
#
# --Micah Dowty <micah@navi.cx>
#
from bus import Bus
import sys
b = Bus()
if b.probe() != "memory":
sys.exit(1)
f = open(sys.argv[1], "wb")
addr = 0x0000
while addr < 0x8000:
sys.stdout.write("\r0x%04X (%.02f%%)" % (addr, addr * 100.0 / 0x8000))
f.write(b.read(addr))
addr += 32
sys.stdout.write("\n")
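# Throughput note: the loop issues 0x8000 / 32 = 1024 pak reads of 32 bytes
# each to cover the full 32 KiB address space.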
### The End ###
|
explosiveduck/ed2d | ed2d/__init__.py | Python | bsd-2-clause | 161 | 0.006211 | def _init():
# any modules that need to be initialized early will be imported here
import ed2d.debug
ed2d.debug.debug('Debug module init.')
_init()
|
tylertian/Openstack | openstack F/glance/setup.py | Python | apache-2.0 | 2,108 | 0 | #!/usr/bin/python
# Copyright (c) 2010 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
from glance.openstack.common import setup
from glance.version import version_info as version
requires = setup.parse_requirements()
depend_links = setup.parse_dependency_links()
setuptools.setup(
name='glance',
version=version.canonical_version_string(always=True),
description='The Glance project provides services for discovering, '
'registering, and retrieving virtual machine images',
license='Apache License (2.0)',
author='OpenStack',
author_email='openstack@lists.launchpad.net',
url='http://glance.openstack.org/',
packages=setuptools.find_packages(exclude=['bin']),
test_suite='nose.collector',
cmdclass=setup.get_cmdclass(),
include_package_data=True,
install_requires=requires,
dependency_links=depend_links,
classifiers=[
'Development Status :: 4 - Beta',
'License :: OSI Approved :: Apache Software License',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.6',
'Environment :: No Input/Output (Daemon)',
],
scripts=['bin/glance',
'bin/glance-api',
'bin/glance-cache-prefetcher',
'bin/glance-cache-pruner',
'bin/glance-cache-manage',
'bin/glance-cache-cleaner',
'bin/glance-control',
'bin/glance-manage',
'bin/glance-registry',
'bin/glance-replicator',
'bin/glance-scrubber'],
py_modules=[])
|
jeremiah-c-leary/vhdl-style-guide | vsg/token/element_declaration.py | Python | gpl-3.0 | 366 | 0 |
from vsg import parser
class colon(parser.colon):
'''
unique_id = element_declaration : colon
'''
    def __init__(self, sString=':'):
parser.colon.__init__(self)
class semicolon(parser.semicolon):
'''
unique_id = element_declaration : semicolon
'''
def __init__(self, sString=';'):
        parser.semicolon.__init__(self)
|
huyhg/runtimes-common | runtime_builders/builder_util.py | Python | apache-2.0 | 5,271 | 0 | #!/usr/bin/python
# Copyright 2017 Google Inc. All rights reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import yaml
import subprocess
import sys
import tempfile
RUNTIME_BUCKET = 'runtime-builders'
RUNTIME_BUCKET_PREFIX = 'gs://{0}/'.format(RUNTIME_BUCKET)
MANIFEST_FILE = RUNTIME_BUCKET_PREFIX + 'runtimes.yaml'
SCHEMA_VERSION = 1
def copy_to_gcs(file_path, gcs_path):
command = ['gsutil', 'cp', file_path, gcs_path]
try:
output = subprocess.check_output(command)
logging.debug(output)
except subprocess.CalledProcessError as cpe:
        logging.error('Error encountered when writing to GCS! %s', cpe)
except Exception as e:
logging.error('Fatal error encountered when shelling command {0}'
.format(command))
logging.error(e)
def write_to_gcs(gcs_path, file_contents):
try:
logging.info(gcs_path)
fd, f_name = tempfile.mkstemp(text=True)
os.write(fd, file_contents)
copy_to_gcs(f_name, gcs_path)
finally:
os.remove(f_name)
def get_file_from_gcs(gcs_file, temp_file):
command = ['gsutil', 'cp', gcs_file, temp_file]
try:
subprocess.check_output(command, stderr=subprocess.STDOUT)
return True
except subprocess.CalledProcessError as e:
logging.error('Error when retrieving file from G | CS! {0}'
.format(e.output))
return False
def verify_manifest(manifest):
"""Verify that the provided runtime manifest is valid before publishing.
Aliases are provided for runtime 'names' that can be included in users'
application configuration files: this method ensures that all the aliases
can resolve to actual builder files.
All builders and aliases are turned into nodes in a graph, which is then
traversed to be sure that all nodes lead down to a bu | ilder node.
Example formatting of the manifest, showing both an 'alias' and
an actual builder file:
runtimes:
java:
target:
runtime: java-openjdk
java-openjdk:
target:
file: gs://runtimes/java-openjdk-1234.yaml
deprecation:
message: "openjdk is deprecated."
"""
node_graph = _build_manifest_graph(manifest)
_verify_manifest_graph(node_graph)
def _verify_manifest_graph(node_graph):
for _, node in node_graph.items():
seen = set()
child = node
while True:
seen.add(child)
if not child.child:
break
elif child.child not in node_graph.keys():
logging.error('Non-existent alias provided for {0}: {1}'
.format(child.name, child.child))
sys.exit(1)
child = node_graph[child.child]
if child in seen:
logging.error('Circular dependency found in manifest! '
'Check node {0}'.format(child))
sys.exit(1)
if not child.isBuilder:
logging.error('No terminating builder for alias {0}'
.format(node.name))
sys.exit(1)
def _build_manifest_graph(manifest):
try:
node_graph = {}
for key, val in manifest.get('runtimes').iteritems():
target = val.get('target', {})
if not target:
if 'deprecation' not in val:
logging.error('No target or deprecation specified for '
'runtime: {0}'.format(key))
sys.exit(1)
continue
child = None
isBuilder = 'file' in target.keys()
if not isBuilder:
child = target['runtime']
node = node_graph.get(key, {})
if not node:
node_graph[key] = Node(key, isBuilder, child)
return node_graph
except (KeyError, AttributeError) as ke:
        logging.error('Error encountered when verifying manifest: %s', ke)
sys.exit(1)
def load_manifest_file():
try:
_, tmp = tempfile.mkstemp(text=True)
command = ['gsutil', 'cp', MANIFEST_FILE, tmp]
subprocess.check_output(command, stderr=subprocess.STDOUT)
with open(tmp) as f:
return yaml.load(f)
except subprocess.CalledProcessError:
logging.info('Manifest file not found in GCS: creating new one.')
return {'schema_version': SCHEMA_VERSION}
finally:
os.remove(tmp)
class Node:
def __init__(self, name, isBuilder, child):
self.name = name
self.isBuilder = isBuilder
self.child = child
def __repr__(self):
return '{0}: {1}|{2}'.format(self.name, self.isBuilder, self.child)
|
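For reference, a minimal manifest that passes `verify_manifest` above, mirroring the alias-to-builder shape from its docstring (the GCS path is illustrative; on failure the module calls `sys.exit` rather than raising):

```python
manifest = {
    'schema_version': 1,
    'runtimes': {
        # Alias node: resolves to another runtime name.
        'java': {'target': {'runtime': 'java-openjdk'}},
        # Builder node: terminates the chain with a builder file.
        'java-openjdk': {'target': {'file': 'gs://runtimes/java-openjdk-1234.yaml'}},
    },
}
verify_manifest(manifest)  # walks java -> java-openjdk, finds a builder, returns
```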
xskh2007/zjump | jumpserver/urls.py | Python | gpl-2.0 | 955 | 0.002094 | from django.conf.urls import patterns, include, url
urlpatterns = patterns('jumpserver.views',
# Examples:
url(r'^$', 'index', name='index'),
# url(r'^api/user/$', 'api_user'),
ur | l(r'^skin_config/$', 'skin_config', name='skin_config'),
url(r'^login/$', 'Login', name='login'),
url(r'^logout/$', 'Logout', name='logout'),
url(r'^exec_cmd/$', 'exec_cmd', name='exec_cmd'),
url(r'^file/upload/$', 'upload', name='file_upload'),
url(r'^file/download/$', 'dow | nload', name='file_download'),
url(r'^setting', 'setting', name='setting'),
url(r'^terminal/$', 'web_terminal', name='terminal'),
url(r'^mylog/$', 'mylog', name='mylog'),
url(r'^juser/', include('juser.urls')),
url(r'^jasset/', include('jasset.urls')),
url(r'^jlog/', include('jlog.urls')),
url(r'^jperm/', include('jperm.urls')),
url(r'^dbtool/', include('dbtool.urls')),
url(r'^cachemanage/', include('cachemanage.urls')),
)
|
metaperl/clickmob | src/dhash.py | Python | mit | 2,158 | 0.000463 | __author__ = 'anicca'
# core
import math
import sys
from itertools import izip
# 3rd party
from PIL import Image, ImageChops
import argh
def dhash(image, hash_size=8):
# Grayscale and shrink the image in one step.
image = image.convert('L').resize(
(hash_size + 1, hash_size),
Image.ANTIALIAS,
)
pixels = list(image.getdata())
# Compare adjacent pixels.
difference = []
for row in range(hash_size):
for col in range(hash_size):
pixel_left = image.getpixel((col, row))
pixel_right = image.getpixel((col + 1, row))
difference.append(pixel_left > pixel_right)
# Convert the binary array to a hexadecimal string.
decimal_value = 0
hex_string = []
for index, value in enumerate(difference):
if value:
decimal_value += 2 ** (index % 8)
if (index % 8) == 7:
hex_string.append(hex(decimal_value)[2:].rjust(2, '0'))
decimal_value = 0
return ''.join(hex_string)
def rosetta(image1, image2):
i1 = Image.open(image1)
i2 = Image.open(image2)
assert i1.mode == i2.mode, "Different kinds of images."
print i1.size, i2.size
assert i1.size == i2.size, "Different sizes."
pairs = izip(i1.getdata(), i2.getdata())
| if len(i1.getbands()) == 1:
# for gray-scale jpegs
dif = sum(abs(p1 - p2) for p1, p2 in pairs)
else:
dif = sum(abs(c1 - c2) for p1, p2 in pairs for c1, c2 in zip(p1, p2))
ncomponents = | i1.size[0] * i1.size[1] * 3
retval = (dif / 255.0 * 100) / ncomponents
return retval
def rmsdiff_2011(im1, im2):
"Calculate the root-mean-square difference between two images"
im1 = Image.open(im1)
im2 = Image.open(im2)
diff = ImageChops.difference(im1, im2)
h = diff.histogram()
sq = (value * (idx ** 2) for idx, value in enumerate(h))
sum_of_squares = sum(sq)
rms = math.sqrt(sum_of_squares / float(im1.size[0] * im1.size[1]))
return rms
def main(image_filename1, image_filename2, dhash=False, rosetta=False, rmsdiff=False):
pass
if __name__ == '__main__':
argh.dispatch_command(main)
|
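dhash values are compared by Hamming distance rather than equality. A sketch of the usual comparison on the hex strings returned above (the file names and the 10-bit similarity threshold are illustrative assumptions):

```python
def hamming(hex1, hex2):
    # XOR the hashes as integers and count the differing bits.
    return bin(int(hex1, 16) ^ int(hex2, 16)).count("1")

h1 = dhash(Image.open("a.png"))  # hypothetical input files
h2 = dhash(Image.open("b.png"))
print "similar" if hamming(h1, h2) <= 10 else "different"
```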
hasgeek/hasjob | migrations/versions/449914911f93_post_admins.py | Python | agpl-3.0 | 812 | 0.002463 | """Post admins
Revision ID: 449914911f93
Revises: 2420dd9c9949
Create Date: 2013-12-03 23:03:02.404457
"""
# revision identifiers, used by Alembic.
revision = '449914911f93'
down_revision = '2420dd9c9949'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'jobpost_admin',
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('updated_at', sa.DateTime(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.Column('jobpost_id', sa.Integer(), nullabl | e=False),
sa.ForeignKeyConstraint(['jobpost_id'], ['jobpost.id']),
sa.ForeignKeyConstraint(['user_id'], ['user.id']),
sa.Prim | aryKeyConstraint('user_id', 'jobpost_id'),
)
def downgrade():
op.drop_table('jobpost_admin')
|
Abdoctor/behave | tests/issues/test_issue0453.py | Python | bsd-2-clause | 2,081 | 0.003029 | # -*- coding: UTF-8 -*-
"""
MAYBE: DUPLICATES: #449
NOTE: traceback2 (backport for Python2) solves the problem.
def foo(stop):
raise Exception(u"по русски")
Result:
File "features/steps/steps.py", line 8, in foo
        raise Exception(u"по ����ки") <-- This is not OK
Exception: по русски <-- This is OK
It happens here (https://github.com/behave/behave/blob/master/behave/model.py#L1299)
| because traceback.format_exc() creates incorrect text.
You then convert it using _text() and result is also bad.
To fix it, you may take e.message which is correct and traceback.format_tb(sys.exc_info()[2])
which is also correct.
"""
from __future | __ import print_function
from behave.textutil import text
from hamcrest.core import assert_that, equal_to
from hamcrest.library import contains_string
import six
import pytest
if six.PY2:
import traceback2 as traceback
else:
import traceback
def problematic_step_impl(context):
raise Exception(u"по русски")
@pytest.mark.parametrize("encoding", [None, "UTF-8", "unicode_escape"])
def test_issue(encoding):
"""
with encoding=UTF-8:
File "/Users/jens/se/behave_main.unicode/tests/issues/test_issue0453.py", line 31, in problematic_step_impl
raise Exception(u"по русски")
Exception: \u043f\u043e \u0440\u0443\u0441\u0441\u043a\u0438
with encoding=unicode_escape:
File "/Users/jens/se/behave_main.unicode/tests/issues/test_issue0453.py", line 31, in problematic_step_impl
raise Exception(u"по ÑÑÑÑки")
Exception: по русски
"""
context = None
text2 = ""
expected_text = u"по русски"
try:
problematic_step_impl(context)
except Exception:
text2 = traceback.format_exc()
text3 = text(text2, encoding)
print(u"EXCEPTION-TEXT: %s" % text3)
assert_that(text3, contains_string(u'raise Exception(u"по русски"'))
assert_that(text3, contains_string(u"Exception: по русски"))
|
CODAIT/graph_def_editor | graph_def_editor/reroute.py | Python | apache-2.0 | 19,100 | 0.006283 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Various function for graph rerouting."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
from graph_def_editor import node, subgraph, util
_allowed_symbols = [
"swap_ts",
"reroute_ts",
"swap_inputs",
"reroute_inputs",
"swap_outputs",
"reroute_outputs",
"swap_ios",
"reroute_ios",
"remove_control_inputs",
"add_control_inputs",
]
def _check_ts_compatibility(ts0, ts1):
"""Make sure the shape and dtype of two lists of tensors are compatible.
Args:
ts0: an object convertible to a list of `gde.Tensor`.
ts1: an object convertible to a list of `gde.Tensor`.
Raises:
ValueError: if any pair of tensors (same index in ts0 and ts1) have
a dtype or a shape which is not compatible.
"""
ts0 = util.make_list_of_t(ts0)
ts1 = util.make_list_of_t(ts1)
if len(ts0) != len(ts1):
raise ValueError("ts0 and ts1 have different sizes: {} != {}".format(
len(ts0), len(ts1)))
for t0, t1 in zip(ts0, ts1):
# check dtype
dtype0, dtype1 = t0.dtype, t1.dtype
if not dtype0.is_compatible_with(dtype1):
raise ValueError("Dtypes {} and {} of tensors {} and {} are not "
"compatible.".format(dtype0, dtype1, t0.name, t1.name))
# check shape
shape0, shape1 = t0.shape, t1.shape
if not shape0.is_compatible_with(shape1):
raise ValueError("Shapes {} and {} of tensors {} and {} are not "
"compatible.".format(shape0, shape1, t0.name, t1.name))
class _RerouteMode(object):
"""Enums for reroute's mode.
swap: the end of tensors a and b are swapped.
a2b: the end of the tensor a are also rerouted to the end of the tensor b
(the end of b is left dangling).
b2a: the end of the tensor b are also rerouted to the end of the tensor a
(the end of a is left dangling).
"""
swap, a2b, b2a = range(3)
@classmethod
def check(cls, mode):
"""Check swap mode.
Args:
mode: an integer representing one of the modes.
Returns:
A tuple `(a2b, b2a)` boolean indicating what rerouting needs doing.
Raises:
ValueError: if mode is outside the enum range.
"""
if mode == cls.swap:
return True, True
elif mode == cls.b2a:
return False, True
elif mode == cls.a2b:
return True, False
else:
raise ValueError("Unknown _RerouteMode: {}".format(mode))
def _reroute_t(t0, t1, consumers1, can_modify=None, cannot_modify=None):
"""Reroute the end of the tensors (t0,t1).
Warning: this function is directly manipulating the internals of the
`gde.Graph`.
Args:
t0: a `gde.Tensor`.
t1: a `gde.Tensor`.
consumers1: The consumers of t1 which needs to be rerouted.
can_modify: iterable of operations which can be modified. Any operation
      outside can_modify will be left untouched by this function.
cannot_modify: iterable of operations which cannot be modified.
Any operation within cannot_modify will be left untouched by this
function.
Returns:
The number of individual modifications made by the function.
"""
nb_update_inputs = 0
if can_modify is not None:
consumers1 &= can_modify
if cannot_modify is not None:
consumers1 -= cannot_modify
consumers1_indices = {}
for consumer1 in consumers1:
consumers1_indices[consumer1] = [i for i, t in enumerate(consumer1.inputs)
if t is t1]
for consumer1 in consumers1:
for i in consumers1_indices[consumer1]:
consumer1.replace_input(i, t0)
nb_update_inputs += 1
return nb_update_inputs
def _reroute_ts(ts0, ts1, mode, can_modify=None, cannot_modify=None):
"""Reroute the end of the tensors in each pair (t0,t1) in ts0 x ts1.
This function is the back-bone of the Graph-Editor. It is essentially a thin
wrapper on top of `gde.Node.replace_input`.
Given a pair of tensor t0, t1 in ts0 x ts1, this function re-route the end
of t0 and t1 in three possible ways:
1) The reroute mode is "a<->b" or "b<->a": the tensors' end are swapped. After
this operation, the previous consumers of t0 are now consumers of t1 and
vice-versa.
2) The reroute mode is "a->b": the tensors' end of t0 are re-routed to the
tensors's end of t1 (which are left dangling). After this operation, the
previous consumers of t0 are still consuming t0 but the previous consumers of
t1 are not also consuming t0. The tensor t1 has no consumer.
3) The reroute mode is "b->a": this mode is the symmetric of the "a->b" mode.
Note that this function is re-routing the end of two tensors, not the start.
Re-routing the start of two tensors is not supported by this library. The
reason for that is the following: TensorFlow, by design, creates a strong bond
between an op and its output tensor. This Graph editor follows this design and
treats an operation A and its generating tensors {t_i} as an entity which
cannot be broken. In other words, an op cannot be detached from any of its
output tensors, ever. But it is possible to detach an op from its input
tensors, which is what this function concerns itself with.
Warning: this function is directly manipulating the internals of the `gde.Graph`.
Args:
ts0: an object convertible to a list of `gde.Tensor`.
ts1: an object convertible to a list of `gde.Tensor`.
mode: what to do with those tensors: "a<->b" or "b<->a" for swapping and
"a->b" or "b->a" for one direction re-routing.
can_modify: iterable of operations which can be modified. Any operation
      outside can_modify will be left untouched by this function.
cannot_modify: iterable of operations which cannot be modified.
Any operation within cannot_modify will be left untouched by this
function.
Returns:
The number of individual modifications made by the fun | ction.
Raises:
TypeError: if `ts0` or `ts1` cannot be converted to a list of `gde. | Tensor`.
TypeError: if `can_modify` or `cannot_modify` is not `None` and cannot be
converted to a list of `gde.Node`.
"""
a2b, b2a = _RerouteMode.check(mode)
ts0 = util.make_list_of_t(ts0)
ts1 = util.make_list_of_t(ts1)
_check_ts_compatibility(ts0, ts1)
if cannot_modify is not None:
cannot_modify = frozenset(util.make_list_of_op(cannot_modify))
if can_modify is not None:
can_modify = frozenset(util.make_list_of_op(can_modify))
nb_update_inputs = 0
precomputed_consumers = []
# precompute consumers to avoid issue with repeated tensors:
for t0, t1 in zip(ts0, ts1):
consumers0 = set(t0.consumers())
consumers1 = set(t1.consumers())
precomputed_consumers.append((consumers0, consumers1))
for t0, t1, consumers in zip(ts0, ts1, precomputed_consumers):
if t0 is t1:
continue # Silently ignore identical tensors.
consumers0, consumers1 = consumers
if a2b:
nb_update_inputs += _reroute_t(t0, t1, consumers1, can_modify,
cannot_modify)
if b2a:
nb_update_inputs += _reroute_t(t1, t0, consumers0, can_modify,
cannot_modify)
return nb_update_inputs
def swap_ts(ts0, ts1, can_modify=None, cannot_modify=None):
"""For each tensor's pair, swap the end of (t0,t1).
B0 B1 B0 B1
| | => X
A0 A1 A0 A1
Args:
ts0: an object convertible to a list of `gde.Tensor`.
ts1: an object convertible to a list of |
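The row above is cut off by the dataset's suffix limit, but the rewiring `_reroute_t` performs is compact: every consumer input slot that reads t1 is pointed at t0 instead. A toy standalone model (the dict graph and tensor names are made up):

```python
# op name -> list of input tensor names
consumers = {"add": ["t1", "t1"], "mul": ["t0", "t1"]}

def reroute_t(t0, t1, consumers):
    n = 0
    for inputs in consumers.values():
        for i, name in enumerate(inputs):
            if name == t1:
                inputs[i] = t0  # this consumer now reads t0 instead of t1
                n += 1
    return n

assert reroute_t("t0", "t1", consumers) == 3  # two slots in add, one in mul
```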
rbuffat/pyidf | tests/test_setpointmanagerfollowsystemnodetemperature.py | Python | apache-2.0 | 3,028 | 0.004293 | import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.setpoint_managers import SetpointManagerFollowSystemNodeTemperature
log = logging.getLogger(__name__)
class TestSetpointManagerFollowSystemNodeTemperature(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_setpointmanagerfollowsystemnodetemperature(self):
pyidf.validation_level = ValidationLevel.error
obj = SetpointManagerFollowSystemNodeTemperature()
# alpha
var_name = "Name"
obj.name = var_name
# alpha
var_control_variable = "Temperature"
obj.control_variable = var_control_variable
# node
var_reference_node_name = "node|Reference Node Name"
obj.reference_node_name = var_reference_node_name
# alpha
var_reference_temperature_type = "NodeWetBulb"
obj.reference_temperature_type = var_reference_temperature_type
# real
var_offset_temperature_difference = 5.5
obj.offset_temperature_difference = var_offset_temperature_difference
# real
var_maximum_limit_setpoint_temperature = 6.6
obj.maximum_limit_setpoint_temperature = var_maximum_limit_setpoint_temperature
# real
var_minimum_limit_setpoint_temperature = 7.7
obj.minimum_limit_setpoint_temperature = var_minimum_limit_setpoint_temperature
# node
var_setpoint_node_or_nodelist_name = "node|Setpoint Node or NodeList Name"
obj.setpoint_node_or_nodelist_name = var_setpoint_node_or_nodelist_name
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].name, var_name)
self.assertEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].control_variable, var_control_variable)
self.assertEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].reference_node_name, var_reference_node_name)
self.assertEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].reference_temperature_type, var_reference_temperature_type)
self.assertAlmostEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].offset_temperature_difference, var_offset_temperature_difference)
self.assertAlmostEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].maximum_limit | _setpoint_temperature, var_maximum_limit_setpoint_temperature)
self.assertAlmostEqual(idf2.setpointmanagerfollowsystemnodetemperatures[0].minimum_limit_setpoint_temperature, var_minimum_limit_setpoint_temperature)
self.assertEqual(idf2.setpointmanagerfollowsys | temnodetemperatures[0].setpoint_node_or_nodelist_name, var_setpoint_node_or_nodelist_name) |
KiChjang/servo | tests/wpt/web-platform-tests/tools/third_party/h2/test/test_settings.py | Python | mpl-2.0 | 16,680 | 0 | # -*- coding: utf-8 -*-
"""
test_settings
~~~~~~~~~~~~~
Test the Settings object.
"""
import pytest
import h2.errors
import h2.exceptions
import h2.settings
from hypothesis import given, assume
from hypothesis.strategies import (
integers, booleans, fixed_dictionaries, builds
)
class TestSettings(object):
"""
Test the Settings object behaves as expected.
"""
def test_settings_defaults_client(self):
"""
The Settings object begins with the appropriate defaults for clients.
"""
s = h2.settings.Settings(client=True)
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 1
assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16384
assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 0
def test_settings_defaults_server(self):
"""
The Settings object begins with the appropriate defaults for servers.
"""
s = h2.settings.Settings(client=False)
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
assert s[h2.settings.SettingCodes.ENABLE_PUSH] == 0
assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16384
assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 0
@pytest.mark.parametrize('client', [True, False])
def test_can_set_initial_values(self, client):
"""
The Settings object can be provided initial values that override the
defaults.
"""
overrides = {
h2.settings.SettingCodes.HEADER_TABLE_SIZE: 8080,
h2.settings.SettingCodes.MAX_FRAME_SIZE: 16388,
h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS: 100,
h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE: 2**16,
h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1,
}
s = h2.settings.Settings(client=client, initial_values=overrides)
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 8080
assert s[h2.settings.SettingCodes.ENABLE_PUSH] == bool(client)
assert s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] == 65535
assert s[h2.settings.SettingCodes.MAX_FRAME_SIZE] == 16388
assert s[h2.settings.SettingCodes.MAX_CONCURRENT_STREAMS] == 100
assert s[h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE] == 2**16
assert s[h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL] == 1
@pytest.mark.parametrize(
'setting,value',
[
(h2.settings.SettingCodes.ENABLE_PUSH, 2),
(h2.settings.SettingCodes.ENABLE_PUSH, -1),
(h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, -1),
(h2.settings.SettingCodes.INITIAL_WINDOW_SIZE, 2**34),
(h2.settings.SettingCodes.MAX_FRAME_SIZE, 1),
(h2.settings.SettingCodes.MAX_FRAME_SIZE, 2**30),
(h2.settings.SettingCodes.MAX_HEADER_LIST_SIZE, -1),
(h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL, -1),
]
)
def test_cannot_set_invalid_initial_values(self, setting, value):
"""
The Settings object can be provided initial values that override the
defaults.
"""
overrides = {setting: value}
with pytest.raises(h2.exceptions.InvalidSettingsValueError):
h2.settings.Settings(initial_values=overrides)
def test_applying_value_doesnt_take_effect_immediately(self):
"""
When a value is applied to the settings object, it doesn't immediately
take effect.
"""
s = h2.settings.Settings(client=True)
        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
def test_acknowledging_values(self):
"""
When we acknowledge settings, the values change.
"""
s = h2.settings.Settings(client=True)
old_settings = dict(s)
new_settings = {
h2.settings.SettingCodes.HEADER_TABLE_SIZE: 4000,
h2.settings.SettingCodes.ENABLE_PUSH: 0,
h2.settings.SettingCodes.INITIAL_WINDOW_SIZE: 60,
h2.settings.SettingCodes.MAX_FRAME_SIZE: 16385,
h2.settings.SettingCodes.ENABLE_CONNECT_PROTOCOL: 1,
}
s.update(new_settings)
assert dict(s) == old_settings
s.acknowledge()
assert dict(s) == new_settings
def test_acknowledging_returns_the_changed_settings(self):
| """
Acknowledging settings returns the changes.
"""
s = h2.settings.Settings(client=True)
s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
s[h2.settings.SettingCode | s.ENABLE_PUSH] = 0
changes = s.acknowledge()
assert len(changes) == 2
table_size_change = (
changes[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
)
push_change = changes[h2.settings.SettingCodes.ENABLE_PUSH]
assert table_size_change.setting == (
h2.settings.SettingCodes.HEADER_TABLE_SIZE
)
assert table_size_change.original_value == 4096
assert table_size_change.new_value == 8000
assert push_change.setting == h2.settings.SettingCodes.ENABLE_PUSH
assert push_change.original_value == 1
assert push_change.new_value == 0
def test_acknowledging_only_returns_changed_settings(self):
"""
Acknowledging settings does not return unchanged settings.
"""
s = h2.settings.Settings(client=True)
s[h2.settings.SettingCodes.INITIAL_WINDOW_SIZE] = 70
changes = s.acknowledge()
assert len(changes) == 1
assert list(changes.keys()) == [
h2.settings.SettingCodes.INITIAL_WINDOW_SIZE
]
def test_deleting_values_deletes_all_of_them(self):
"""
When we delete a key we lose all state about it.
"""
s = h2.settings.Settings(client=True)
        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
del s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
with pytest.raises(KeyError):
s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
def test_length_correctly_reported(self):
"""
Length is related only to the number of keys.
"""
s = h2.settings.Settings(client=True)
assert len(s) == 5
        s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] = 8000
assert len(s) == 5
s.acknowledge()
assert len(s) == 5
del s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
assert len(s) == 4
def test_new_values_work(self):
"""
New values initially don't appear
"""
s = h2.settings.Settings(client=True)
s[80] = 81
with pytest.raises(KeyError):
s[80]
def test_new_values_follow_basic_acknowledgement_rules(self):
"""
A new value properly appears when acknowledged.
"""
s = h2.settings.Settings(client=True)
s[80] = 81
changed_settings = s.acknowledge()
assert s[80] == 81
assert len(changed_settings) == 1
changed = changed_settings[80]
assert changed.setting == 80
assert changed.original_value is None
assert changed.new_value == 81
def test_single_values_arent_affected_by_acknowledgement(self):
"""
When acknowledged, unchanged settings remain unchanged.
"""
s = h2.settings.Settings(client=True)
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
s.acknowledge()
assert s[h2.settings.SettingCodes.HEADER_TABLE_SIZE] == 4096
def test_settings_getters(self):
"""
Getters exist for well-known settings.
"""
s = h2.settings.Settings(client=True)
assert s.header_table_size == (
s[h2.settings.SettingCodes.HEADER_TABLE_SIZE]
)
assert s.enable_push == s[h2.settings.SettingCodes.ENABLE_PUSH]
|
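The tests above all exercise the same two-phase contract: writes to a `Settings` object are staged until `acknowledge()` swaps them in and reports the delta. A condensed sketch of that flow (the frame-size values are arbitrary):

```python
import h2.settings
from h2.settings import SettingCodes

s = h2.settings.Settings(client=True)
s[SettingCodes.MAX_FRAME_SIZE] = 16385          # staged only
assert s[SettingCodes.MAX_FRAME_SIZE] == 16384  # reads still see the default

changes = s.acknowledge()                       # pending values take effect
assert s[SettingCodes.MAX_FRAME_SIZE] == 16385
change = changes[SettingCodes.MAX_FRAME_SIZE]
assert (change.original_value, change.new_value) == (16384, 16385)
```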
bgris/ODL_bgris | lib/python3.5/site-packages/qtawesome/iconic_font.py | Python | gpl-3.0 | 14,406 | 0.000208 | r"""
Iconic Font
===========
A lightweight module handling iconic fonts.
It is designed to provide a simple way for creating QIcons from glyphs.
From a user's viewpoint, the main entry point is the ``IconicFont`` class which
contains methods for loading new iconic fonts with their character map and
methods returning instances of ``QIcon``.
"""
# Standard library imports
from __future__ import print_function
import json
import os
import hashlib
import warnings
# Third party imports
from qtpy.QtCore import QObject, QPoint, QRect, qRound, Qt
from qtpy.QtGui import (QColor, QFont, QFontDatabase, QIcon, QIconEngine,
QPainter, QPixmap)
from qtpy.QtWidgets import QApplication
from six import unichr
# Linux packagers, please set this to True if you want to make qtawesome
# use system fonts
SYSTEM_FONTS = False
_default_options = {
'color': QColor(50, 50, 50),
'color_disabled': QColor(150, 150, 150),
'opacity': 1.0,
'scale_factor': 1.0,
}
def set_global_defaults(**kwargs):
"""Set global defaults for the options passed to the icon painter."""
valid_options = [
'active', 'selected', 'disabled', 'on', 'off',
'on_active', 'on_selected', 'on_disabled',
'off_active', 'off_selected', 'off_disabled',
'color', 'color_on', 'color_off',
'color_active', 'color_selected', 'color_disabled',
'color_on_selected', 'color_on_active', 'color_on_disabled',
'color_off_selected', 'color_off_active', 'color_off_disabled',
'animation', 'offset', 'scale_factor',
]
for kw in kwargs:
if kw in valid_options:
_default_options[kw] = kwargs[kw]
else:
error = "Invalid option '{0}'".format(kw)
raise KeyError(error)
class CharIconPainter:
"""Char icon painter."""
def paint(self, iconic, painter, rect, mode, state, options):
"""Main paint method."""
for opt in options:
self._paint_icon(iconic, painter, rect, mode, state, opt)
def _paint_icon(self, iconic, painter, rect, mode, state, options):
"""Paint a single icon."""
painter.save()
color = options['color']
char = options['char']
color_options = {
QIcon.On: {
QIcon.Normal: (options['color_on'], options['on']),
QIcon.Disabled: (options['color_on_disabled'],
options['on_disabled']),
QIcon.Active: (options['color_on_active'],
options['on_active']),
QIcon.Selected: (options['color_on_selected'],
options['on_selected'])
},
QIcon.Off: {
QIcon.Normal: (options['color_off'], options['off']),
QIcon.Disabled: (options['color_off_disabled'],
options['off_disabled']),
QIcon.Active: (options['color_off | _active'],
options['off_active']),
QIcon.Selected: (options['color_off_selected'],
options['off_selected'])
}
}
color | , char = color_options[state][mode]
painter.setPen(QColor(color))
# A 16 pixel-high icon yields a font size of 14, which is pixel perfect
# for font-awesome. 16 * 0.875 = 14
# The reason why the glyph size is smaller than the icon size is to
# account for font bearing.
draw_size = 0.875 * qRound(rect.height() * options['scale_factor'])
prefix = options['prefix']
# Animation setup hook
animation = options.get('animation')
if animation is not None:
animation.setup(self, painter, rect)
painter.setFont(iconic.font(prefix, draw_size))
if 'offset' in options:
rect = QRect(rect)
rect.translate(options['offset'][0] * rect.width(),
options['offset'][1] * rect.height())
painter.setOpacity(options.get('opacity', 1.0))
painter.drawText(rect, Qt.AlignCenter | Qt.AlignVCenter, char)
painter.restore()
class FontError(Exception):
"""Exception for font errors."""
class CharIconEngine(QIconEngine):
"""Specialization of QIconEngine used to draw font-based icons."""
def __init__(self, iconic, painter, options):
super(CharIconEngine, self).__init__()
self.iconic = iconic
self.painter = painter
self.options = options
def paint(self, painter, rect, mode, state):
self.painter.paint(
self.iconic, painter, rect, mode, state, self.options)
def pixmap(self, size, mode, state):
pm = QPixmap(size)
pm.fill(Qt.transparent)
self.paint(QPainter(pm), QRect(QPoint(0, 0), size), mode, state)
return pm
class IconicFont(QObject):
"""Main class for managing iconic fonts."""
def __init__(self, *args):
"""IconicFont Constructor.
Parameters
----------
``*args``: tuples
Each positional argument is a tuple of 3 or 4 values:
- The prefix string to be used when accessing a given font set,
- The ttf font filename,
- The json charmap filename,
- Optionally, the directory containing these files. When not
provided, the files will be looked for in ``./fonts/``.
"""
super(IconicFont, self).__init__()
self.painter = CharIconPainter()
self.painters = {}
self.fontname = {}
self.charmap = {}
for fargs in args:
self.load_font(*fargs)
def load_font(self, prefix, ttf_filename, charmap_filename, directory=None):
"""Loads a font file and the associated charmap.
If ``directory`` is None, the files will be looked for in ``./fonts/``.
Parameters
----------
prefix: str
Prefix string to be used when accessing a given font set
ttf_filename: str
Ttf font filename
charmap_filename: str
Charmap filename
directory: str or None, optional
Directory for font and charmap files
"""
def hook(obj):
result = {}
for key in obj:
result[key] = unichr(int(obj[key], 16))
return result
if directory is None:
directory = os.path.join(
os.path.dirname(os.path.realpath(__file__)), 'fonts')
# Load font
if QApplication.instance() is not None:
id_ = QFontDatabase.addApplicationFont(os.path.join(directory,
ttf_filename))
loadedFontFamilies = QFontDatabase.applicationFontFamilies(id_)
if(loadedFontFamilies):
self.fontname[prefix] = loadedFontFamilies[0]
else:
raise FontError(u"Font at '{0}' appears to be empty. "
"If you are on Windows 10, please read "
"https://support.microsoft.com/"
"en-us/kb/3053676 "
"to know how to prevent Windows from blocking "
"the fonts that come with QtAwesome.".format(
os.path.join(directory, ttf_filename)))
with open(os.path.join(directory, charmap_filename), 'r') as codes:
self.charmap[prefix] = json.load(codes, object_hook=hook)
# Verify that vendorized fonts are not corrupt
if not SYSTEM_FONTS:
md5_hashes = {'fontawesome-webfont.ttf':
'a3de2170e4e9df77161ea5d3f31b2668',
'elusiveicons-webfont.ttf':
'207966b04c032d5b873fd595a211582e'}
ttf_hash = md5_hashes.get(ttf_filename, None)
if ttf_hash is not None:
hasher = hashlib.md5()
with open(os.path. |
russellgeoff/blog | Control/Controllers/target_list.py | Python | gpl-3.0 | 2,121 | 0.003772 | '''
Copyright (C) 2014 Travis DeWolf
This program is free software: you can redistribute it and/or modify
it under the | terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
a | long with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import numpy as np
class Shell(object):
"""
"""
def __init__(self, controller, target_list,
threshold=.01, pen_down=False):
"""
        controller Control instance: the controller to use
pen_down boolean: True if the end-effector is drawing
"""
self.controller = controller
self.pen_down = pen_down
self.target_list = target_list
self.threshold = threshold
self.not_at_start = True
self.target_index = 0
self.set_target()
def control(self, arm):
"""Move to a series of targets.
"""
if self.controller.check_distance(arm) < self.threshold:
if self.target_index < len(self.target_list)-1:
self.target_index += 1
self.set_target()
self.controller.apply_noise = True
self.not_at_start = not self.not_at_start
self.pen_down = not self.pen_down
self.u = self.controller.control(arm)
return self.u
def set_target(self):
"""
Set the current target for the controller.
"""
if self.target_index == len(self.target_list)-1:
target = [1, 2]
else:
target = self.target_list[self.target_index]
if target[0] != target[0]: # if it's NANs
self.target_index += 1
self.set_target()
else:
self.controller.target = target
|
istommao/wechatkit | wechatkit/exceptions.py | Python | mit | 437 | 0 | """Wechatkit exception module."""
cl | ass WechatKitBaseException(Exception):
"""Wechatkit base Exception."""
def __init__(self, error_info):
"""Init."""
super(WechatKitBaseException, self).__init__(error_info)
self.error_info = error_info
class WechatKitException(WechatKitBaseException):
"""Wechatkit Exception."""
class WechatSignException(WechatKitException):
"""Wechat Si | gn Exception."""
|
kliput/onezone-gui | bamboos/docker/environment/appmock.py | Python | mit | 6,421 | 0.000312 | # coding=utf-8
"""Authors: Łukasz Opioła, Konrad Zemek
Copyright (C) 2015 ACK CYFRONET AGH
This software is released under the MIT license cited in 'LICENSE.txt'
Brings up a set of appmock instances.
"""
import copy
import json
import os
import random
import string
from timeouts import *
from . import common, docker, dns, cluster_manager, worker
def domain(appmock_instance, uid):
"""Formats domain for an appmock instance.
It is intended to fake OP or OZ domain.
"""
return common.format_hostname(appmock_instance, uid)
def appmock_hostname(node_name, uid):
"""Formats hostname for a docker hosting appmock.
NOTE: Hostnames are also used as docker names!
"""
return common.format_hostname(node_name, uid)
def appmock_erl_node_name(node_name, uid):
"""Formats erlang node name for a vm on appmock docker.
"""
hostname = appmock_hostname(node_name, uid)
return common.format_erl_node_name('appmock', hostname)
def _tweak_config(config, appmock_node, appmock_instance, uid):
cfg = copy.deepcopy(config)
cfg['nodes'] = {'node': cfg['nodes'][appmock_node]}
mocked_app = 'none'
if 'mocked_app' in cfg['nodes']['node']:
mocked_app = cfg['nodes']['node']['mocked_app']
# Node name depends on mocked app, if none is specified,
# default appmock_erl_node_name will be used.
node_name = {
'cluster_manager': cluster_manager.cm_erl_node_name(appmock_node,
appmock_instance,
uid),
'op_worker': worker.worker_erl_node_name | (appmock_node,
appmock_instance,
uid),
'oz_worker': worker.worker_erl_node_n | ame(appmock_node, appmock_instance, uid)
}.get(mocked_app, appmock_erl_node_name(appmock_node, uid))
if 'vm.args' not in cfg['nodes']['node']:
cfg['nodes']['node']['vm.args'] = {}
vm_args = cfg['nodes']['node']['vm.args']
vm_args['name'] = node_name
# If cookie is not specified, set random cookie
# so the node does not try to connect to others
if 'setcookie' not in vm_args:
vm_args['setcookie'] = ''.join(
random.sample(string.ascii_letters + string.digits, 16))
return cfg
def _node_up(image, bindir, config, config_path, dns_servers, logdir):
node_name = config['nodes']['node']['vm.args']['name']
(name, sep, hostname) = node_name.partition('@')
sys_config = config['nodes']['node']['sys.config']['appmock']
# can be an absolute path or relative to gen_dev_args.json
app_desc_file_path = sys_config['app_description_file']
app_desc_file_name = os.path.basename(app_desc_file_path)
app_desc_file_path = os.path.join(common.get_file_dir(config_path),
app_desc_file_path)
# file_name must be preserved as it must match the Erlang module name
sys_config['app_description_file'] = '/tmp/' + app_desc_file_name
command = '''mkdir -p /root/bin/node/log/
bindfs --create-for-user={uid} --create-for-group={gid} /root/bin/node/log /root/bin/node/log
set -e
cat <<"EOF" > /tmp/{app_desc_file_name}
{app_desc_file}
EOF
cat <<"EOF" > /tmp/gen_dev_args.json
{gen_dev_args}
EOF
escript bamboos/gen_dev/gen_dev.escript /tmp/gen_dev_args.json
/root/bin/node/bin/appmock console
sleep 5''' # Add sleep so logs can be chowned
command = command.format(
uid=os.geteuid(),
gid=os.getegid(),
app_desc_file_name=app_desc_file_name,
app_desc_file=open(app_desc_file_path, 'r').read(),
gen_dev_args=json.dumps({'appmock': config}))
bindir = os.path.abspath(bindir)
volumes = ['/root/bin', (bindir, bindir, 'ro')]
if logdir:
logdir = os.path.join(os.path.abspath(logdir), hostname)
os.makedirs(logdir)
volumes.extend([(logdir, '/root/bin/node/log', 'rw')])
container = docker.run(
image=image,
name=hostname,
hostname=hostname,
detach=True,
interactive=True,
tty=True,
workdir=bindir,
volumes=volumes,
dns_list=dns_servers,
privileged=True,
command=command)
return container, {
'docker_ids': [container],
'appmock_nodes': [node_name]
}
def _ready(node):
node_ip = docker.inspect(node)['NetworkSettings']['IPAddress']
return common.nagios_up(node_ip, '9999')
def up(image, bindir, dns_server, uid, config_path, logdir=None):
config = common.parse_json_config_file(config_path)
input_dir = config['dirs_config']['appmock']['input_dir']
dns_servers, output = dns.maybe_start(dns_server, uid)
for appmock_instance in config['appmock_domains']:
gen_dev_cfg = {
'config': {
'input_dir': input_dir,
'target_dir': '/root/bin'
},
'nodes': config['appmock_domains'][appmock_instance]['appmock']
}
tweaked_configs = [_tweak_config(gen_dev_cfg, appmock_node,
appmock_instance, uid)
for appmock_node in gen_dev_cfg['nodes']]
include_domain = False
appmock_ips = []
appmocks = []
for cfg in tweaked_configs:
appmock_id, node_out = _node_up(image, bindir, cfg,
config_path, dns_servers, logdir)
appmocks.append(appmock_id)
if 'mocked_app' in cfg['nodes']['node']:
mocked_app = cfg['nodes']['node']['mocked_app']
if mocked_app == 'op_worker' or mocked_app == 'oz_worker':
include_domain = True
appmock_ips.append(common.get_docker_ip(appmock_id))
common.merge(output, node_out)
common.wait_until(_ready, appmocks, APPMOCK_WAIT_FOR_NAGIOS_SECONDS)
if include_domain:
domains = {
'domains': {
domain(appmock_instance, uid): {
'ns': [],
'a': appmock_ips
}
}
}
common.merge(output, domains)
# Make sure domain are added to the dns server
dns.maybe_restart_with_configuration(dns_server, uid, output)
return output
|
3ptscience/steno3dpy | steno3d/examples/airports.py | Python | mit | 3,650 | 0 | """airports.py provides an example Steno3D project of airports"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from .base import BaseExample, exampleproperty
from ..point import Mesh0D, Point
from ..project import Project
DEG2RAD = np.pi/180
FT2KM = 12*2.54/100/1000
RADIUS = 6371
class Airports(BaseExample):
"""Class containing components of airport project. Components can be
viewed individually or copied into new resources or projects with
get_resources() and get_pr | oject(), respectively.
"""
@exampleproperty
def filenames(self):
"""airport files"""
return ['airports.dat', 'latitude.npy', 'longitude.npy',
'altitude.npy', 'license.txt']
@exampleproperty
def datafile(self):
"""full path to airport data file"""
return Airports.fetch_data(filename='airports.dat',
download_if_missing=False,
| verbose=False)
@exampleproperty
def latitude(self):
"""Airport lat, degrees, from openflights.org"""
return np.load(Airports.fetch_data(filename='latitude.npy',
download_if_missing=False,
verbose=False))
@exampleproperty
def longitude(self):
"""Airport lon, degrees, from openflights.org"""
return np.load(Airports.fetch_data(filename='longitude.npy',
download_if_missing=False,
verbose=False))
@exampleproperty
def altitude(self):
"""Airport alt, km, from openflights.org"""
return np.load(Airports.fetch_data(filename='altitude.npy',
download_if_missing=False,
verbose=False))
@classmethod
def get_project(self):
"""return airport points project"""
proj = Project(
title='Airport',
description='Project with airport points'
)
Point(
project=proj,
mesh=Mesh0D(
vertices=np.c_[self.geo_to_xyz(self.latitude,
self.longitude,
self.altitude)]
),
title='Airport Points'
)
return proj
@staticmethod
def geo_to_xyz(lat, lon, alt):
"""function geo_to_xyz
Inputs:
lat: latitude, degrees
lon: longitude, degrees
alt: altitude, km
Outputs:
            x, y, z: spatial coordinates relative to the center of the earth
        Note:
            This function assumes a spherical earth
"""
lat *= DEG2RAD
lon *= DEG2RAD
x = (RADIUS + alt)*np.cos(lat)*np.cos(lon)
y = (RADIUS + alt)*np.cos(lat)*np.sin(lon)
z = (RADIUS + alt)*np.sin(lat)
return x, y, z
@staticmethod
def read_airports_data(filename):
"""Extract latitude, longitude, and altitude from file"""
lat = [] # Latitude
lon = [] # Longitude
alt = [] # Altitude
with open(filename) as f:
for line in f:
data = line.rstrip().split(',')
lat.append(float(data[6])*DEG2RAD)
lon.append(float(data[7])*DEG2RAD)
alt.append(float(data[8])*FT2KM)
return np.array(lat), np.array(lon), np.array(alt)
|
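A quick sanity check of the spherical conversion above: at lat = lon = alt = 0 the cosines are 1 and the sines 0, so the point lands on the equator at the prime meridian, one earth radius out:

```python
import numpy as np

x, y, z = Airports.geo_to_xyz(np.array([0.0]), np.array([0.0]), np.array([0.0]))
# cos(0) = 1, sin(0) = 0  ->  (RADIUS + 0) * (1, 0, 0)
assert (x[0], y[0], z[0]) == (6371.0, 0.0, 0.0)
```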
russbishop/swift | utils/swift_build_support/tests/test_cmake.py | Python | apache-2.0 | 12,511 | 0.00008 | # test_cmake.py - Unit tests for swift_build_support.cmake -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
import os
import unittest
from argparse import Namespace
from swift_build_support.cmake import CMake, CMakeOptions
from swift_build_support.toolchain import host_toolchain
class CMakeTestCase(unittest.TestCase):
def mock_distcc_path(self):
"""Return a path string of mock distcc executable
"""
return os.path.join(os.path.dirname(__file__),
'mock-distcc')
def default_args(self):
"""Return new args object with default values
"""
return Namespace(host_cc="/path/to/clang",
host_cxx="/path/to/clang++",
enable_asan=False,
enable_ubsan=False,
export_compile_commands=False,
distcc=False,
cmake_generator="Ninja",
clang_compiler_version=None,
build_jobs=8,
build_args=[],
verbose_build=False,
build_ninja=False)
def cmake(self, args):
"""Return new CMake object initialized with given args
"""
toolchain = host_toolchain()
toolchain.cc = args.host_cc
toolchain.cxx = args.host_cxx
if args.distcc:
toolchain.distcc = self.mock_distcc_path()
if args.build_ninja:
toolchain.ninja = '/path/to/built/ninja'
return CMake(args=args, toolchain=toolchain)
def test_common_options_defaults(self):
args = self.default_args()
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++"])
def test_common_options_asan(self):
args = self.default_args()
args.enable_asan = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DLLVM_USE_SANITIZER=Address",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++"])
def test_common_options_ubsan(se | lf):
args = self.default_args()
args.enable_ubsan = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DLLVM_USE_SANITIZER=Undefined",
| "-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++"])
def test_common_options_asan_ubsan(self):
args = self.default_args()
args.enable_asan = True
args.enable_ubsan = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DLLVM_USE_SANITIZER=Address;Undefined",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++"])
def test_common_options_export_compile_commands(self):
args = self.default_args()
args.export_compile_commands = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++"])
def test_common_options_distcc(self):
args = self.default_args()
args.distcc = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DCMAKE_C_COMPILER:PATH=" + self.mock_distcc_path(),
"-DCMAKE_C_COMPILER_ARG1=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=" + self.mock_distcc_path(),
"-DCMAKE_CXX_COMPILER_ARG1=/path/to/clang++"])
def test_common_options_xcode(self):
args = self.default_args()
args.cmake_generator = 'Xcode'
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Xcode",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++",
"-DCMAKE_CONFIGURATION_TYPES=" +
"Debug;Release;MinSizeRel;RelWithDebInfo"])
def test_common_options_clang_compiler_version(self):
args = self.default_args()
args.clang_compiler_version = ("3", "8", "0")
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++",
"-DLLVM_VERSION_MAJOR:STRING=3",
"-DLLVM_VERSION_MINOR:STRING=8",
"-DLLVM_VERSION_PATCH:STRING=0"])
def test_common_options_build_ninja(self):
args = self.default_args()
args.build_ninja = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Ninja",
"-DCMAKE_C_COMPILER:PATH=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=/path/to/clang++",
"-DCMAKE_MAKE_PROGRAM=/path/to/built/ninja"])
def test_common_options_full(self):
args = self.default_args()
args.enable_asan = True
args.enable_ubsan = True
args.export_compile_commands = True
args.distcc = True
args.cmake_generator = 'Xcode'
args.clang_compiler_version = ("3", "8", "0")
args.build_ninja = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.common_options()),
["-G", "Xcode",
"-DLLVM_USE_SANITIZER=Address;Undefined",
"-DCMAKE_EXPORT_COMPILE_COMMANDS=ON",
"-DCMAKE_C_COMPILER:PATH=" + self.mock_distcc_path(),
"-DCMAKE_C_COMPILER_ARG1=/path/to/clang",
"-DCMAKE_CXX_COMPILER:PATH=" + self.mock_distcc_path(),
"-DCMAKE_CXX_COMPILER_ARG1=/path/to/clang++",
"-DCMAKE_CONFIGURATION_TYPES=" +
"Debug;Release;MinSizeRel;RelWithDebInfo",
"-DLLVM_VERSION_MAJOR:STRING=3",
"-DLLVM_VERSION_MINOR:STRING=8",
"-DLLVM_VERSION_PATCH:STRING=0"])
# NOTE: No "-DCMAKE_MAKE_PROGRAM=/path/to/built/ninja" because
# cmake_generator is 'Xcode'
def test_build_args_ninja(self):
args = self.default_args()
cmake = self.cmake(args)
self.assertEqual(
list(cmake.build_args()),
["-j8"])
args.verbose_build = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.build_args()),
["-j8", "-v"])
def test_build_args_makefile(self):
args = self.default_args()
args.cmake_generator = "Unix Makefiles"
cmake = self.cmake(args)
self.assertEqual(
list(cmake.build_args()),
["-j8"])
args.verbose_build = True
cmake = self.cmake(args)
self.assertEqual(
list(cmake.build_args()),
["-j8", "VERBOSE=1"])
def test_build_args_xcode(self):
args = self.default_args()
args.cmake_generator = "Xcode"
cmake = self.cmake(args)
self.assertEqual(
list(cmake.build_args()),
["-parallelizeTargets", "-jobs", "8"])
# NOTE: Xcode generator DOES NOT take 'verbose-build' into account.
args.verbose_build = True
cmake = self.cmake(args)
self.assertEqual( |
partofthething/home-assistant | tests/components/binary_sensor/test_device_condition.py | Python | apache-2.0 | 8,860 | 0.001693 | """The test for binary_sensor device automation."""
from datetime import timedelta
from unittest.mock import patch
import pytest
import homeassistant.components.automation as automation
from homeassistant.components.binary_sensor import DEVICE_CLASSES, DOMAIN
from homeassistant.components.binary_sensor.device_condition import ENTITY_CONDITIONS
from homeassistant.const import CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.helpers import device_registry
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import (
MockConfigEntry,
async_get_device_automation_capabilities,
async_get_device_automations,
async_mock_service,
mock_device_registry,
mock_registry,
)
from tests.components.blueprint.conftest import stub_blueprint_populate # noqa: F401
@pytest.fixture
def device_reg(hass):
"""Return an empty, loaded, registry."""
return mock_device_registry(hass)
@pytest.fixture
def entity_reg(hass):
"""Return an empty, loaded, registry."""
return mock_registry(hass)
@pytest.fixture
def calls(hass):
"""Track calls to a mock service."""
return async_mock_service(hass, "test", "automation")
async def test_get_conditions(hass, device_reg, entity_reg):
"""Test we get the expected conditions from a binary_sensor."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
config_entry = MockConfigEntry(domain="test", data={})
config_entry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
for device_class in DEVICE_CLASSES:
entity_reg.async_get_or_create(
DOMAIN,
"test",
platform.ENTITIES[device_class].unique_id,
device_id=device_entry.id,
)
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
expected_conditions = [
{
"condition": "device",
"domain": DOMAIN,
"type": condition["type"],
"device_id": device_entry.id,
"entity_id": platform.ENTITIES[device_class].entity_id,
}
for device_class in DEVICE_CLASSES
for condition in ENTITY_CONDITIONS[device_class]
]
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
assert conditions == expected_conditions
async def test_get_condition_capabilities(hass, device_reg, entity_reg):
"""Test we get the expected capabilities from a binary_sensor condition."""
config_entry = MockConfigEntry(domain="test", data={})
config_ent | ry.add_to_hass(hass)
device_entry = device_reg.async_get_or_create(
config_entry_id=config_entry.entry_id,
connections={(device_registry.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
)
entity_reg.asyn | c_get_or_create(DOMAIN, "test", "5678", device_id=device_entry.id)
expected_capabilities = {
"extra_fields": [
{"name": "for", "optional": True, "type": "positive_time_period_dict"}
]
}
conditions = await async_get_device_automations(hass, "condition", device_entry.id)
for condition in conditions:
capabilities = await async_get_device_automation_capabilities(
hass, "condition", condition
)
assert capabilities == expected_capabilities
async def test_if_state(hass, calls):
"""Test for turn_on and turn_off conditions."""
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_bat_low",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_on {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
{
"trigger": {"platform": "event", "event_type": "test_event2"},
"condition": [
{
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_not_bat_low",
}
],
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(("platform", "event.event_type"))
},
},
},
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
assert len(calls) == 1
assert calls[0].data["some"] == "is_on event - test_event1"
hass.states.async_set(sensor1.entity_id, STATE_OFF)
hass.bus.async_fire("test_event1")
hass.bus.async_fire("test_event2")
await hass.async_block_till_done()
assert len(calls) == 2
assert calls[1].data["some"] == "is_off event - test_event2"
async def test_if_fires_on_for_condition(hass, calls):
"""Test for firing if condition is on with delay."""
point1 = dt_util.utcnow()
point2 = point1 + timedelta(seconds=10)
point3 = point2 + timedelta(seconds=10)
platform = getattr(hass.components, f"test.{DOMAIN}")
platform.init()
assert await async_setup_component(hass, DOMAIN, {DOMAIN: {CONF_PLATFORM: "test"}})
await hass.async_block_till_done()
sensor1 = platform.ENTITIES["battery"]
with patch("homeassistant.core.dt_util.utcnow") as mock_utcnow:
mock_utcnow.return_value = point1
assert await async_setup_component(
hass,
automation.DOMAIN,
{
automation.DOMAIN: [
{
"trigger": {"platform": "event", "event_type": "test_event1"},
"condition": {
"condition": "device",
"domain": DOMAIN,
"device_id": "",
"entity_id": sensor1.entity_id,
"type": "is_not_bat_low",
"for": {"seconds": 5},
},
"action": {
"service": "test.automation",
"data_template": {
"some": "is_off {{ trigger.%s }}"
% "}} - {{ trigger.".join(
("platform", "event.event_type")
)
},
},
}
]
},
)
await hass.async_block_till_done()
assert hass.states.get(sensor1.entity_id).state == STATE_ON
assert len(calls) == 0
hass.bus.async_fire("test_event1")
await hass.async_block_till_done()
as |