repo_name stringlengths 5 100 | path stringlengths 4 231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6 947k | score float64 0 0.34 | prefix stringlengths 0 8.16k | middle stringlengths 3 512 | suffix stringlengths 0 8.17k |
|---|---|---|---|---|---|---|---|---|
nerosketch/djing | agent/commands/dhcp.py | Python | unlicense | 2,115 | 0.000473 | from typing import Optional
from django.core.exceptions import MultipleObjectsReturned
from abonapp.models import Abon
from devapp.models import Device, Port
def dhcp_commit(client_ip: str, client_mac: str,
switch_mac: str, switch_port: int) -> Optional[str]:
try:
dev = Device.objects.get(mac_addr=switch_mac)
mngr_class = dev.get_manager_klass()
if mngr_class.get_is_use_device_port():
abon = Abon.objects.get(dev_port__device=dev,
dev_port__num=switch_port,
device=dev, is_active=True)
else:
abon = Abon.objects.get(device=dev, is_active=True)
if not abon.is_dynamic_ip:
return 'User settings is not dynamic | '
if client_ip == abon.ip_address:
return 'Ip has already attached'
abon.attach_ip_addr(client_ip, strict=False)
if abon.is_access():
r = abon.nas_sync_self()
return r if r else None
else:
return 'User %s is not access to service' % abon.username
except Abon.DoesNotExist:
return "User with device with mac '%s' does not exist" | % switch_mac
except Device.DoesNotExist:
return 'Device with mac %s not found' % switch_mac
except Port.DoesNotExist:
return 'Port %(switch_port)d on device with mac %(switch_mac)s does not exist' % {
'switch_port': int(switch_port),
'switch_mac': switch_mac
}
except MultipleObjectsReturned as e:
return 'MultipleObjectsReturned:' + ' '.join(
(type(e), e, str(switch_port))
)
def dhcp_expiry(client_ip: str) -> Optional[str]:
abon = Abon.objects.filter(
ip_address=client_ip, is_active=True
).exclude(current_tariff=None).first()
if abon is None:
return "Subscriber with ip %s does not exist" % client_ip
else:
is_freed = abon.free_ip_addr()
if is_freed:
abon.nas_sync_self()
def dhcp_release(client_ip: str) -> Optional[str]:
return dhcp_expiry(client_ip)
|
mrinalpande/Music_Encrypt | misc/text.py | Python | mit | 73 | 0.041096 | def main():
| f = open("test.txt","w")
f.writ | e("A"*10000000)
main() |
kho/mr-cdec | python/pkg/cdec/sa/extract.py | Python | apache-2.0 | 4,239 | 0.00519 | #!/usr/bin/env python
import sys
import os
import re
import gzip
import argparse
import logging
import signal
import multiprocessing as mp
import cdec.sa
from cdec.sa._sa import monitor_cpu
extractor, prefix = None, None
online, compress = False, False
def make_extractor(args):
global extractor, prefix, online, compress
signal.signal(signal.SIGINT, signal.SIG_IGN) # Let parent process catch Ctrl+C
load_features(args.features)
extractor = cdec.sa.GrammarExtractor(args.config, online)
prefix = args.grammars
online = args.online
compress = args.compress
def load_features(features):
for featdef in features:
logging.info('Loading additional feature definitions from %s', featdef)
prefix = os.path.dirname(featdef)
sys.path.append(prefix)
__import__(os.path.basename(featdef).replace('.py', ''))
sys.path.remove(prefix)
def extract(inp):
global extractor, prefix, online, compress
i, sentence = inp
sentence = sentence[:-1]
fields = re.split('\s*\|\|\|\s*', sentence)
suffix = ''
# 3 fields for online mode, 1 for normal
if online:
if len(fields) < 3:
sys.stderr.write('Error: online mode requires references and alignments.'
' Not adding sentence to training data: {}\n'.format(sentence))
sentence = fields[0]
else:
sentence, reference, alignment = fields[0:3]
if len(fields) > 3:
suffix = ' ||| ' + ' ||| '.join(fields[3:])
else:
if len(fields) > 1:
sentence = fields[0]
suffix = ' ||| ' + ' ||| '.join(fields[1:])
grammar_file = os.path.join(prefix, 'grammar.'+str(i))
if compress: grammar_file += '.gz'
with (gzip.open if compress else open)(grammar_file, 'w') as output:
for rule in extractor.grammar(sentence):
output.write(str(rule)+'\n')
# Add training instance _after_ ext | racting grammars
if online:
extractor.add_instance(sentence, reference, alignment)
grammar_file = os.path.abspath(grammar_file)
return '<seg grammar="{}" id="{}">{}< | /seg>{}'.format(grammar_file, i, sentence, suffix)
def main():
global online
logging.basicConfig(level=logging.INFO)
parser = argparse.ArgumentParser(description='Extract grammars from a compiled corpus.')
parser.add_argument('-c', '--config', required=True,
help='extractor configuration')
parser.add_argument('-g', '--grammars', required=True,
help='grammar output path')
parser.add_argument('-j', '--jobs', type=int, default=1,
help='number of parallel extractors')
parser.add_argument('-s', '--chunksize', type=int, default=10,
help='number of sentences / chunk')
parser.add_argument('-f', '--features', nargs='*', default=[],
help='additional feature definitions')
parser.add_argument('-o', '--online', action='store_true',
help='online grammar extraction')
parser.add_argument('-z', '--compress', action='store_true',
help='compress grammars with gzip')
args = parser.parse_args()
if not os.path.exists(args.grammars):
os.mkdir(args.grammars)
for featdef in args.features:
if not featdef.endswith('.py'):
sys.stderr.write('Error: feature definition file <{}>'
' should be a python module\n'.format(featdef))
sys.exit(1)
online = args.online
start_time = monitor_cpu()
if args.jobs > 1:
logging.info('Starting %d workers; chunk size: %d', args.jobs, args.chunksize)
pool = mp.Pool(args.jobs, make_extractor, (args,))
try:
for output in pool.imap(extract, enumerate(sys.stdin), args.chunksize):
print(output)
except KeyboardInterrupt:
pool.terminate()
else:
make_extractor(args)
for output in map(extract, enumerate(sys.stdin)):
print(output)
stop_time = monitor_cpu()
logging.info("Overall extraction step took %f seconds", stop_time - start_time)
if __name__ == '__main__':
main()
|
enthought/python-analytics | python_analytics/events.py | Python | bsd-3-clause | 583 | 0 | from __future__ import absolute_import, unicode_literals
from six import add_metaclass, text_type
from .event_en | coder import Parameter, EventEncoder
@add_metaclass(EventEncoder)
class Event(object):
hit = Parameter('t', text_type, required=True)
category = Parameter('ec', text_type, required=True)
action = Parameter('ea', text_type, required=True)
label = Parameter('el', text_type)
value = Parameter('ev', int)
def __init__(self, **kwargs):
self.hit = 'event'
| for name, value in kwargs.items():
setattr(self, name, value)
|
UstadMobile/eXePUB | twisted/test/test_lockfile.py | Python | gpl-2.0 | 1,423 | 0.003514 | # Copyright (c) 2005 Divmod, Inc.
# See LICENSE for details.
from twisted.trial import unittest
from twisted.python import l | ockfile
class LockingTestCase(unittest.TestCase):
def testBasics(self):
lockf = self.mktemp()
lock = lockfile.FilesystemLock(lockf)
self.failUnless(lock.lock())
self.failUnless(lock.clean)
lock.unlock()
self.failUnless(lock.lock())
self.failUnless(lock.clean)
lock.unlock()
def tes | tProtection(self):
lockf = self.mktemp()
lock = lockfile.FilesystemLock(lockf)
self.failUnless(lock.lock())
self.failUnless(lock.clean)
self.failIf(lock.lock())
lock.unlock()
def testBigLoop(self):
lockf = self.mktemp()
lock = lockfile.FilesystemLock(lockf)
self.failUnless(lock.lock())
for i in xrange(500):
self.failIf(lock.lock())
lock.unlock()
def testIsLocked(self):
lockf = self.mktemp()
self.failIf(lockfile.isLocked(lockf))
lock = lockfile.FilesystemLock(lockf)
self.failUnless(lock.lock())
self.failUnless(lockfile.isLocked(lockf))
lock.unlock()
self.failIf(lockfile.isLocked(lockf))
# A multiprocess test would be good here, for the sake of
# completeness. However, it is fairly safe to rely on the
# filesystem to provide the semantics we require.
|
geodrinx/gearthview | ext-libs/twisted/trial/_dist/test/test_distreporter.py | Python | gpl-3.0 | 2,019 | 0.001981 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.trial.distreporter}.
"""
from cStringIO import StringIO
from twisted.trial._dist.distreporter import DistReporter
from twisted.trial.unittest import TestCase
from twisted.trial.reporter import TreeReporter
class DistReporterTestCase(TestCase):
"""
Tests for L{DistReporter}.
"""
def setUp(self):
self.stream = StringIO()
self.distReporter = DistReporter(TreeReporter(self.stream))
self.test = TestCase()
def test_startSuccessStop(self):
"""
Success output only gets sent to the stream after the test has stopped.
"""
self.distReporter.startTest(self.test)
self.assertEqual(self.stream.getvalue(), "")
self.distReporter.addSuccess(self.test)
self.assertEqual(self.stream.getvalue(), "")
self.distReporter.stopTest(self.test)
self.assertNotEqual(self.stream.getvalue(), "")
def test_startErrorStop(self):
"""
Error output only gets sent to the stream after the test has stopped.
"""
self.distReporter.startTest(self.test)
self.assertEqual(self.stream.getvalue(), "")
self.distReporter.addError(self.test, "error")
self.assertEqual(self.stream.getvalue(), "")
self.distReporter.stopTest(self.test)
self.assertNotEqual | (self.stream.getvalue(), "")
def test_forwardedMethods(self):
"""
Calling methods of L{DistReporter} add calls to the running queue of
the test.
"""
self.distReporter.startTest(self.test)
self.di | stReporter.addFailure(self.test, "foo")
self.distReporter.addError(self.test, "bar")
self.distReporter.addSkip(self.test, "egg")
self.distReporter.addUnexpectedSuccess(self.test, "spam")
self.distReporter.addExpectedFailure(self.test, "err", "foo")
self.assertEqual(len(self.distReporter.running[self.test.id()]), 6)
|
BoPeng/simuPOP | docs/statGenoFreq.py | Python | gpl-2.0 | 1,692 | 0.004728 | #!/usr/bin/env python
#
# $File: statGenoFreq.py $
#
# This file is part of simuPOP, a forward-time population genetics
# simulation environment. Please visit http://simupop.sourceforge.net
# for details.
#
# Copyright (C) 2004 - 2010 Bo Peng (bpeng@mdanderson.org)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, | see <http://www.gnu.org/licenses/>.
#
# This script is an example in the simuPOP u | ser's guide. Please refer to
# the user's guide (http://simupop.sourceforge.net/manual) for a detailed
# description of this example.
#
import simuPOP as sim
pop = sim.Population(100, loci=[1, 1, 1], lociNames=['A', 'X', 'Y'],
chromTypes=[sim.AUTOSOME, sim.CHROMOSOME_X, sim.CHROMOSOME_Y])
sim.initGenotype(pop, freq=[0.01, 0.05, 0.94])
sim.stat(pop, genoFreq=['A', 'X']) # both loci indexes and names can be used.
print('Available genotypes on autosome:', list(pop.dvars().genoFreq[0].keys()))
for i in range(3):
for j in range(3):
print('%d-%d: %.3f' % (i, j, pop.dvars().genoFreq[0][(i,j)]))
print('Genotype frequency on chromosome X:\n', \
'\n'.join(['%s: %.3f' % (x,y) for x,y in pop.dvars().genoFreq[1].items()]))
|
aldebaran/qibuild | python/qitoolchain/test/test_qipackage.py | Python | bsd-3-clause | 9,535 | 0.000839 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Test QiPackage """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import os
import pytest
import qitoolchain.qipackage
import qisys.archive
from qisys.test.conftest import skip_on_win
def test_equality():
""" Test Equality """
foo1 = qitoolchain.qipackage.QiPackage("foo", "1.2")
foo2 = qitoolchain.qipackage.QiPackage("foo", "1.2")
foo3 = qitoolchain.qipackage.QiPackage("foo", "1.3")
bar1 = qitoolchain.qipackage.QiPackage("bar", "1.2")
assert foo1 == foo2
assert foo2 < foo3
assert foo1 != bar1
def test_from_archive(tmpdir):
""" Test From Archive """
foo1 = tmpdir.mkdir("foo")
foo_xml = foo1.join("package.xml")
foo_xml.write("""<package name="foo" version="0.1"/>""")
archive = qisys.archive.compress(foo1.strpath, flat=True)
package = qitoolchain.qipackage.from_archive(archive)
assert package.name == "foo"
assert package.version == "0.1"
def test_skip_package_xml(tmpdir):
""" Test Skip Package Xml """
foo1 = tmpdir.mkdir("foo | ")
foo_xml = foo1.join("package.xml")
foo_xml.write("""<package name="foo" version="0.1"/>""")
foo1.ensure("include", "foo.h", file=True)
foo1.ensure("lib", "libfoo.so", file=True)
| package = qitoolchain.qipackage.QiPackage("foo", path=foo1.strpath)
dest = tmpdir.join("dest")
package.install(dest.strpath)
assert dest.join("include", "foo.h").check(file=True)
assert dest.join("lib", "libfoo.so").check(file=True)
assert not dest.join("package.xml").check(file=True)
def test_reads_runtime_manifest(tmpdir):
""" Test Read Runtime Manifest """""
boost_path = tmpdir.mkdir("boost")
boost_path.ensure("include", "boost.h", file=True)
boost_path.ensure("lib", "libboost.so", file=True)
runtime_manifest = boost_path.ensure("install_manifest_runtime.txt", file=True)
runtime_manifest.write(b"""lib/libboost.so\n""")
package = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
dest = tmpdir.join("dest")
installed = package.install(dest.strpath, components=["runtime"])
assert not dest.join("include", "boost.h").check(file=True)
libbost_so = dest.join("lib", "libboost.so")
assert libbost_so.check(file=True)
assert installed == ["lib/libboost.so"]
def test_backward_compat_runtime_install(tmpdir):
""" Test Backward Compat Runtime """
boost_path = tmpdir.mkdir("boost")
boost_path.ensure("include", "boost.h", file=True)
boost_path.ensure("lib", "libboost.so", file=True)
boost_path.ensure("package.xml", file=True)
package = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
dest = tmpdir.join("dest")
installed = package.install(dest.strpath, components=["runtime"])
assert not dest.join("include", "boost.h").check(file=True)
libbost_so = dest.join("lib", "libboost.so")
assert libbost_so.check(file=True)
assert installed == ["lib/libboost.so"]
def test_reads_release_mask(tmpdir):
""" Test Reads Release Mask """
qt_path = tmpdir.mkdir("qt")
qt_path.ensure("include", "qt.h", file=True)
qt_path.ensure("lib", "QtCore4.lib", file=True)
qt_path.ensure("lib", "QtCored4.lib", file=True)
qt_path.ensure("bin", "QtCore4.dll", file=True)
qt_path.ensure("bin", "QtCored4.dll", file=True)
runtime_mask = qt_path.ensure("runtime.mask", file=True)
runtime_mask.write(b"""\n# headers\nexclude include/.*\n\n# .lib\nexclude lib/.*\\.lib\n""")
release_mask = qt_path.ensure("release.mask", file=True)
release_mask.write(b"""\nexclude bin/QtCored4.dll\n""")
package = qitoolchain.qipackage.QiPackage("qt", path=qt_path.strpath)
dest = tmpdir.join("dest")
package.install(dest.strpath, release=True, components=["runtime"])
assert dest.join("bin", "QtCore4.dll").check(file=True)
assert not dest.join("lib", "QtCored4.lib").check(file=True)
def test_include_in_mask(tmpdir):
""" Test Include in Mask """
qt_path = tmpdir.mkdir("qt")
qt_path.ensure("bin", "assitant.exe")
qt_path.ensure("bin", "moc.exe")
qt_path.ensure("bin", "lrelease.exe")
qt_path.ensure("bin", "lupdate.exe")
runtime_mask = qt_path.ensure("runtime.mask", file=True)
runtime_mask.write(b"""\nexclude bin/.*\\.exe\ninclude bin/lrelease.exe\ninclude bin/lupdate.exe\n""")
dest = tmpdir.join("dest")
package = qitoolchain.qipackage.QiPackage("qt", path=qt_path.strpath)
package.install(dest.strpath, release=True, components=["runtime"])
assert dest.join("bin", "lrelease.exe").check(file=True)
assert not dest.join("bin", "moc.exe").check(file=True)
def test_load_deps(tmpdir):
""" Test Load Dependencies """
libqi_path = tmpdir.mkdir("libqi")
libqi_path.ensure("package.xml").write(b"""
<package name="libqi">
<depends testtime="true" names="gtest" />
<depends runtime="true" names="boost python" />
</package>
""")
package = qitoolchain.qipackage.QiPackage("libqi", path=libqi_path.strpath)
package.load_deps()
assert package.build_depends == set()
assert package.run_depends == set(["boost", "python"])
assert package.test_depends == set(["gtest"])
def test_extract_legacy_bad_top_dir(tmpdir):
""" Test Extract Legacy Bad Top Dir """
src = tmpdir.mkdir("src")
boost = src.mkdir("boost")
boost.ensure("lib", "libboost.so", file=True)
res = qisys.archive.compress(boost.strpath)
dest = tmpdir.mkdir("dest").join("boost-1.55")
qitoolchain.qipackage.extract(res, dest.strpath)
assert dest.join("lib", "libboost.so").check(file=True)
def test_extract_legacy_ok_top_dir(tmpdir):
""" Test Extract Legacy Ok Top Dir """
src = tmpdir.mkdir("src")
boost = src.mkdir("boost-1.55")
boost.ensure("lib", "libboost.so", file=True)
res = qisys.archive.compress(boost.strpath)
dest = tmpdir.mkdir("dest").join("boost-1.55")
qitoolchain.qipackage.extract(res, dest.strpath)
assert dest.join("lib", "libboost.so").check(file=True)
def test_extract_modern(tmpdir):
""" Test Extract Modern """
src = tmpdir.mkdir("src")
src.ensure("package.xml", file=True)
src.ensure("lib", "libboost.so", file=True)
output = tmpdir.join("boost.zip")
res = qisys.archive.compress(src.strpath, output=output.strpath, flat=True)
dest = tmpdir.mkdir("dest").join("boost-1.55")
qitoolchain.qipackage.extract(res, dest.strpath)
assert dest.join("lib", "libboost.so").check(file=True)
def test_installing_test_component(tmpdir):
""" Test Installing Test Component """
boost_path = tmpdir.mkdir("boost")
boost_path.ensure("include", "boost.h", file=True)
boost_path.ensure("lib", "libboost.so", file=True)
boost_path.ensure("package.xml", file=True)
package = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
dest = tmpdir.join("dest")
_installed = package.install(dest.strpath, components=["test", "runtime"])
assert not dest.join("include", "boost.h").check(file=True)
def test_get_set_license(tmpdir):
""" Test Get Set Licence """
boost_path = tmpdir.mkdir("boost")
boost_path.join("package.xml").write("""\n<package name="boost" version="1.58" />\n""")
package = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
assert package.license is None
package.license = "BSD"
package2 = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
assert package2.license == "BSD"
def test_post_add_noop(tmpdir):
""" Test Post Add Noop """
boost_path = tmpdir.mkdir("boost")
boost_path.join("package.xml").write("""\n<package name="boost" version="1.58" />\n""")
package = qitoolchain.qipackage.QiPackage("boost", path=boost_path.strpath)
package.post_add() # no-op
def test_post_add_does_not_exist(tmpdir):
""" Test Post Add Does Not Exist """
boost_path = tmpdir.mkdir("boost")
boost_path.join("p |
111pontes/ydk-py | cisco-ios-xr/ydk/models/cisco_ios_xr/Cisco_IOS_XR_rgmgr_cfg.py | Python | apache-2.0 | 35,273 | 0.020696 | """ Cisco_IOS_XR_rgmgr_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR rgmgr package configuration.
This module contains definitions
for the following management objects\:
redundancy\-group\-manager\: Redundancy Group Manager
Configuration
Copyright (c) 2013\-2016 by Cisco Systems, Inc.
All rights reserved.
"""
import re
import collections
from enum import Enum
from ydk.types import Empty, YList, YLeafList, DELETE, Decimal64, FixedBitsDict
from ydk.errors import YPYError, YPYModelError
class IccpModeEnum(Enum):
"""
IccpModeEnum
Iccp mode
.. data:: singleton = 1
Run the ICCP group in Singleton mode
"""
singleton = 1
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_rgmgr_cfg as meta
return meta._meta_table['IccpModeEnum']
class RedundancyGroupManager(object):
"""
Redundancy Group Manager Configuration
.. attribute:: aps
MR\-APS groups
**type**\: :py:class:`Aps <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps>`
.. attribute:: enable
Enable redundancy group manager
**type**\: :py:class:`Empty<ydk.types.Empty>`
.. attribute:: iccp
ICCP configuration
**type**\: :py:class:`Iccp <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Iccp>`
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.aps = RedundancyGroupManager.Aps()
self.aps.parent = self
self.enable = None
self.iccp = RedundancyGroupManager.Iccp()
self.iccp.parent = self
class Aps(object):
"""
MR\-APS groups
.. attribute:: default_redundancy_group
Default SONET controller backup configuration
**type**\: :py:class:`DefaultRedundancyGroup <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps.DefaultRedundancyGroup>`
.. attribute:: groups
Redundancy Group Table
**type**\: :py:class:`Groups <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps.Groups>`
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.default_redundancy_group = RedundancyGroupManager.Aps.DefaultRedundancyGroup()
self.default_redundancy_group.parent = self
self.groups = RedundancyGroupManager.Aps.Groups()
self.groups.parent = self
class DefaultRedundancyGroup(object):
"""
Default SONET controller backup configuration
.. attribute:: backup_interface_name
Backup interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: next_hop_address
IPv4 address of remote peer
**type**\: str
**pattern:** (([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])\\.){3}([0\-9]\|[1\-9][0\-9]\|1[0\-9][0\-9]\|2[0\-4][0\-9]\|25[0\-5])(%[\\p{N}\\p{L}]+)?
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.backup_interface_name = None
self.next_hop_address = None
@property
def _common_path(self):
return '/Cisco-IOS-XR-rgmgr-cfg:redundancy-group-manager/Cisco-IOS-XR-rgmgr-cfg:aps/Cisco-IOS-XR-rgmgr-cfg:default-redundancy-group'
def is_config(self):
''' Returns True if this instance represents config data else returns False '''
return True
def _has_data(self):
if self.backup_interface_name is not None:
return True
if self.next_hop_address is not None:
return True
return False
@staticmethod
def _meta_info():
from ydk.models.cisco_ios_xr._meta import _Cisco_IOS_XR_rgmgr_cfg as meta
return meta._meta_table['RedundancyGroupManager.Aps.DefaultRedundancyGroup']['meta_info']
class Groups(object):
"""
Redundancy Group Table
.. attribute:: group
| Redundancy Group Configuration
**type**\: list of :py:class:`Group <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps.Groups.Group>`
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group = YList()
self.group.parent = self
| self.group.name = 'group'
class Group(object):
"""
Redundancy Group Configuration
.. attribute:: group_id <key>
The redundancy group ID
**type**\: int
**range:** 1..32
.. attribute:: controllers
Controller configuration
**type**\: :py:class:`Controllers <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps.Groups.Group.Controllers>`
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.group_id = None
self.controllers = RedundancyGroupManager.Aps.Groups.Group.Controllers()
self.controllers.parent = self
class Controllers(object):
"""
Controller configuration
.. attribute:: controller
none
**type**\: list of :py:class:`Controller <ydk.models.cisco_ios_xr.Cisco_IOS_XR_rgmgr_cfg.RedundancyGroupManager.Aps.Groups.Group.Controllers.Controller>`
"""
_prefix = 'rgmgr-cfg'
_revision = '2015-07-30'
def __init__(self):
self.parent = None
self.controller = YList()
self.controller.parent = self
self.controller.name = 'controller'
class Controller(object):
"""
none
.. attribute:: controller_name <key>
Controller Name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]+))\|([a\-zA\-Z0\-9\_\-]\*\\d+)\|([a\-zA\-Z0\-9\_\-]\*\\d+\\.\\d+)\|(mpls)\|(dwdm)
.. attribute:: backup_interface_name
Backup interface name
**type**\: str
**pattern:** (([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){3,4}\\d+\\.\\d+)\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}([a\-zA\-Z0\-9\_]\*\\d+))\|(([a\-zA\-Z0\-9\_]\*\\d+/){2}( |
suyashphadtare/vestasi-erp-jan-end | erpnext/accounts/report/bank_reconciliation_statement/bank_reconciliation_statement.py | Python | agpl-3.0 | 2,556 | 0.024257 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe.utils import flt
from frappe import _
def execute(filters=None):
if not filters: filters = {}
columns = get_columns()
if not | filters.get("account"): return columns, []
data = get_entries(filters)
from erpnext.accounts.utils import get_balance_on
balance_as_per_system = get_balance_on(filters["account"], filters["report_date"])
total_debit, total_credit = 0,0
for d in data:
total_debit += flt(d[2])
total_credit += flt(d[3])
amounts_not_reflected_in_system = frappe.db.sql("""sele | ct sum(ifnull(jvd.debit, 0) - ifnull(jvd.credit, 0))
from `tabJournal Voucher Detail` jvd, `tabJournal Voucher` jv
where jvd.parent = jv.name and jv.docstatus=1 and jvd.account=%s
and jv.posting_date > %s and jv.clearance_date <= %s
""", (filters["account"], filters["report_date"], filters["report_date"]))
amounts_not_reflected_in_system = flt(amounts_not_reflected_in_system[0][0]) \
if amounts_not_reflected_in_system else 0.0
bank_bal = flt(balance_as_per_system) - flt(total_debit) + flt(total_credit) \
+ amounts_not_reflected_in_system
data += [
get_balance_row(_("System Balance"), balance_as_per_system),
[""]*len(columns),
["", _("Amounts not reflected in bank"), total_debit, total_credit, "", "", "", ""],
get_balance_row(_("Amounts not reflected in system"), amounts_not_reflected_in_system),
[""]*len(columns),
get_balance_row(_("Expected balance as per bank"), bank_bal)
]
return columns, data
def get_columns():
return ["Posting Date:Date:100", "Journal Voucher:Link/Journal Voucher:220",
"Debit:Currency:120", "Credit:Currency:120",
"Against Account:Link/Account:200", "Reference::100", "Ref Date:Date:110", "Clearance Date:Date:110"
]
def get_entries(filters):
entries = frappe.db.sql("""select
jv.posting_date, jv.name, jvd.debit, jvd.credit,
jvd.against_account, jv.cheque_no, jv.cheque_date, jv.clearance_date
from
`tabJournal Voucher Detail` jvd, `tabJournal Voucher` jv
where jvd.parent = jv.name and jv.docstatus=1
and jvd.account = %(account)s and jv.posting_date <= %(report_date)s
and ifnull(jv.clearance_date, '4000-01-01') > %(report_date)s
order by jv.name DESC""", filters, as_list=1)
return entries
def get_balance_row(label, amount):
if amount > 0:
return ["", label, amount, 0, "", "", "", ""]
else:
return ["", label, 0, abs(amount), "", "", "", ""] |
wevote/WeVoteServer | voter/controllers_contacts.py | Python | mit | 10,847 | 0.003227 | # voter/controllers_contacts.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
from dateutil import parser
from wevote_functions.functions import positive_value_exists
from .models import VoterContactEmail, VoterManager
def assemble_contact_display_name(
first_name=None,
last_name=None,
middle_name=None):
new_display_name = ''
if first_name is not None:
new_display_name += first_name
if middle_name is not None and middle_name != '':
if positive_value_exists(new_display_name):
new_display_name += " "
new_display_name += middle_name
if last_name is not None:
if positive_value_exists(new_display_name):
new_display_name += " "
new_display_name += last_name
return new_display_name
def move_voter_contact_email_to_another_voter(from_voter_we_vote_id, to_voter_we_vote_id):
status = ''
success = True
voter_contact_email_entries_moved = 0
if not positive_value_exists(from_voter_we_vote_id) or not positive_value_exists(to_voter_we_vote_id):
status += "MOVE_VOTER_CONTACT_EMAIL-MISSING_EITHER_FROM_OR_TO_VOTER_WE_VOTE_ID "
success = False
results = {
'status': status,
'success': success,
'from_voter_we_vote_id': from_voter_we_vote_id,
'to_voter_we_vote_id': to_voter_we_vote_id,
'voter_contact_email_entries_moved': voter_contact_email_entries_moved,
}
return results
if from_voter_we_vote_id == to_voter_we_vote_id:
status += "MOVE_VOTER_CONTACT_EMAIL-FROM_AND_TO_VOTER_WE_VOTE_IDS_IDENTICAL "
success = False
results = {
'status': status,
'success': success,
'from_voter_we_vote_id': from_voter_we_vote_id,
'to_voter_we_vote_id': to_voter_we_vote_id,
'voter_contact_email_entries_moved': voter_contact_email_entries_moved,
}
return results
# ######################
# Migrations
try:
voter_contact_email_entries_moved += VoterContactEmail.objects\
.filter(imported_by_voter_we_vote_id__iexact=from_voter_we_vote_id)\
.update(imported_by_voter_we_vote_id=to_voter_we_vote_id)
except Exception as e:
status += "FAILED-VOTER_CONTACT_EMAIL_UPDATE_IMPORTED_BY: " + str(e) + " "
try:
voter_contact_email_entries_moved += VoterContactEmail.objects\
.filter(voter_we_vote_id__iexact=from_voter_we_vote_id)\
.update(voter_we_vote_id=to_voter_we_vote_id)
except Exception as e:
status += "FAILED-VOTER_CONTACT_EMAIL_UPDATE: " + str(e) + " "
results = {
'status': status,
'success': success,
'from_voter_we_vote_id': from_voter_we_vote_id,
'to_voter_we_vote_id': to_voter_we_vote_id,
'voter_contact_email_entries_moved': voter_contact_email_entries_moved,
}
return results
def delete_google_contacts(voter_we_vote_id=''): # voterContactListSave - Delete
status = ''
success = True
google_contacts_deleted_count = 0
try:
# When we support other kinds of imports, don't delete entries which also have data from another source
# We will need to update remaining entries to set 'has_data_from_google_people_api' to False
# and clear other fields
google_contacts_deleted_tuple = VoterContactEmail.objects\
.filter(
has_data_from_google_people_api=True,
imported_by_voter_we_vote_id__iexact=voter_we_vote_id,
)\
.delete()
google_contacts_deleted_count = google_contacts_deleted_tuple[0]
except Exception as e:
status += "FAILED-VOTER_CONTACT_EMAIL_DELETE: " + str(e) + " "
results = {
'status': status,
'success': success,
'google_contacts_deleted_count': google_contacts_deleted_count,
}
return results
def filter_google_contacts(contacts):
filtered_contacts = []
strings_to_filter_out = [
'aws-nonprofit-credits@amazon.com',
'tickets@countable.uservoice.com',
'billing@nationbuilder.com',
'@noreply.github.com',
'@reply.github.com',
'@support.facebook.com',
'ra@godaddy.com',
'noreply',
'no-reply',
'support+',
'.zendesk.com',
'info@',
'support@',
]
for contact in contacts:
email_address_text = contact['email'] if 'email' in contact else ''
if positive_value_exists(email_address_text):
# If the email address contains any of the strings in strings_to_filter_out, don't import it
if not any(substring in email_address_text for substring in strings_to_filter_out):
filtered_contacts.append(contact)
return filtered_contacts
def save_google_contacts(voter_we_vote_id='', contacts=None):  # voterContactListSave
    """Create or update VoterContactEmail entries from Google People contacts.

    :param voter_we_vote_id: we_vote_id of the voter performing the import
    :param contacts: list of contact dicts from the Google People API, or None
    :return: dict with accumulated 'status' text and a 'success' flag

    Bug fix: the original used a mutable default argument (``contacts=[]``);
    ``None`` preserves the observable behavior (no-op import) safely.
    """
    status = ''
    success = True
    voter_manager = VoterManager()
    if contacts is not None:
        contacts = filter_google_contacts(contacts)

        # Build a lookup of this voter's existing contact emails (lower-cased)
        # so update_or_create can decide between updating and inserting.
        existing_voter_contact_email_dict = {}
        results = voter_manager.retrieve_voter_contact_email_list(
            imported_by_voter_we_vote_id=voter_we_vote_id,
            read_only=False)
        if results['voter_contact_email_list_found']:
            for voter_contact_email in results['voter_contact_email_list']:
                existing_voter_contact_email_dict[voter_contact_email.email_address_text.lower()] = \
                    voter_contact_email

        for contact in contacts:
            email_address_text = contact.get('email', '')
            if not positive_value_exists(email_address_text):
                continue
            google_contact_id = contact.get('id', '')
            update_time = contact.get('update_time', '')
            # Google sends ISO-8601 timestamps; store None when absent.
            if positive_value_exists(update_time):
                google_date_last_updated = parser.parse(update_time)
            else:
                google_date_last_updated = None
            update_results = voter_manager.update_or_create_voter_contact_email(
                email_address_text=email_address_text,
                existing_voter_contact_email_dict=existing_voter_contact_email_dict,
                from_google_people_api=True,
                google_contact_id=google_contact_id,
                google_date_last_updated=google_date_last_updated,
                google_display_name=contact.get('display_name', ''),
                google_first_name=contact.get('given_name', ''),
                google_last_name=contact.get('family_name', ''),
                imported_by_voter_we_vote_id=voter_we_vote_id,
            )
            status += update_results['status']
    return {
        'status': status,
        'success': success,
    }
def get_voter_contact_email_value(voter_contact_email=None, best_option='', fallback_option=''):
    """Return the preferred attribute value from a VoterContactEmail.

    Returns ``getattr(voter_contact_email, best_option)`` when that attribute
    exists and holds a positive value, otherwise the fallback attribute's
    value. Robustness fix: a missing fallback attribute now yields '' instead
    of raising AttributeError. (Also repairs a corrupted token in the
    original return statement.)
    """
    best_value = getattr(voter_contact_email, best_option, None)
    if positive_value_exists(best_value):
        return best_value
    return getattr(voter_contact_email, fallback_option, '')
def voter_contact_list_retrieve_for_api(voter_we_vote_id=''): # voterContactListRetrieve
status = ''
voter_manager = VoterManager()
voter_contact_results = voter_manager.retrieve_voter_contact_email_list(
imported_by_voter_we_vote_id=voter_we_vote_id)
|
colorisa/openacademy-proyect | openacademy/model/partner.py | Python | apache-2.0 | 392 | 0.02551 | # -*- coding: utf-8 -*-
from openerp import fields, models
class Partner(models.Model):
    """Extend the core ``res.partner`` model with OpenAcademy fields."""
    _inherit = 'res.partner'

    # New column on res.partner; by default a partner is not an instructor.
    # (Repairs corrupted tokens in the original field declarations.)
    instructor = fields.Boolean("Instructor", default=False)
    session_ids = fields.Many2many('openacademy.session',
                                   string="Attended Sessions", readonly=True)
PaulSD/cgroupsd | lib/cgroupsd_listener.py | Python | gpl-3.0 | 12,128 | 0.013522 | #!/usr/bin/env python
#
# Copyright 2015 Paul Donohue <cgroupsd@PaulSD.com>
#
# This program is free software: you can redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this program. If
# not, see <http://www.gnu.org/licenses/>.
#
#
# cgroups Management Daemon Linux Process Events Listener
#
# This module listens for events from the proc_events and reliable_proc_events modules and calls
# configured handler functions to create/configure cgroups and assign processes/threads to them as
# events are generated.
#
# Prerequisites:
# psutil (v2 or later) : `sudo apt-get install python-psutil` or
# `sudo yum install python-pip python-devel ; sudo pip install psutil`
# See the comments in libnl.py for additional prerequisites.
#
# Basic Usage:
# import cgroupsd_listener
# print('Do any initial cgroup setup/prep here')
# def handle_process(pid, proc, **args):
# print('Create/configure new cgroups if needed')
# print('Then assign the process to the relevant cgroup(s)')
# def handle_cleanup(**args):
# print('Delete any cgroups that were created by handle_process() and are now empty')
# cgroupsd_listener.handlers['process'] = handle_process
# cgroupsd_listener.handlers['cleanup'] = handle_cleanup
# cgroupsd_listener.start() # Should not be called until all handlers are registered
# ...
# cgroupsd_listener.stop() # Optional
#
# Available handler callbacks:
# handlers['process'] -> callback(type='process', pid, proc, event_args)
# This is called if a process (a thread group leader) may need to be reclassified into cgroups.
# On start-up, this will be called repeatedly for each existing process. During normal operation,
# this will be called any time a process is created or replaced (via fork or exec), changes its
# uid or gid, or changes its "comm" value (its command name as output by `ps -o comm`). If
# process events are lost or cannot be received, this may be called periodically for each existing
# process.
# When called, this callback should determine whether the specified process is relevant to this
# handler (this may be determined using process name, command line, UID, parent relationships, or
# any other available process data), determine whether any cgroups changes are needed, then make
# any necessary changes. In general, this callback should reclassify a process and all of its
# threads | at the same time, however it may handle a process and its threads differently if
# necessary. If necessary, this callback is responsible for creating and configuring cgroups
# before assigning processes to them. When practical, any created cgroups should be namespac | ed to
# allow an associated "cleanup" callback to identify and remove only the relevant cgroups. See
# http://www.freedesktop.org/wiki/Software/systemd/PaxControlGroups/ for additional cgroups
# rules/conventions.
# Note that any new processes spawned by this callback will trigger additional calls to this
# callback. To avoid infinite loops, this callback must be careful to avoid spawning new
# processes.
# The "proc" argument is a psutil.Process object for the specified "pid". See
# http://pythonhosted.org/psutil/#process-class for documentation. The "event_args" argument is
# the arguments dict associated with the proc_events or reliable_proc_events event that triggered
# this callback.
# This callback should return True if the specified process is relevant to this handler, or False
# if the specified process is not relevant. If True is returned, no further callbacks will be
# called for this process. If False is returned, cgroupsd_listener will continue to iterate
# through the registered "process" callbacks. If no "process" callbacks return True for the
# process, then cgroupsd_listener will iterate through the registered "thread" callbacks.
# handlers['thread'] -> callback(type='thread', tid, tproc, pid, proc, event_args)
# This is called if a thread (that may or may not be a thread group leader) may need to be
# reclassified into cgroups.
# On start-up, this will be called repeatedly for each existing thread. During normal operation,
# this will be called any time a process or thread is spawned, changes its uid or gid, or changes
# its "comm" value (its command name as output by `ps -o comm`). If process events are lost or
# cannot be received, this may be called periodically for each existing thread.
# When iterating through existing processes/threads, if a "process" callback returns True, then
# this will not be called for any threads in that process' thread group. When handling process
# events, "process" callbacks will only be called for thread group leaders, so this may be called
# for threads that are not group leaders but whose thread group leader would cause a "process"
# callback to return True. However, if a "process" callback returns True for a thread group
# leader, then this will not be called for that thread group leader.
# When called, this callback should determine whether the specified thread is relevant to this
# handler (this may be determined using thread name, command line, UID, thread group or process
# parent relationships, or any other available process data), determine whether any cgroups
# changes are needed, then make any necessary changes. If necessary, this callback is responsible
# for creating and configuring cgroups before assigning processes/threads to them. When
# practical, any created cgroups should be namespaced to allow an associated "cleanup" callback to
# identify and remove only the relevant cgroups. See
# http://www.freedesktop.org/wiki/Software/systemd/PaxControlGroups/ for additional cgroups
# rules/conventions.
# Note that any new processes or threads spawned by this callback will trigger additional calls
# to this callback. To avoid infinite loops, this callback must be careful to avoid spawning new
# processes or threads.
# The "tid" argument is the PID of the thread, and the "pid" argument is the PID of the thread
# group leader. For threads that are thread group leaders, tid == pid. The "tproc" and "proc"
# arguments are psutil.Process objects for the specified "tid" and "pid" respectively. See
# http://pythonhosted.org/psutil/#process-class for documentation. The "event_args" argument is
# the arguments dict associated with the proc_events or reliable_proc_events event that triggered
# this callback.
# This callback should return True if the specified thread is relevant to this handler, or False
# if the specified thread is not relevant. If True is returned, no further callbacks will
# be called for this thread. If False is returned, cgroupsd_listener will continue to iterate
# through the registered "thread" callbacks.
# handlers['cleanup'] -> callback(type='cleanup', tid, pid, event_args)
# This is intended to be used to clean up empty cgroups.
# It is called with tid=None and pid=None when cgroupsd_listener iterates through existing
# processes/threads on start-up or if process events are lost or cannot be received. (It is only
# called once after each full iteration.) It is also called with a tid/pid when a process event
# is received indicating that a thread or process has exited. The "event_args" argument is the
# arguments dict associated with the proc_events or reliable_proc_events event that triggered
# this callback.
# Note that any short-lived processes or threads spawned by this callback will trigger additional
# calls to this callback. To avoid infinite loops, this callback must be careful to avoid
# spawning short-lived processes or threads.
# |
BSI-CERT-Bund/cortex-analyzers | analyzers/GoogleSafebrowsing/safebrowsing.py | Python | agpl-3.0 | 2,689 | 0.004091 | import json
import requests
class SearchTypeNotSupportedError(Exception):
    """Raised when a search type other than 'url' or 'ip' is requested."""
    pass
class SafebrowsingClient:
    """Simple API to Google Safebrowsing and historic.

    :param key: API key for google safebrowsing
    :param client_id: ClientId for Safebrowsing API
    :param client_version: ClientVersion for Safebrowsing API. Default: 0.1"""

    def __init__(self, key: str, client_id: str, client_version: str = '0.1'):
        self.api_key = key
        self.session = requests.Session()
        # v4 lookup endpoint; the API key travels as a query parameter.
        self.url = 'https://safebrowsing.googleapis.com/v4/threatMatches:find?key={}'.format(key)
        self.client_id = client_id
        self.client_version = client_version

    def __prepare_body(self, search_value: str, search_type: str = 'url') -> dict:
        """
        Prepares the http body for querying safebrowsing api

        :param search_value: value to search for
        :param search_type: 'url' or 'ip'
        :returns: http body as dict
        :raises SearchTypeNotSupportedError: for any other search_type
        """
        body = {
            'client': {
                'clientId': self.client_id,
                'clientVersion': self.client_version
            }
        }
        if search_type == 'url':
            data = {
                'threatTypes': [
                    'MALWARE', 'SOCIAL_ENGINEERING', 'UNWANTED_SOFTWARE', 'POTENTIALLY_HARMFUL_APPLICATION'
                ],
                'platformTypes': ['ANY_PLATFORM', 'ALL_PLATFORMS', 'WINDOWS', 'LINUX', 'OSX', 'ANDROID', 'IOS'],
                'threatEntryTypes': ['URL']
            }
        elif search_type == 'ip':
            data = {
                'threatTypes': ['MALWARE'],
                'platformTypes': ['WINDOWS', 'LINUX', 'OSX'],
                'threatEntryTypes': ['IP_RANGE']
            }
        else:
            raise SearchTypeNotSupportedError('Currently supported search types are \'url\' and \'ip\'.')
        # TODO: Only found threatEntry 'url' in the docs. What to use for ip_range?
        data['threatEntries'] = [{'url': search_value}]
        body['threatInfo'] = data
        return body

    def __query_safebrowsing(self, search_value: str, search_type: str):
        """POST the lookup request and decode the JSON response body."""
        return json.loads(
            self.session.post(
                self.url,
                json=self.__prepare_body(
                    search_value=search_value,
                    search_type=search_type
                )
            ).text
        )

    def query_url(self, url):
        """Look up *url* against the Safe Browsing v4 threat lists."""
        return self.__query_safebrowsing(search_value=url, search_type='url')

    # TODO: Add another function for querying IPs
    #def query_ip(self, ip):
    #    return self.__query_safebrowsing(search_value=ip, search_type='ip')
jni/networkx | networkx/algorithms/components/tests/test_biconnected.py | Python | bsd-3-clause | 6,615 | 0.037793 | #!/usr/bin/env python
from nose.tools import *
import networkx as nx
from networkx.algorithms.components import biconnected
from networkx import NetworkXNotImplemented
def assert_components_equal(x,y):
    """Compare two collections of edge components, ignoring component order
    and edge direction (each edge becomes a frozenset of its endpoints)."""
    sx = set((frozenset([frozenset(e) for e in c]) for c in x))
    sy = set((frozenset([frozenset(e) for e in c]) for c in y))
    assert_equal(sx,sy)
def test_barbell():
    """Articulation points and biconnected components of a barbell graph
    with an extra path (7-20-21-22) and cycle (22-23-24-25) attached."""
    G=nx.barbell_graph(8,4)
    G.add_path([7,20,21,22])
    G.add_cycle([22,23,24,25])
    pts=set(biconnected.articulation_points(G))
    assert_equal(pts,set([7,8,9,10,11,12,20,21,22]))
    answer = [set([12, 13, 14, 15, 16, 17, 18, 19]),
              set([0, 1, 2, 3, 4, 5, 6, 7]),
              set([22, 23, 24, 25]),
              set([11, 12]),
              set([10, 11]),
              set([9, 10]),
              set([8, 9]),
              set([7, 8]),
              set([21, 22]),
              set([20, 21]),
              set([7, 20])]
    bcc=list(biconnected.biconnected_components(G))
    bcc.sort(key=len, reverse=True)
    assert_equal(bcc,answer)
    # Adding a chord across the bridge chain removes those cut vertices.
    G.add_edge(2,17)
    pts=set(biconnected.articulation_points(G))
    assert_equal(pts,set([7,20,21,22]))
def test_articulation_points_cycle():
    """Two cycles sharing node 1 make node 1 the only articulation point."""
    G=nx.cycle_graph(3)
    G.add_cycle([1,3,4])
    pts=set(biconnected.articulation_points(G))
    assert_equal(pts,set([1]))
def test_is_biconnected():
    """A single cycle is biconnected; attaching a second cycle at one
    shared node (a cut vertex) makes the graph non-biconnected."""
    G=nx.cycle_graph(3)
    assert_true(biconnected.is_biconnected(G))
    G.add_cycle([1,3,4])
    assert_false(biconnected.is_biconnected(G))
def test_empty_is_biconnected():
    """Edgeless graphs and a graph with a single edge are not biconnected."""
    G=nx.empty_graph(5)
    assert_false(biconnected.is_biconnected(G))
    G.add_edge(0,1)
    assert_false(biconnected.is_biconnected(G))
def test_biconnected_components_cycle():
    """Each of the two cycles sharing node 1 is its own biconnected component."""
    G=nx.cycle_graph(3)
    G.add_cycle([1,3,4])
    pts = set(map(frozenset,biconnected.biconnected_components(G)))
    assert_equal(pts,set([frozenset([0,1,2]),frozenset([1,3,4])]))
def test_biconnected_component_subgraphs_cycle():
    """Component subgraphs copy node/edge/graph attributes, and mutating a
    copied edge attribute must not leak back into the original graph.

    (Also repairs a corrupted ``nx.Graph`` token in the original source and
    deduplicates the two symmetric assertion branches.)
    """
    G=nx.cycle_graph(3)
    G.add_cycle([1,3,4,5])
    G.add_edge(1,3,eattr='red') # test copying of edge data
    G.node[1]['nattr']='blue'
    G.graph['gattr']='green'
    Gc = set(biconnected.biconnected_component_subgraphs(G))
    assert_equal(len(Gc),2)
    g1,g2=Gc
    # Normalize: 'triangle' is the component containing node 0,
    # 'square' is the 4-cycle carrying the attributed edge (1,3).
    if 0 in g1:
        triangle,square = g1,g2
    else:
        triangle,square = g2,g1
    assert_true(nx.is_isomorphic(triangle,nx.Graph([(0,1),(0,2),(1,2)])))
    assert_true(nx.is_isomorphic(square,nx.Graph([(1,3),(1,5),(3,4),(4,5)])))
    assert_equal(square[1][3]['eattr'],'red')
    assert_equal(square.node[1]['nattr'],'blue')
    assert_equal(square.graph['gattr'],'green')
    # The subgraph holds a copy: mutating it must not touch G.
    square[1][3]['eattr']='blue'
    assert_equal(square[1][3]['eattr'],'blue')
    assert_equal(G[1][3]['eattr'],'red')
def test_biconnected_components1():
    """Articulation points and biconnected edge components of a hand-built
    16-node graph with several bridges."""
    # graph example from
    # http://www.ibluemojo.com/school/articul_algorithm.html
    edges=[(0,1),
           (0,5),
           (0,6),
           (0,14),
           (1,5),
           (1,6),
           (1,14),
           (2,4),
           (2,10),
           (3,4),
           (3,15),
           (4,6),
           (4,7),
           (4,10),
           (5,14),
           (6,14),
           (7,9),
           (8,9),
           (8,12),
           (8,13),
           (10,15),
           (11,12),
           (11,13),
           (12,13)]
    G=nx.Graph(edges)
    pts = set(biconnected.articulation_points(G))
    assert_equal(pts,set([4,6,7,8,9]))
    comps = list(biconnected.biconnected_component_edges(G))
    answer = [
        [(3,4),(15,3),(10,15),(10,4),(2,10),(4,2)],
        [(13,12),(13,8),(11,13),(12,11),(8,12)],
        [(9,8)],
        [(7,9)],
        [(4,7)],
        [(6,4)],
        [(14,0),(5,1),(5,0),(14,5),(14,1),(6,14),(6,0),(1,6),(0,1)],
        ]
    assert_components_equal(comps,answer)
def test_biconnected_components2():
    """Biconnected edge components of chained letter-labelled cycles
    joined by the bridge edge E-G."""
    G=nx.Graph()
    G.add_cycle('ABC')
    G.add_cycle('CDE')
    G.add_cycle('FIJHG')
    G.add_cycle('GIJ')
    G.add_edge('E','G')
    comps = list(biconnected.biconnected_component_edges(G))
    answer = [
        [tuple('GF'),tuple('FI'),tuple('IG'),tuple('IJ'),tuple('JG'),tuple('JH'),tuple('HG')],
        [tuple('EG')],
        [tuple('CD'),tuple('DE'),tuple('CE')],
        [tuple('AB'),tuple('BC'),tuple('AC')]
        ]
    assert_components_equal(comps,answer)
def test_biconnected_davis():
    """The Davis Southern Women graph is one giant biconnected component,
    so it has no articulation points."""
    D = nx.davis_southern_women_graph()
    bcc = list(biconnected.biconnected_components(D))[0]
    assert_true(set(D) == bcc) # All nodes in a giant bicomponent
    # So no articulation points
    assert_equal(list(biconnected.articulation_points(D)),[])
def test_biconnected_karate():
    """Known biconnected components of Zachary's karate club graph; node 0
    is its only articulation point."""
    K = nx.karate_club_graph()
    answer = [set([0, 1, 2, 3, 7, 8, 9, 12, 13, 14, 15, 17, 18, 19,
                   20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33]),
              set([0, 4, 5, 6, 10, 16]),
              set([0, 11])]
    bcc = list(biconnected.biconnected_components(K))
    bcc.sort(key=len, reverse=True)
    # NOTE(review): the assertion below recomputes the components instead of
    # using the sorted `bcc` above — confirm whether the sort is intended.
    assert_true(list(biconnected.biconnected_components(K)) == answer)
    assert_equal(list(biconnected.articulation_points(K)),[0])
def test_biconnected_eppstein():
    """Adjacency-dict examples ported from Eppstein's PADS Biconnectivity
    test suite (see the URL below)."""
    # tests from http://www.ics.uci.edu/~eppstein/PADS/Biconnectivity.py
    G1 = nx.Graph({
        0: [1,2,5],
        1: [0,5],
        2: [0,3,4],
        3: [2,4,5,6],
        4: [2,3,5,6],
        5: [0,1,3,4],
        6: [3,4]})
    G2 = nx.Graph({
        0: [2,5],
        1: [3,8],
        2: [0,3,5],
        3: [1,2,6,8],
        4: [7],
        5: [0,2],
        6: [3,8],
        7: [4],
        8: [1,3,6]})
    assert_true(biconnected.is_biconnected(G1))
    assert_false(biconnected.is_biconnected(G2))
    answer_G2 = [set([1, 3, 6, 8]), set([0, 2, 5]), set([2, 3]), set([4, 7])]
    bcc = list(biconnected.biconnected_components(G2))
    bcc.sort(key=len, reverse=True)
    assert_equal(bcc, answer_G2)
def test_connected_raise():
    """All biconnectivity algorithms reject directed graphs with
    NetworkXNotImplemented."""
    DG = nx.DiGraph()
    assert_raises(NetworkXNotImplemented,nx.biconnected_components,DG)
    assert_raises(NetworkXNotImplemented,nx.biconnected_component_subgraphs,DG)
    assert_raises(NetworkXNotImplemented,nx.biconnected_component_edges,DG)
    assert_raises(NetworkXNotImplemented,nx.articulation_points,DG)
    assert_raises(NetworkXNotImplemented,nx.is_biconnected,DG)
|
kYc0o/RIOT | tests/pkg_wolfcrypt-ed25519-verify/tests/01-run.py | Python | lgpl-2.1 | 192 | 0 | #!/usr/bin/env python3
import sys

from testrunner import run


def testfunc(child):
    """Expect the firmware to report a successful ed25519 signature check.

    (Repairs corrupted tokens in the original import and main-guard lines.)
    """
    child.expect_exact("The signature is valid!")


if __name__ == "__main__":
    sys.exit(run(testfunc))
|
topaz1874/srvup | src/notification/views.py | Python | mit | 3,083 | 0.011028 | import json
from django.http import HttpResponse
from django.shortcuts import render,Http404,HttpResponseRedirect
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from .models import Notifications
# Create your views here.
@login_required
def all(request):
    """Render every notification for the logged-in user.

    Builds ``url_lst``, a list of (recipient, html_fragment, read_flag)
    string tuples consumed by the template; the fragment links to the
    notification target when the target resolves to a URL.

    (Repairs a corrupted ``elif`` line in the original and deduplicates the
    four nearly-identical ``url_lst.append`` calls.)
    """
    objects = Notifications.objects.get_all_user(request.user)
    url_lst = []
    for note in objects:
        try:
            target_url = note.target_object.get_absolute_url()
        except Exception:
            # Target may be gone or may not define get_absolute_url().
            target_url = None
        context = {
            'sender': note.sender_object,
            'action': note.action_object,
            'target': note.target_object,
            'recipient': note.recipient,
            'verify_read': reverse("notification_read", kwargs={"pk": note.id}),
            'target_url': target_url,
            'verb': note.verb,
        }
        # Pick the most specific sentence the available pieces allow.
        if note.target_object and note.action_object and target_url:
            message = ("%(sender)s %(verb)s "
                       "<a href='%(verify_read)s?next=%(target_url)s'>%(target)s</a>"
                       " with %(action)s" % context)
        elif note.target_object and note.action_object and not target_url:
            message = "%(sender)s %(verb)s %(target)s with %(action)s" % context
        elif note.target_object and not note.action_object and not target_url:
            message = "%(sender)s %(verb)s %(target)s" % context
        else:
            message = "%(sender)s %(verb)s" % context
        url_lst.append(("%s" % note.recipient, message, "%s" % note.read))
    context = {
        'objects': objects,
        'url_lst': url_lst,
    }
    return render(request, 'notification/all.html', context)
@login_required
def read(request, pk):
    """Mark notification ``pk`` as read, then redirect.

    Redirects to the ``next`` query parameter when present, otherwise to the
    notification list. A notification belonging to a different user raises
    Http404; a missing notification redirects back to the list.

    Bug fix: the original did ``return Http404`` (returning the exception
    class as the response) and a bare ``except:`` then swallowed everything.
    """
    next_url = request.GET.get('next', None)
    try:
        object_note = Notifications.objects.get(id=pk)
    except Notifications.DoesNotExist:
        return HttpResponseRedirect(reverse('notification_all'))
    if object_note.recipient != request.user:
        raise Http404
    object_note.read = True
    object_note.save()
    if next_url:
        # NOTE(review): next_url is user-controlled; consider validating it
        # (e.g. url_has_allowed_host_and_scheme) to avoid an open redirect.
        return HttpResponseRedirect(next_url)
    return HttpResponseRedirect(reverse('notification_all'))
@login_required
def get_ajax(request):
    """Return the user's recent unread notifications as JSON.

    Only answers AJAX POST requests; anything else raises Http404.

    Bug fix: the original did ``return Http404`` (returning the exception
    class as the response) instead of raising it.
    """
    if not (request.is_ajax() and request.method == 'POST'):
        raise Http404
    notes = Notifications.objects.get_recent_unread(request.user)
    count = notes.count()
    note_lst = [str(note.get_link) for note in notes]
    data = {
        'notifications': note_lst,
        'count': count,
    }
    return HttpResponse(json.dumps(data), content_type='application/json')
|
dbarbier/privot | python/test/t_SymmetricMatrix_std.py | Python | lgpl-3.0 | 4,523 | 0.018351 | #! /usr/bin/env python
from openturns import *
from math import *
TESTPREAMBLE()
try :
# TEST NUMBER ZERO : DEFAULT CONSTRUCTOR AND STRING CONVERTER
print "test number zero : default constructor and string converter"
# Default constructor
symmetricMatrix0 =Sy | mmetricMat | rix()
# String converter
print "symmetricMatrix0 = " , repr(symmetricMatrix0)
# TEST NUMBER ONE : CONSTRUCTOR WITH SIZE, OPERATOR() AND STRING CONVERTER
print "test number one : constructor with size, operator() and string converter"
# Constructor with size
symmetricMatrix1 = SymmetricMatrix(2)
# Check operator() methods
symmetricMatrix1[0,0]=1.
symmetricMatrix1[1,0]=2.
symmetricMatrix1[0,1]=3.
symmetricMatrix1[1,1]=4.
# String converter
print "symmetricMatrix1 = " , repr(symmetricMatrix1)
# TEST NUMBER TWO : COPY CONSTRUCTOR AND STRING CONVERTER
print "test number two : copy constructor and string converter"
# Copy constructor
symmetricMatrix2 = SymmetricMatrix(symmetricMatrix1)
# String converter
print "symmetricMatrix2 = " , repr(symmetricMatrix2)
# TEST NUMBER THREE : GET DIMENSIONS METHODS
print "test number three : get dimensions methods"
# Get dimension methods
print "symmetricMatrix1's nbRows = " , symmetricMatrix1.getNbRows()
print "symmetricMatrix1's nbColumns = " , symmetricMatrix1.getNbColumns()
# TEST NUMBER FIVE : ASSIGNMENT METHOD
print "test number five : assignment method"
# Assignment method
# No sense with pyton
# TEST NUMBER SIX : TRANSPOSITION METHOD
print "test number six : transposition method"
# Check transpose method
symmetricMatrix4 = symmetricMatrix1.transpose()
print "symmetricMatrix1 transposed = " , repr(symmetricMatrix4)
# TEST NUMBER SEVEN : ADDITION METHOD
print "test number seven : addition method"
# Check addition method : we check the operator and the symmetry of the operator, thus testing the comparison operator
sum1 = symmetricMatrix1 + symmetricMatrix4
sum2 = symmetricMatrix4 + symmetricMatrix1
print "sum1 = " , repr(sum1)
print "sum2 = " , repr(sum2)
print "sum1 equals sum2 = " , sum1 == sum2
# TEST NUMBER EIGHT : SUBSTRACTION METHOD
print "test number eight : substraction method"
# Check substraction method
diff = symmetricMatrix1-symmetricMatrix4
print "diff = " , repr(diff)
# TEST NUMBER NINE : MATRIX MULTIPLICATION METHOD
print "test number nine : matrix multiplication method"
# Check multiplication method
prod = symmetricMatrix1*symmetricMatrix4
print "prod = " , repr(prod)
# TEST NUMBER TEN : MULTIPLICATION WITH A NUMERICAL POINT METHOD
print "test number ten : multiplication with a numerical point method"
# Create the numerical point
pt = NumericalPoint()
pt.add(1.)
pt.add(2.)
print "pt = " , repr(pt)
# Check the product method
ptResult = symmetricMatrix1* pt
print "ptResult = " , repr(ptResult)
# TEST NUMBER ELEVEN : MULTIPLICATION AND DIVISION BY A NUMERICAL SCALAR METHODS
print "test number eleven : multiplication and division by a numerical scalar methods"
# Check the multiplication method
s=3.
scalprod1 = symmetricMatrix1 * s
# bug PYTHON scalprod2 = s * symmetricMatrix1
scalprod3 = symmetricMatrix1 * s
print "scalprod1 = " , repr(scalprod1)
# print "scalprod2 = " , scalprod2
print "scalprod3 = " , repr(scalprod3)
# print "scalprod1 equals scalprod2 = " , (scalprod1 == scalprod2)
print "scalprod1 equals scalprod3 = " , (scalprod1 == scalprod3)
# print "scalprod2 equals scalprod3 = " , (scalprod2 == scalprod3)
# Check the division method
scaldiv1 = symmetricMatrix1/s
scaldiv2 = symmetricMatrix1/s
print "scaldiv1 = " , repr(scaldiv1)
print "scaldiv2 = " , repr(scaldiv2)
print "scaldiv1 equals scaldiv2 = " , (scaldiv1 == scaldiv2)
# TEST NUMBER TWELVE : ISEMPTY METHOD
print "test number twelve : isEmpty method"
# Check method isEmpty
symmetricMatrix5 = SymmetricMatrix()
symmetricMatrix6 = SymmetricMatrix()
print "symmetricMatrix0 is empty = " , symmetricMatrix0.isEmpty()
print "symmetricMatrix1 is empty = " , symmetricMatrix1.isEmpty()
print "symmetricMatrix5 is empty = " , symmetricMatrix5.isEmpty()
except :
import sys
print "t_SymmetricMatrix_std.py", sys.exc_type, sys.exc_value
|
potatosushi/Discord-Notification-Bot | discord/server.py | Python | mit | 13,502 | 0.001926 | # -*- coding: utf-8 -*-
"""
The MIT License (MIT)
Copyright (c) 2015-2016 Rapptz
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
"""
from . import utils
from .role import Role
from .member import Member
from .emoji import Emoji
from .game import Game
from .channel import Channel
from .enums import ServerRegion, Status, try_enum, VerificationLevel
from .mixins import Hashable
class Server(Hashable):
"""Represents a Discord server.
Supported Operations:
+-----------+--------------------------------------+
| Operation | Description |
+===========+======================================+
    | x == y    | Checks if two servers are equal.     |
+-----------+--------------------------------------+
| x != y | Checks if two servers are not equal. |
+-----------+--------------------------------------+
    | hash(x)   | Returns the server's hash.           |
+-----------+--------------------------------------+
| str(x) | Returns the server's name. |
+-----------+--------------------------------------+
Attributes
----------
name : str
The server name.
me : :class:`Member`
Similar to :attr:`Client.user` except an instance of :class:`Member`.
This is essentially used to get the member version of yourself.
roles
A list of :class:`Role` that the server has available.
emojis
A list of :class:`Emoji` that the server owns.
region : :class:`ServerRegion`
The region the server belongs on. There is a chance that the region
will be a ``str`` if the value is not recognised by the enumerator.
afk_timeout : int
The timeout to get sent to the AFK channel.
afk_channel : :class:`Channel`
The channel that denotes the AFK channel. None if it doesn't exist.
members
An iterable of :class:`Member` that are currently on the server.
channels
An iterable of :class:`Channel` that are currently on the server.
icon : str
The server's icon.
id : str
The server's ID.
owner : :class:`Member`
The member who owns the server.
unavailable : bool
Indicates if the server is unavailable. If this is ``True`` then the
reliability of other attributes outside of :meth:`Server.id` is slim and they might
all be None. It is best to not do anything with the server if it is unavailable.
Check the :func:`on_server_unavailable` and :func:`on_server_available` events.
large : bool
Indicates if the server is a 'large' server. A large server is defined as having
more than ``large_threshold`` count members, which for this library is set to
the maximum of 250.
voice_client: Optional[:class:`VoiceClient`]
The VoiceClient associated with this server. A shortcut for the
:meth:`Client.voice_client_in` call.
mfa_level: int
Indicates the server's two factor authorisation level. If this value is 0 then
the server does not require 2FA for their administrative members. If the value is
1 then they do.
verification_level: :class:`VerificationLevel`
The server's verification level.
features: List[str]
A list of features that the server has. They are currently as follows:
- ``VIP_REGIONS``: Server has VIP voice regions
- ``VANITY_URL``: Server has a vanity invite URL (e.g. discord.gg/discord-api)
- ``INVITE_SPLASH``: Server's invite page has a special splash.
splash: str
The server's invite splash.
"""
__slots__ = ['afk_timeout', 'afk_channel', '_members', '_channels', 'icon',
'name', 'id', 'owner', 'unavailable', 'name', 'region',
'_default_role', '_default_channel', 'roles', '_member_count',
'large', 'owner_id', 'mfa_level', 'emojis', 'features',
'verification_level', 'splash' ]
    def __init__(self, **kwargs):
        """Build a Server from a raw guild payload (parsed by _from_data)."""
        # Channel and member caches are keyed by ID.
        self._channels = {}
        self.owner = None
        self._members = {}
        self._from_data(kwargs)
    @property
    def channels(self):
        """A view of the server's cached :class:`Channel` objects."""
        return self._channels.values()
    def get_channel(self, channel_id):
        """Returns a :class:`Channel` with the given ID. If not found, returns None."""
        # dict.get yields None for unknown IDs.
        return self._channels.get(channel_id)
    def _add_channel(self, channel):
        """Cache *channel* under its ID."""
        self._channels[channel.id] = channel
    def _remove_channel(self, channel):
        """Drop *channel* from the cache; a no-op when it is absent."""
        self._channels.pop(channel.id, None)
    @property
    def members(self):
        """A view of the server's cached :class:`Member` objects."""
        return self._members.values()
    def get_member(self, user_id):
        """Returns a :class:`Member` with the given ID. If not found, returns None."""
        # dict.get yields None for unknown IDs.
        return self._members.get(user_id)
    def _add_member(self, member):
        """Cache *member* under its ID."""
        self._members[member.id] = member
    def _remove_member(self, member):
        """Drop *member* from the cache; a no-op when it is absent."""
        self._members.pop(member.id, None)
    def __str__(self):
        """Return the server's name."""
        return self.name
    def _update_voice_state(self, data):
        """Apply a raw voice-state payload to the matching member.

        Returns a ``(before, member)`` tuple where *before* is a copy of the
        member prior to the update, or ``None`` when the member is unknown
        (in which case the member is left untouched).
        """
        user_id = data.get('user_id')
        member = self.get_member(user_id)
        before = None
        if member is not None:
            before = member._copy()
            ch_id = data.get('channel_id')
            channel = self.get_channel(ch_id)
            member._update_voice_state(voice_channel=channel, **data)
        return before, member
    def _add_role(self, role):
        """Append *role* to the cache, shifting existing role positions up."""
        # roles get added to the bottom (position 1, pos 0 is @everyone)
        # so since self.roles has the @everyone role, we can't increment
        # its position because it's stuck at position 0. Luckily x += False
        # is equivalent to adding 0. So we cast the position to a bool and
        # increment it.
        for r in self.roles:
            r.position += bool(r.position)

        self.roles.append(role)
    def _remove_role(self, role):
        """Remove *role* from the cache, shifting higher positions down.

        Raises ValueError when the role is not present (propagated from
        list.remove).
        """
        # this raises ValueError if it fails..
        self.roles.remove(role)

        # since it didn't, we can change the positions now
        # basically the same as above except we only decrement
        # the position if we're above the role we deleted.
        for r in self.roles:
            r.position -= r.position > role.position
def _from_data(self, guild):
# according to Stan, this is always available even if the guild is unavailable
# I don't have this guarantee when someone updates the server.
member_count = guild.get('member_count', None)
if member_count:
self._member_count = member_count
self.name = guild.get('name')
self.region = try_enum(ServerRegion, guild.get('region'))
self.verification_level = try_enum(VerificationLevel, guild.get('verification_level'))
self.afk_timeout = guild.get('afk_timeout')
self.icon = guild.get('icon')
self.unavailable = guild.get('unavailable', False)
self.id = guild['id']
self.roles = [Role(server=self, **r) for r in guild.get('roles', [ |
amcat/amcat | api/rest/tests/test_renderer.py | Python | agpl-3.0 | 2,580 | 0.003488
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands)                     #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
from amcat.tools import amcattest
from amcat.tools.toolkit import read_date
from api.rest.tablerenderer import TableRenderer
class TestTableRenderer(amcattest.AmCATTestCase):
    def _test(self, d, header, data):
        """Tablize *d* and compare with *header*/*data*, ignoring column order."""
        table = TableRenderer().tablize(d)
        # Map column label -> index so that header order changes don't matter.
        col_index = {str(col): i for (i, col) in enumerate(table.get_columns())}
        self.assertEqual(set(header), set(col_index))
        rows = [tuple(row[col_index[h]] for h in header)
                for row in table.to_list(tuple_name=None)]
        self.assertEqual(data, rows)

    def test_tablize(self):
        self._test([{"a": 1}], ["a"], [(1,)])
        self._test([{"a": 1, "c": 3}, {"a": 1, "b": 2}],
                   ["a", "c", "b"],
                   [(1, 3, None), (1, None, 2)])
        self._test([{"a": 1, "c": 3}, {"a": 1, "b": {"x": "X", "y": "Y"}}],
                   ["a", "c", "b.x", "b.y"],
                   [(1, 3, None, None), (1, None, "X", "Y")])
        self._test([{"a": 1, "c": 3}, {"a": 4, "d": [{"a": "DA"}, {"b": "DB"}]}],
                   ["a", "c", "d.0.a", "d.1.b"],
                   [(1, 3, None, None), (4, None, "DA", "DB")])
|
bryanveloso/avalonstar-tv | apps/subscribers/management/commands/updatetickets.py | Python | apache-2.0 | 2,623 | 0.00305 | # -*- coding: utf-8 -*-
import logging
logger = logging.getLogger(__name__)
import requests
import os
from django.core.management.base import NoArgsCommand
from apps.subscribers.models import Ticket
class Command(NoArgsCommand):
    """Sync subscriber tickets against the Twitch subscriptions API.

    All tickets are first invalidated; every subscription still present in
    the API re-activates (or creates) its ticket.
    """
    help = 'Loops through all subscribers and marks each ticket appropriately.'

    def handle_noargs(self, **options):
        # Prepare our request.
        headers = {
            'Authorization': 'OAuth %s' % os.environ.get('TWITCH_OAUTH_TOKEN'),
            'Accept': 'application/vnd.twitchtv.v3+json'}
        url = 'https://api.twitch.tv/kraken/channels/avalonstar/subscriptions'

        # Let's not invalidate anything unnecessarily. If we hit an exception
        # with the first request, then bail.
        try:
            r = requests.get(url, headers=headers)
        except requests.exceptions.RequestException as e:
            logger.exception(e)
            # Bug fix: this used to `pass`, which fell through and raised a
            # NameError on the undefined `r` below. Actually bail instead.
            return

        # Rather than mark active tickets as inactive, mark all tickets as
        # inactive. Tickets still present in the API are re-activated below.
        Ticket.objects.invalidate_tickets()

        limit = 100  # Maximum number of tickets we can fetch at once.
        while url:
            # To keep our dyno-hour usage down, make sure requests aren't
            # hung up: time out quickly and bail on any request error.
            try:
                response = requests.get(url, headers=headers,
                                        params={'limit': limit}, timeout=1)
            except requests.exceptions.RequestException as e:
                logger.exception(e)
                break
            data = response.json()
            tickets = data['subscriptions']
            # The Twitch API doesn't stop offering `next` URLs when no results
            # are available. So if we don't have tickets, shut it down.
            if not tickets:
                break
            # Mark each person whose ticket still exists in Twitch's API as
            # active, updating their details along the way.
            for ticket in tickets:
                name = ticket['user']['name']
                updates = {
                    'display_name': ticket['user']['display_name'],
                    'is_active': True,
                    'updated': ticket['created_at'],
                    'twid': ticket['_id']}
                t, created = Ticket.objects.update_or_create(name=name,
                                                             defaults=updates)
            # Done. Grab `next` and keep looping.
            url = data['_links']['next']
|
joshsmith2/superlists | lists/tests/test_views.py | Python | gpl-2.0 | 5,826 | 0.001202 | from django.core.urlresolvers import resolve
from django.template.loader import render_to_string
from django.test import TestCase
from django.http import HttpRequest
from django.utils.html import escape
from unittest import skip
from lists.views import home_page
from lists.models import Item, List
from lists.forms import (
ItemForm, ExistingListItemForm,
EMPTY_ITEM_ERROR, DUPLICATE_ITEM_ERROR
)
class HomePageTest(TestCase):
    """Smoke tests for the site root."""

    def test_home_page_renders_home_template(self):
        resp = self.client.get('/')
        self.assertTemplateUsed(resp, 'home.html')

    def test_home_page_uses_item_form(self):
        resp = self.client.get('/')
        self.assertIsInstance(resp.context['form'], ItemForm)
class ListViewTest(TestCase):
    """Tests for the single-list view (/lists/<id>/)."""

    def post_invalid_input(self):
        # Helper: POST an empty item to a fresh list.
        list_ = List.objects.create()
        return self.client.post(
            '/lists/%d/' % (list_.id),
            data={'text': ''}
        )

    def test_for_invalid_input_nothing_saved_to_db(self):
        self.post_invalid_input()
        self.assertEqual(Item.objects.count(), 0)

    def test_for_invalid_input_renders_list_template(self):
        response = self.post_invalid_input()
        self.assertEqual(response.status_code, 200)

    def test_for_invalid_input_passes_form_to_template(self):
        response = self.post_invalid_input()
        self.assertIsInstance(response.context['form'], ExistingListItemForm)

    def test_for_invalid_input_shows_error_on_page(self):
        response = self.post_invalid_input()
        self.assertContains(response, escape(EMPTY_ITEM_ERROR))

    def test_duplicate_item_validation_errors_end_up_on_lists_page(self):
        list1 = List.objects.create()
        item1 = Item.objects.create(list=list1, text='textey')
        response = self.client.post(
            '/lists/%d/' % (list1.id),
            data={'text': 'textey'}
        )
        expected_error = escape(DUPLICATE_ITEM_ERROR)
        self.assertContains(response, expected_error)
        self.assertTemplateUsed(response, 'list.html')
        self.assertEqual(Item.objects.all().count(), 1)

    def test_uses_list_template(self):
        list_ = List.objects.create()
        response = self.client.get('/lists/%d/' % (list_.id,))
        self.assertTemplateUsed(response, 'list.html')

    def test_display_only_items_for_that_list(self):
        correct_list = List.objects.create()
        Item.objects.create(text='i1', list=correct_list)
        Item.objects.create(text='i2', list=correct_list)
        other_list = List.objects.create()
        Item.objects.create(text='i1o', list=other_list)
        Item.objects.create(text='i2o', list=other_list)

        response = self.client.get('/lists/%d/' % (correct_list.id,))

        self.assertContains(response, 'i1')
        self.assertContains(response, 'i2')
        self.assertNotContains(response, 'i1o')
        self.assertNotContains(response, 'i2o')

    def test_passes_correct_list_to_template(self):
        other_list = List.objects.create()
        correct_list = List.objects.create()
        response = self.client.get('/lists/%d/' % (correct_list.id,))
        self.assertEqual(response.context['list'], correct_list)

    def test_can_save_a_POST_request_to_an_existing_list(self):
        other_list = List.objects.create()
        correct_list = List.objects.create()

        self.client.post(
            '/lists/%d/' % (correct_list.id,),
            data={'text': 'A new item for an existing list'}
        )

        self.assertEqual(Item.objects.count(), 1)
        new_item = Item.objects.first()
        self.assertEqual(new_item.text, 'A new item for an existing list')
        self.assertEqual(new_item.list, correct_list)

    def test_POST_redirects_to_list_view(self):
        other_list = List.objects.create()
        correct_list = List.objects.create()
        response = self.client.post(
            '/lists/%d/' % (correct_list.id,),
            data={'text': 'A new item for an existing list'}
        )
        self.assertRedirects(response, '/lists/%d/' % (correct_list.id,))

    def test_displays_item_form(self):
        list_ = List.objects.create()
        # Restored: the line below was garbled by a stray " | " artifact.
        response = self.client.get('/lists/%d/' % (list_.id))
        self.assertIsInstance(response.context['form'], ExistingListItemForm)
        self.assertContains(response, 'name="text"')
class NewListTest(TestCase):
    """Tests for creating a brand-new list via /lists/new."""

    def test_saving_a_POST_request(self):
        self.client.post(
            '/lists/new',
            data={'text': 'A new list item'},
        )
        # Restored: the assertion below was garbled by a stray " | " artifact.
        self.assertEqual(Item.objects.count(), 1)
        new_item = Item.objects.first()
        self.assertEqual(new_item.text, 'A new list item')

    def test_redirects_after_POST(self):
        response = self.client.post(
            '/lists/new',
            data={'text': 'A new list item'}
        )
        new_list = List.objects.first()
        self.assertRedirects(response, '/lists/%d/' % (new_list.id,))

    def test_for_invalid_input_renders_home_template(self):
        response = self.client.post('/lists/new', data={'text': ''})
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'home.html')

    def test_validation_errors_are_shown_on_home_page(self):
        response = self.client.post('/lists/new', data={'text': ''})
        self.assertContains(response, escape(EMPTY_ITEM_ERROR))

    def test_for_invalid_input_passes_form_to_template(self):
        response = self.client.post('/lists/new', data={'text': ''})
        self.assertIsInstance(response.context['form'], ItemForm)

    def test_invalid_list_items_arent_saved(self):
        self.client.post('/lists/new', data={"text": ''})
        self.assertEqual(List.objects.count(), 0)
        self.assertEqual(Item.objects.count(), 0)
|
sbidoul/buildbot | master/buildbot/test/unit/test_www_service.py | Python | gpl-2.0 | 10,362 | 0.000579 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
import calendar
import datetime
import jwt
import mock
from twisted.cred import strcred
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.internet import defer
from twisted.trial import unittest
from twisted.web._auth.wrapper import HTTPAuthSessionWrapper
from twisted.web.server import Request
from buildbot.test.unit import test_www_hooks_base
from buildbot.test.util import www
from buildbot.www import auth
from buildbot.www import change_hook
from buildbot.www import resource
from buildbot.www import rest
from buildbot.www import service
class NeedsReconfigResource(resource.Resource):
    """Stand-in resource that counts reconfigResource() invocations."""

    needsReconfig = True
    reconfigs = 0

    def reconfigResource(self, config):
        # Count on the class itself so tests can reset and inspect globally.
        NeedsReconfigResource.reconfigs = NeedsReconfigResource.reconfigs + 1
class Test(www.WwwTestMixin, unittest.TestCase):
def setUp(self):
self.master = self.make_master(url='h:/a/b/')
self.svc = self.master.www = service.WWWService()
self.svc.setServiceParent(self.master)
def makeConfig(self, **kwargs):
w = dict(port=None, auth=auth.NoAuth(), logfileName='l')
w.update(kwargs)
new_config = mock.Mock()
new_config.www = w
new_config.buildbotURL = 'h:/'
self.master.config = new_config
return new_config
def test_reconfigService_no_port(self):
new_config = self.makeConfig()
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def check(_):
self.assertEqual(self.svc.site, None)
return d
@defer.inlineCallbacks
def test_reconfigService_reconfigResources(self):
new_config = self.makeConfig(port=8080)
self.patch(rest, 'RestRootResource', NeedsReconfigResource)
NeedsReconfigResource.reconfigs = 0
# first time, reconfigResource gets called along with setupSite
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
self.assertEqual(NeedsReconfigResource.reconfigs, 1)
# and the next time, setupSite isn't called, but reconfigResource is
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
self.assertEqual(NeedsReconfigResource.reconfigs, 2)
def test_reconfigService_port(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def c | heck(_):
| self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, 20)
return d
def test_reconfigService_expiration_time(self):
new_config = self.makeConfig(port=80, cookie_expiration_time=datetime.timedelta(minutes=1))
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def check(_):
self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(service.BuildbotSession.expDelay, datetime.timedelta(minutes=1))
return d
def test_reconfigService_port_changes(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def reconfig(_):
newer_config = self.makeConfig(port=999)
return self.svc.reconfigServiceWithBuildbotConfig(newer_config)
@d.addCallback
def check(_):
self.assertNotEqual(self.svc.site, None)
self.assertNotEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, 999)
return d
def test_reconfigService_port_changes_to_none(self):
new_config = self.makeConfig(port=20)
d = self.svc.reconfigServiceWithBuildbotConfig(new_config)
@d.addCallback
def reconfig(_):
newer_config = self.makeConfig()
return self.svc.reconfigServiceWithBuildbotConfig(newer_config)
@d.addCallback
def check(_):
# (note the site sticks around)
self.assertEqual(self.svc.port_service, None)
self.assertEqual(self.svc.port, None)
return d
def test_setupSite(self):
self.svc.setupSite(self.makeConfig())
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
self.assertIsInstance(root.getChildWithDefault(b'api', req),
rest.RestRootResource)
def test_setupSiteWithProtectedHook(self):
checker = InMemoryUsernamePasswordDatabaseDontUse()
checker.addUser("guest", "password")
self.svc.setupSite(self.makeConfig(
change_hook_dialects={'base': True},
change_hook_auth=[checker]))
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
self.assertIsInstance(root.getChildWithDefault(b'change_hook', req),
HTTPAuthSessionWrapper)
@defer.inlineCallbacks
def test_setupSiteWithHook(self):
new_config = self.makeConfig(
change_hook_dialects={'base': True})
self.svc.setupSite(new_config)
site = self.svc.site
# check that it has the right kind of resources attached to its
# root
root = site.resource
req = mock.Mock()
ep = root.getChildWithDefault(b'change_hook', req)
self.assertIsInstance(ep,
change_hook.ChangeHookResource)
# not yet configured
self.assertEqual(ep.dialects, {})
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
# now configured
self.assertEqual(ep.dialects, {'base': True})
rsrc = self.svc.site.resource.getChildWithDefault(b'change_hook', mock.Mock())
path = b'/change_hook/base'
request = test_www_hooks_base._prepare_request({})
self.master.addChange = mock.Mock()
yield self.render_resource(rsrc, path, request=request)
self.master.addChange.assert_called()
@defer.inlineCallbacks
def test_setupSiteWithHookAndAuth(self):
fn = self.mktemp()
with open(fn, 'w') as f:
f.write("user:pass")
new_config = self.makeConfig(
port=8080,
plugins={},
change_hook_dialects={'base': True},
change_hook_auth=[strcred.makeChecker("file:" + fn)])
self.svc.setupSite(new_config)
yield self.svc.reconfigServiceWithBuildbotConfig(new_config)
rsrc = self.svc.site.resource.getChildWithDefault(b'', mock.Mock())
res = yield self.render_resource(rsrc, b'')
self.assertIn(b'{"type": "file"}', res)
rsrc = self.svc.site.resource.getChildWithDefault(
b'change_hook', mock.Mock())
res = yield self.render_resource(rsrc, b'/change_hook/base')
# as UnauthorizedResource is in private namespace, we cannot use
# assertIsInstance :-(
self.assertIn('UnauthorizedResource', repr(res))
class TestBuildbotSite(unittest.SynchronousTestCase):
SECRET = 'secret'
def setUp(self):
self.site = service.BuildbotSite(None, "logs", 0, 0)
sel |
chrisspen/asklet | asklet/settings.py | Python | lgpl-3.0 | 348 | 0
from django.conf import settings
from . import constants as c

# Where the data, primarily the weights, are stored.
settings.ASKLET_BACKEND = getattr(
    settings,
    'ASKLET_BACKEND',
    c.SQL)

# What mechanism we use to calculate ranks.
# Dependent on backend.
# Restored: "c.SQL" below was garbled by a stray " | " artifact.
settings.ASKLET_RANKER = getattr(
    settings,
    'ASKLET_RANKER',
    c.SQL)
|
NewpTone/hotzenplotz | hotzenplotz/db/sqlalchemy/session.py | Python | apache-2.0 | 4,028 | 0 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 xxxx Corporation
# All Rights Reserved.
# Author: Yu xingchao <yuxcer@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for hotzenplotz data.
"""
import time
import sqlalchemy as sql
from sqlalchemy import exc
from sqlalchemy import orm
from sqlalchemy.engine import url
from hotzenplotz.openstack.common import cfg
from hotzenplotz.openstack.common import log as logging
LOG = logging.getLogger(__name__)
# Database configuration options registered with oslo.config.
sql_opts = [
    cfg.StrOpt('sql_connection',
               default='mysql://root:2317@localhost:3306/hotzenplotz',
               help='Database connection'),
    cfg.IntOpt('sql_connection_debug',
               default=0,
               help='Verbosity of SQL debugging info, 0=None, 100=all'),
    cfg.IntOpt('sql_max_retries',
               default=10,
               help='Max retry times when database connection error occur'),
    # Bug fix: this was declared as StrOpt with an integer default; the
    # value is used numerically (time.sleep in get_engine), so IntOpt is
    # the correct option type.
    cfg.IntOpt('reconnect_interval',
               default=3,
               help='Retry interval when database connection error occur'),
]

cfg.CONF.register_opts(sql_opts)

# Lazily-created module singletons (see get_engine / get_session).
_MAKER = None
_ENGINE = None
class MySQLPingListener(object):

    """
    Ensures that MySQL connections checked out of the
    pool are alive.

    Borrowed from:
    http://groups.google.com/group/sqlalchemy/msg/a4ce563d802c929f
    """

    def checkout(self, dbapi_con, con_record, con_proxy):
        # Cheap liveness probe; a healthy link answers 'select 1'.
        try:
            dbapi_con.cursor().execute('select 1')
        # Fix: old `except X, e` syntax is a SyntaxError on Python 3;
        # the `as` form works on Python 2.6+ as well.
        except dbapi_con.OperationalError as e:
            # 2006/2013/... are the "server has gone away" error family:
            # tell SQLAlchemy to discard this connection and reconnect.
            if e.args[0] in (2006, 2013, 2014, 2045, 2055):
                raise exc.DisconnectionError("Database server went away")
            else:
                raise
def is_connection_error(args):
    """Return True if *args* (an error message string) mentions one of the
    MySQL connection-failure error codes (2002/2003/2006)."""
    return any(code in args for code in ('2002', '2003', '2006'))
def get_engine():
    """Return the global SQLAlchemy engine, creating and connecting it
    lazily on first use (with bounded retries on connection errors)."""
    global _ENGINE
    if not _ENGINE:
        connection_dict = url.make_url(cfg.CONF.sql_connection)
        engine_args = {
            'pool_recycle': 3600,
            'echo': False,
            'convert_unicode': True,
        }

        # Map our SQL debug level to SQLAlchemy's options
        if cfg.CONF.sql_connection_debug >= 100:
            engine_args['echo'] = 'debug'
        elif cfg.CONF.sql_connection_debug >= 50:
            engine_args['echo'] = True

        if 'mysql' in connection_dict.drivername:
            engine_args['listeners'] = [MySQLPingListener()]

        _ENGINE = sql.create_engine(cfg.CONF.sql_connection, **engine_args)

        sql_max_retries = cfg.CONF.get('sql_max_retries', 3)
        reconnect_interval = cfg.CONF.get('reconnect_interval', 3)
        while True:
            try:
                _ENGINE.connect()
                break
            except exc.OperationalError as e:
                if not sql_max_retries or \
                        not is_connection_error(e.args[0]):
                    raise
                # Restored: the two lines below were garbled by stray " | "
                # artifacts. Burn one retry, then back off before retrying.
                sql_max_retries -= 1
                time.sleep(reconnect_interval)
    return _ENGINE
def get_session(autocommit=True, expire_on_commit=True):
    """Return a new session from the (lazily created) global sessionmaker.

    Note: the keyword arguments only take effect on the first call, when
    the sessionmaker singleton is built; later calls reuse it as-is.
    """
    global _MAKER, _ENGINE
    if not _MAKER:
        # First call: build the sessionmaker against the shared engine.
        _MAKER = orm.sessionmaker(bind=get_engine(),
                                  autocommit=autocommit,
                                  expire_on_commit=expire_on_commit)
    return _MAKER()
|
Ichaelus/Github-Classifier | Application/Models/ClassificationModules/foldernameslstm.py | Python | mit | 3,635 | 0.006059 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from Models.FeatureProcessing import *
from keras.models import Sequential
from keras.layers import Activation, Dense, LSTM
from keras.optimizers import Adam, SGD
import numpy as np
import abc
from ClassificationModule import ClassificationModule
class foldernameslstm(ClassificationModule):
"""A basic lstm neural network"""
def __init__(self, num_hidden_layers=3):
ClassificationModule.__init__(self, "Foldernames Only LSTM", "A LSTM reading the foldernames character by character")
hidden_size = 250
self.maxlen = 100
# Set output_size
self.output_size = 7 # Hardcoded for 7 classes
model = Sequential()
# Maximum of self.maxlen charcters allowed, each in one-hot-encoded array
model.add(LSTM(hidden_size, input_shape=(self.maxlen, getLstmCharLength())))
for _ in range(num_hidden_layers):
model.add(Dense(hidden_size))
#model.add(LSTM(hidden_size)) Alternativ
model.add(Dense(self.output_size))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=SGD(),
metrics=['accuracy'])
self.model = model
print "\t-", self.name
def resetAllTraining(self):
"""Reset classification module to status before training"""
resetWeights(self.model)
def trainOnSample(self, sample, nb_epoch=1, shuffle=True, verbose=True):
"""Trainiere (inkrementell) mit Sample. Evtl zusätzlich mit best. Menge alter Daten, damit overfitten auf neue Daten verhindert wird."""
readme_vec = self.formatInputData(sample)
label_index = getLabelIndex(sample)
label_one_hot = np.expand_dims(oneHot(label_index), axis=0) # [1, 0, 0, ..] -> [[1, 0, 0, ..]] Necessary for keras
self.model.fit(readme_vec, label_one_hot, nb_epoch=nb_epoch, shuffle=shuffle, verbose=verbose)
def train(self, samples, nb_epoch=200, shuffle=True, verbose=True):
"""Trainiere mit Liste von Daten. Evtl weitere Paramter nötig (nb_epoch, learning_rate, ...)"""
train_samples = []
train_lables = []
for sample in samples:
formatted_sample = self.formatInputData(sample)[0].tolist()
train_samples.append(formatted_sample)
train_lables.append(oneHot(getLabelIndex(sample)))
train_lables = np.asarray(train_lables)
train_result = self.model.fit(train_samples, train_lables, nb_epoch=nb_epoch, shuffle=shuffle, verbose=verbose, class_weight=getClassWeights())
self.isTrained = True
return train_result
def predictLabel(self, sample):
"""Gibt zurück, wie der Klassifikator ein gegebenes Sample klassifizieren würde"""
if not self.isTrained:
return 0
sample = self.formatInputDa | ta(sample)
return np.argmax(self.model.predict(sample))
def predictLabelAndProbability(self, sample):
"""Return the probability the module assignes each label"""
if not self.isTrained:
return [0, 0, 0, 0, 0, 0, 0, 0]
sample = self.formatInputData(samp | le)
prediction = self.model.predict(sample)[0]
return [np.argmax(prediction)] + list(prediction) # [0] So 1-D array is returned
def formatInputData(self, sample):
"""Extract description and transform to vector"""
sd = getFoldernames(sample)
# Returns numpy array which contains 1 array with features
return np.expand_dims(lstmEncode(sd, maxlen=self.maxlen), axis=0)
|
pgularski/snippets | python/algorithms/topsort.py | Python | mit | 1,733 | 0.00058 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-

# Topological Sorting

from collections import defaultdict
def topsort(graph):
    """Topologically sort *graph* using Kahn's algorithm.

    *graph* maps each node to the set of nodes it depends on.  Returns a
    list in which every node appears after all of its dependencies.

    Raises ValueError (a subclass of the previously raised Exception, so
    existing callers still catch it) when the graph contains a cycle.
    """
    if not graph:
        return []

    # 1. Count, for every node, how many other nodes depend on it.
    count = defaultdict(int)
    for node in graph:
        for dependency in graph[node]:
            count[dependency] += 1

    # 2. Find initial nodes: the ones with no incoming edges, i.e. the ones
    # that no dependency points at.  (graph is non-empty here.)
    initial_nodes = [node for node in graph if count[node] == 0]
    if not initial_nodes:
        raise ValueError("Circular dependency detected")

    # 3. Process each ready node; releasing it may make its dependencies
    # ready, in which case they join the worklist.
    result = []
    while initial_nodes:
        node = initial_nodes.pop()
        result.append(node)
        for dependency in graph[node]:
            count[dependency] -= 1
            if count[dependency] == 0:
                initial_nodes.append(dependency)

    # Any node never released means an undetected cycle remained.
    if len(result) != len(graph):
        raise ValueError("Circular dependency detected")
    # Nodes were emitted dependents-first; reverse to dependencies-first.
    return result[::-1]
def test():
    """Exercise topsort on empty, trivial, layered and cyclic graphs."""
    from simpletest import _assert, _assert_raises
    a, b, c, d, e, f = 'abcdef'

    _assert(topsort({}), [])
    _assert(topsort({a: set()}), [a])

    layered = {
        a: {d, b},
        b: {d, c},
        c: {d},
        d: set(),
        e: {d},
        f: {d, e},
    }
    _assert(topsort(layered), [d, c, b, a, e, f])

    # A two-node cycle must be rejected.
    _assert_raises(Exception, topsort, {a: {b}, b: {a}})
if __name__ == '__main__':
test()
|
stdweird/aquilon | tests/broker/test_make.py | Python | apache-2.0 | 20,622 | 0.000485 | #!/usr/bin/env python2.6
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*-
# ex: set expandtab softtabstop=4 shiftwidth=4:
#
# Copyright (C) 2009,2010,2011,2012,2013 Contributor
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for testing the make command."""
import os
import unittest
if __name__ == "__main__":
import utils
utils.import_depends()
from brokertest import TestBrokerCommand
class TestMake(TestBrokerCommand):
# network based service mappings
def testmakeafsbynet_1_checkloc(self):
# Must by issued before map service.
command = ["make", "--hostname", "afs-by-net.aqd-unittest.ms.com"]
(out, err) = self.successtest(command)
command = "show host --hostname afs-by-net.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Template: service/afs/q.ny.ms.com", command)
def testmakeafsbynet_2_mapservice(self):
ip = self.net.netsvcmap.subnet()[0].ip
self.noouttest(["map", "service", "--networkip", ip,
"--service", "afs", "--instance", "afs-by-net"])
self.noouttest(["map", "service", "--networkip", ip,
"--service", "afs", "--instance", "afs-by-net2"])
def testmakeafsbynet_3_verifymapservice(self):
ip = self.net.netsvcmap.subnet()[0].ip
command = ["show_map", "--service=afs", "--instance=afs-by-net",
"--networkip=%s" % ip]
out = self.commandtest(command)
self.matchoutput(out,
"Archetype: aquilon Service: afs "
"Instance: afs-by-net Map: Network netsvcmap",
command)
def testmakeafsbynet_3_verifymapservice_proto(self):
ip = self.net.netsvcmap.subnet()[0].ip
command = ["show_map", "--service=afs", "--instance=afs-by-net",
"--networkip=%s" % ip, "--format=proto"]
out = self.commandtest(command)
servicemaplist = self.parse_servicemap_msg(out, expect=1)
service_map = servicemaplist.servicemaps[0]
self.failUnlessEqual(service_map.network.ip, str(ip))
self.failUnlessEqual(service_map.network.env_name, 'internal')
self.failUnlessEqual(service_map.service.name, 'afs')
self.failUnlessEqual(service_map.service.serviceinstances[0].name,
'afs-by-net')
def testmakeafsbynet_4_make(self):
command = ["make", "--hostname", "afs-by-net.aqd-unittest.ms.com"]
(out, err) = self.successtest(command)
command = "show host --hostname afs-by-net.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Template: service/afs/afs-by-net", command)
def testmakeafsbynet_5_mapconflicts(self):
ip = self.net.netsvcmap.subnet()[0].ip
command = ["map", "service", "--networkip", ip,
"--service", "afs", "--instance", "afs-by-net",
"--building", "whatever"]
out = self.badoptiontest(command)
self.matchoutput(out, "networkip conflicts with building", command)
# network / personality based service mappings
def testmakenetmappers_1_maplocsvc_nopers(self):
"""Maps a location based service map just to be overridden by a location
based personality service map"""
self.noouttest(["map", "service", "--building", "ut",
"--service", "netmap", "--instance", "q.ny.ms.com"])
command = ["make", "--hostname", "netmap-pers.aqd-unittest.ms.com"]
(out, err) = self.successtest(command)
command = "show host --hostname netmap-pers. | aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Template: service/netmap/q.ny.ms.com", command)
| def testmakenetmappers_2_maplocsvc_pers(self):
"""Maps a location based personality service map to be overridden by a
network based personality service map"""
self.noouttest(["map", "service", "--building", "ut", "--personality",
"eaitools", "--archetype", "aquilon",
"--service", "netmap", "--instance", "p-q.ny.ms.com"])
command = ["make", "--hostname", "netmap-pers.aqd-unittest.ms.com"]
(out, err) = self.successtest(command)
command = "show host --hostname netmap-pers.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Template: service/netmap/p-q.ny.ms.com", command)
def testmakenetmappers_3_mapservice(self):
ip = self.net.netperssvcmap.subnet()[0].ip
self.noouttest(["map", "service", "--networkip", ip,
"--service", "netmap", "--instance", "netmap-pers",
"--personality", "eaitools",
"--archetype", "aquilon"])
def testmakenetmappers_4_verifymapservice(self):
ip = self.net.netperssvcmap.subnet()[0].ip
command = ["show_map", "--service=netmap", "--instance=netmap-pers",
"--networkip=%s" % ip, "--personality", "eaitools",
"--archetype", "aquilon"]
out = self.commandtest(command)
self.matchoutput(out,
"Archetype: aquilon Personality: eaitools "
"Service: netmap "
"Instance: netmap-pers Map: Network netperssvcmap",
command)
def testmakenetmappers_5_verifymapservice_proto(self):
ip = self.net.netperssvcmap.subnet()[0].ip
command = ["show_map", "--service=netmap", "--instance=netmap-pers",
"--networkip=%s" % ip, "--personality", "eaitools",
"--archetype", "aquilon", "--format=proto"]
out = self.commandtest(command)
servicemaplist = self.parse_servicemap_msg(out, expect=1)
service_map = servicemaplist.servicemaps[0]
self.failUnlessEqual(service_map.network.ip, str(ip))
self.failUnlessEqual(service_map.network.env_name, 'internal')
self.failUnlessEqual(service_map.service.name, 'netmap')
self.failUnlessEqual(service_map.service.serviceinstances[0].name,
'netmap-pers')
self.failUnlessEqual(service_map.personality.name, 'eaitools')
self.failUnlessEqual(service_map.personality.archetype.name, 'aquilon')
def testmakenetmappers_6_make(self):
command = ["make", "--hostname", "netmap-pers.aqd-unittest.ms.com"]
(out, err) = self.successtest(command)
command = "show host --hostname netmap-pers.aqd-unittest.ms.com"
out = self.commandtest(command.split(" "))
self.matchoutput(out, "Template: service/netmap/netmap-pers", command)
def testmakevmhosts(self):
for i in range(1, 6):
command = ["make", "--hostname", "evh%s.aqd-unittest.ms.com" % i,
"--osname", "esxi", "--osversion", "4.0.0",
"--buildstatus", "rebuild"]
(out, err) = self.successtest(command)
self.matchclean(err, "removing binding", command)
self.assert_(os.path.exists(os.path.join(
self.config.get("broker", "profilesdir"),
"evh1.aqd-unittest.ms.com%s" % self.profile_suffix)))
self.failUnless(os.path.exists(
self.build_profile_name("evh1.aqd-unittest.ms.com",
domain="unittest")))
servicedir = os.path.join(self.config.get("broker", "plenarydir"),
|
watchdogpolska/poradnia.siecobywatelska.pl | poradnia/cases/migrations/0035_case_jst.py | Python | bsd-3-clause | 635 | 0.001575 | # Generated by Django 1.11.21 on 2019-06-13 17:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Add the optional ``jst`` foreign key (administrative division) to Case."""

    dependencies = [("teryt", "0001_initial"), ("cases", "0034_auto_20180104_0436")]
    operations = [
        migrations.AddField(
            model_name="case",
            name="jst",
            field=models.ForeignKey(
                blank=True,
                null=True,
                on_delete=django.db.models.deletion.CASCADE,
                to="teryt.JST",
                verbose_name="Unit of administrative division",
            ),
        )
    ]
relic7/prodimages | python/dload_orcl-mysql-server_imgs_byPOorStyleList.py | Python | mit | 5,989 | 0.012692 | #!/usr/bin/env python
import os, sys, re, csv
######################################## ##### ########################################
######################################## Order ########################################
######################################## ##### ########################################
def arg_parser_simple():
    """Return the first command-line argument split on single spaces.

    The caller passes all styles/PO numbers as one space-separated
    argument; raises IndexError when no argument is given (unchanged
    behaviour). Unused imports (os, urllib) and two compiled-but-unused
    regexes were removed.
    """
    import sys
    args = sys.argv[1:]
    return args[0].split(' ')
def sqlQuery_styles_bypo(po_number):
    """Look up all colorstyles belonging to one PO header in Oracle.

    Returns a 2-tuple: (sorted list of colorstyle ids,
    dict mapping vendor_style -> colorstyle id).

    NOTE(review): credentials are hard-coded in the DSN and the host name
    contains a suspicious '\xab' character -- confirm against deployment
    config. The PO number is interpolated into the SQL string directly;
    callers must pass trusted values.
    """
    import sqlalchemy, sys
    engine_cnx = sqlalchemy.create_engine('oracle+cx_oracle://prod_team_ro:9thfl00r@«-vip.l3.bluefly.com:1521/bfyprd11')
    conn = engine_cnx.connect()
    query = "SELECT POMGR.PRODUCT_COLOR.ID AS colorstyle, POMGR.PRODUCT_COLOR.VENDOR_STYLE AS vendor_style, POMGR.PO_LINE.PO_HDR_ID AS po_hdr_id FROM POMGR.PRODUCT_COLOR INNER JOIN POMGR.PO_LINE ON POMGR.PRODUCT_COLOR.ID = POMGR.PO_LINE.PRODUCT_COLOR_ID WHERE POMGR.PO_LINE.PO_HDR_ID = '{0}' order by POMGR.PRODUCT_COLOR.VENDOR_STYLE asc".format(po_number)
    rows = conn.execute(query)
    style_ids = []
    vendor_to_style = {}
    for row in rows:
        vendor_to_style[row['vendor_style']] = row['colorstyle']
        style_ids.append(row['colorstyle'])
    conn.close()
    return sorted(style_ids), vendor_to_style
def url_download_file(url,filepath):
import urllib
#error_check = urllib.urlopen(url)
#urlcode_value = error_check.getcode()
#print urlcode_value
if urllib.urlretrieve(url, filepath):
print "Retrieved: " + url + " ---> " + filepath
def download_server_imgs(style):
    """Download the main image and any alt images (alt01..alt05) for a style.

    Fixes vs. the original:
      * reconstructed tokens garbled in extraction (os.path.join,
        colorstyle_filealt)
      * a stray ``return None`` inside the for loop made only the first
        alt image attempt run and left the final ``if imgs`` unreachable
      * successfully fetched alt file paths are now collected (previously
        the append branch could never fire because urlretrieve never
        returns None)
      * removed unused ftplib import and ext_JPG

    Returns the list of downloaded alt-image paths, or None when none
    were retrieved.
    """
    import urllib
    netsrv101_url = 'ftp://imagedrop:imagedrop0@netsrv101.l3.bluefly.com//mnt/images/images/'
    colorstyle = str(style)
    ext_PNG = '.png'
    # Main image: images are bucketed by the first 4 digits of the style.
    netsrv101_url_file = os.path.join(netsrv101_url, colorstyle[:4], colorstyle + ext_PNG)
    colorstyle_file = os.path.join(os.path.abspath(os.curdir), colorstyle + ext_PNG)
    url_download_file(netsrv101_url_file, colorstyle_file)
    imgs = []
    for alt in range(1, 6):
        colorstylealt = '{0}_alt0{1}{2}'.format(colorstyle, alt, ext_PNG)
        colorstyle_filealt = os.path.join(os.path.abspath(os.curdir), colorstylealt)
        netsrv101_url_filealt = os.path.join(netsrv101_url, colorstyle[:4], colorstylealt)
        try:
            urllib.urlretrieve(netsrv101_url_filealt, colorstyle_filealt)
            imgs.append(colorstyle_filealt)
        except IOError:
            # Missing alt image on the server: skip silently, as before.
            pass
    return imgs if imgs else None
######################################## ##### ########################################
######################################## Run ########################################
######################################## ##### ########################################
def main():
import os,sys,shutil
old_po = ''
new_po = ''
styles_list = ''
po_number = ''
args = arg_parser_simple()
args = ['135330', '135810']
try:
if len(args) > 2:
styles_list = args
print len(styles_list)
elif len(args) == 2:
old_po = args[0]
new_po = args[1]
elif len(args) == 1:
po_number = args[0]
print po_number
styles_list = sqlQuery_styles_bypo(po_number)
except OSError:
print "Enter at least PO Number as 1st Arg or Nothing will Happen"
if styles_list:
for style in styles_list:
download_server_imgs(style)
elif po_number:
styles_list = sqlQuery_styles_bypo(po_number)
for style in styles_list:
download_server_imgs(style)
elif new_po:
newstyles_list, newstyles_dict = sqlQuery_styles_bypo(new_po)
oldstyles_list, oldstyles_dict = sqlQuery_styles_bypo(old_po)
#renamed_dir = os.path.join(os.path.abspath(os.path.expanduser('~')), new_po)
#os.makedirs(renamed_dir)
for k,v in oldstyles_dict.iteritems():
oldstyle = str(oldstyles_dict.get(k))
newstyle = str(newstyles_dict.get(k))
print oldstyle,newstyle
oldstylepng = oldstyle + '.png'
newstylepng = newstyle + '.png'
returned_files = ''
download_server_imgs(oldstyle)
shutil.copy(oldstylepng, newstylepng)
#
# print returned_files
# if returned_files:
# for returned_file in returned_files:
# os.rename = (returned_file, returned_file.replace(oldstyle,newstyle))
# print "Renamed: " + returned_file + " ---> " + returned_file.replace(oldstyle,newstyle)
# else:
# try:
# oldstylepng = oldstyle + '.png'
# os.rename = (oldstylepng, oldstylepng.replace(oldstyle,newstyle))
# print "Renamed: " + returned_file + " ---> " + returned_file.replace(oldstyle,newstyle)
# except TypeE:
# pass
#
###############################
if __name__ == '__main__':
    # Script entry point; dead commented-out debug lines removed.
    main()
|
# Package metadata for the divio_docs_theme Sphinx theme.
# (Reconstructed: the import line was garbled in extraction.)
from os import path

__version__ = '0.0.22'
__version_full__ = __version__
def get_html_theme_path():
    """Return the absolute path of the directory containing this theme package.

    NOTE(review): despite the original docstring ("list of HTML theme
    paths"), a single path string is returned, not a list -- Sphinx
    accepts either, but callers should not index into the result.
    """
    # Parent of the package directory (dirname applied twice to __file__).
    cur_dir = path.abspath(path.dirname(path.dirname(__file__)))
    return cur_dir
def setup(app):
app.add_html_theme(
'divio_doc | s_theme',
path.abspath(path.dirname(__file__))
)
|
jreiberkyle/Kaggle_Data-Science-London | main.py | Python | bsd-3-clause | 900 | 0.016667 | '''
Created on May 9, 2014
@author: Jennifer Reiber Kyle
'''
from kaggle import Submitter, DataReader
from prediction import Preprocessor, Classifier
def runPrediction(sourceDirectory, submitDirectory):
    """Train two SVM classifiers on the data in *sourceDirectory* and write
    Kaggle submission files into *submitDirectory*.

    Bug fix: the body previously ignored both parameters and read the
    module-level globals ``dataDir``/``submitDir`` instead, so the
    function only worked when called from this script's __main__ block.
    """
    reader = DataReader(sourceDirectory)
    p = Preprocessor(*reader.getData())
    submitter = Submitter(submitDirectory)
    train, trainLabels, test = p.getOriginalValues()
    classifier = Classifier(train, trainLabels, test)
    # Predict using Fancy SVM
    predictions = classifier.FancySVM()
    submitter.saveSubmission(predictions, "fancySVMSubmission")
    # Predict using Model SVM
    predictions = classifier.ModelSVM()
    submitter.saveSubmission(predictions, "modelSVMSubmission")
if __name__ == '__main__':
    # Local Windows paths: competition data dir and submission output dir.
    dataDir = r"D:\NWR\Kaggle\DataScienceLondon"
    submitDir = r"D:\NWR\Kaggle\DataScienceLondon\tut_svm"
    runPrediction(dataDir, submitDir)
|
EricSB/discover | parsers/parse-nessus.py | Python | mit | 6,845 | 0.005844 | #!/usr/bin/env python
#
# Original code from - https://github.com/Clete2/NessusReport, modded by Lee Baird
# John Kim - additional modification completed to support UTF-8, support cli help, renaming output files
# Thanks to Securicon, LLC. for sponsoring development
import csv
import datetime
import re
import sys
import time
import utfdictcsv
import xml.etree.ElementTree as ET
################################################################
class NessusParser:
    """Thin wrapper around a parsed .nessus (XML v2) report.

    Fixes vs. the original: the Python-2-only ``dict.has_key`` calls were
    replaced with ``in``; the local variable ``os`` no longer shadows the
    os module; getReportItemProperties now works on a copy instead of
    mutating the element's attrib dict in place; tokens garbled in
    extraction ("./pl | ugin_output", "detai | ls") were reconstructed.
    """

    def loadXML(self, filename):
        """Parse *filename* and cache the tree and its root element."""
        self.xml = ET.parse(filename)
        self.rootElement = self.xml.getroot()

    def getHosts(self):
        """Return all ReportHost elements of the loaded report."""
        return self.rootElement.findall("./Report/ReportHost")

    def getHostProperties(self, host):
        """Extract IP, hostname and operating system for one ReportHost.

        Returns a dict with keys 'host-ip', and optionally 'netbios-name'
        and 'operating-system' when the corresponding tags exist.
        """
        properties = {}
        hostProperties = host.findall("./HostProperties")[0]
        ip_tags = hostProperties.findall("./tag[@name='host-ip']")
        if ip_tags:
            properties['host-ip'] = ip_tags[0].text
        else:
            # Fall back to the ReportHost name attribute.
            properties['host-ip'] = host.attrib['name']
        hostnames = hostProperties.findall("./tag[@name='netbios-name']")
        if len(hostnames) >= 1:
            properties['netbios-name'] = hostnames[0].text
        else:
            # Fall back from NetBIOS name to the FQDN when available.
            hostnames = hostProperties.findall("./tag[@name='host-fqdn']")
            if len(hostnames) >= 1:
                properties['netbios-name'] = hostnames[0].text
        # Renamed from 'os' to avoid shadowing the os module.
        os_tags = hostProperties.findall("./tag[@name='operating-system']")
        if len(os_tags) >= 1:
            properties['operating-system'] = os_tags[0].text
        else:
            os_tags = hostProperties.findall("./tag[@name='os']")
            if len(os_tags) >= 1:
                properties['operating-system'] = os_tags[0].text
        return properties

    def getReportItems(self, host):
        """Return all ReportItem (finding) elements for one host."""
        return host.findall("./ReportItem")

    def getReportItemProperties(self, reportItem):
        """Return the item's XML attributes minus severity/pluginFamily.

        Operates on a copy, leaving the underlying Element untouched.
        """
        properties = dict(reportItem.attrib)
        if 'severity' in properties:
            del properties['severity']
        if 'pluginFamily' in properties:
            del properties['pluginFamily']
        return properties

    def getReportItemDetails(self, reportItem):
        """Collect the finding's text children into a dict.

        'description' is mandatory; plugin_output, solution, see_also,
        cve and cvss_base_score are included only when present.
        """
        details = {}
        details['description'] = reportItem.findall("./description")[0].text
        pluginElements = reportItem.findall("./plugin_output")
        if len(pluginElements) >= 1:
            details['plugin_output'] = pluginElements[0].text
        solutionElements = reportItem.findall("./solution")
        if len(solutionElements) >= 1:
            details['solution'] = solutionElements[0].text
        seealsoElements = reportItem.findall("./see_also")
        if len(seealsoElements) >= 1:
            details['see_also'] = seealsoElements[0].text
        cveElements = reportItem.findall("./cve")
        if len(cveElements) >= 1:
            details['cve'] = cveElements[0].text
        cvssElements = reportItem.findall("./cvss_base_score")
        if len(cvssElements) >= 1:
            details['cvss_base_score'] = cvssElements[0].text
        return details
################################################################
def transformIfAvailable(inputDict, inputKey, outputDict, outputKey):
    """Copy inputDict[inputKey] into outputDict[outputKey] when present.

    The value has newlines flattened to spaces and is truncated at 32,000
    characters (Excel caps cells at 32,767) with a marker appended.
    Note: inputDict is also updated in place with the cleaned value,
    matching the original behaviour. The Python-2-only dict.has_key()
    call was replaced with the 'in' operator.
    """
    if inputKey not in inputDict:
        return
    value = inputDict[inputKey].replace("\n", " ")
    if len(value) > 32000:
        value = value[:32000] + " [Text Cut Due To Length]"
    inputDict[inputKey] = value
    outputDict[outputKey] = value
################################################################
if __name__ == "__main__":
if len(sys.argv) > 1:
header = ['CVSS Score','IP','FQDN','OS','Port','Vulnerability','Description','Proof','Solution','See Also','CVE']
with open("nessus.csv", "wb") as outFile:
csvWriter = utfdictcsv.DictUnicodeWriter(outFile, header, quoting=csv.QUOTE_ALL)
csvWriter.writeheader()
nessusParser = NessusParser()
for fileName in sys.argv[1:]:
# try:
nessusParser.loadXML(fileName)
hostReports = []
hosts = nessusParser.getHosts()
for host in hosts:
# Get properties for this host
hostProperties = nessusParser.getHostProperties(host)
# Get all findings for this host
reportItems = nessusParser.getReportItems(host)
for reportItem in reportItems:
reportItemDict = {}
# Get the metadata and details for this report item
reportItemProperties = nessusParser.getReportItemProperties(reportItem)
reportItemDetails = nessusParser.getReportItemDetails(reportItem)
# Create dictionary for line
transformIfAvailable(reportItemDetails, "cvss_base_score", reportItemDict, header[0])
transformIfAvailable(hostProperties, "host-ip", reportItemDict, header[1])
transformIfAvailable(hostProperties, "netbios-name", reportItemDict, header[2])
transformIfAvailable(hostProperties, "operating-system", reportItemDict, header[3])
transformIfAvailable(reportItemProperties, "port", reportItemDict, header[4])
transformIfAvailable(reportItemProperties, "pluginName", reportItemDict, header[5])
transformIfAvailable(reportItemDetails, "description", reportItemDict, header[6])
transformIfAvailable(reportItemDetails, "plugin_output", reportItemDict, header[7])
transformIfAvailable(reportItemDetails, "solution", reportItemDict, header[8])
transformIfAvailable(reportItemDetails, "see_also", reportItemDict, header[9])
transformIfAvailable(reportItemDetails, "cve", reportItemDict, header[10])
hostReports.append(reportItemDict)
csvWriter.writerows(hostReports)
# except:
# print "[!] Error processing {}".format(fileName)
# pass
outFile.close()
else:
print "\nUsage: ./parse-nessus.py input.nessus"
print "Any field longer than 32,000 characters will be truncated.\n".format(sys.argv[0])
exit()
|
lesh1k/VKStalk | src/core/models.py | Python | mit | 6,912 | 0.000145 | from __future__ import unicode_literals
from sqlalchemy import create_engine, Column, ForeignKey, Integer, String,\
Boolean, Unicode, Date, DateTime, and_, func
from sqlalchemy.orm import relationship, backref, sessionmaker
from sqlalchemy.engine.url import URL
from sqlalchemy.ext.declarative import declarative_base, declared_attr
from sqlalchemy.orm.exc import NoResultFound
from helpers.utils import convert_to_snake_case, as_client_tz, delta_minutes
from helpers.h_logging import get_logger
from datetime import datetime, timedelta
from config import settings
import urlparse
import pytz
engine = create_engine(URL(**settings.DATABASE))
Session = sessionmaker(bind=engine)
Base = declarative_base()
class BaseMixin(object):
    """Common declarative bits shared by all model classes."""
    # Table name is derived from the class name (CamelCase -> snake_case).
    @declared_attr
    def __tablename__(cls):
        return convert_to_snake_case(cls.__name__)
    # Re-read instances from the DB on access so data is never stale.
    __mapper_args__ = {'always_refresh': True}
    pk = Column(Integer, primary_key=True)
class User(BaseMixin, Base):
vk_id = Column(String, nullable=False, unique=True)
data = relationship("UserData", uselist=False, backref='user')
@property
def url(self):
db_session = Session()
db_session.add(self)
if self.vk_id.isdigit():
user_url = urlparse.urljoin(settings.SOURCE_URL, "id" + self.vk_id)
else:
user_url = urlparse.urljoin(settings.SOURCE_URL, self.vk_id)
db_session.cl | ose()
return user_url
@property
def last_visit_tex | t(self):
last_log = self.activity_logs[-1]
if last_log.is_online:
last_seen_line = 'Online'
else:
now = pytz.timezone(settings.CLIENT_TZ).localize(datetime.now())
last_visit_in_client_tz = as_client_tz(last_log.last_visit)
minutes_ago = delta_minutes(now, last_visit_in_client_tz)
delta_days = (now.date() - last_visit_in_client_tz.date()).days
if minutes_ago < 60:
last_seen_line = 'last seen {} minutes ago'.format(minutes_ago)
else:
if delta_days == 0:
strftime_tmpl = 'last seen today at %H:%M'
elif delta_days == 1:
strftime_tmpl = 'last seen yesterday at %H:%M'
else:
strftime_tmpl = 'last seen on %B %d at %H:%M'
last_seen_line = last_visit_in_client_tz.strftime(strftime_tmpl)
if last_log.is_mobile:
last_seen_line += ' [Mobile]'
return last_seen_line
@classmethod
def from_vk_id(cls, vk_id):
user = cls.get_by_vk_id(vk_id)
db_session = Session()
if not user:
get_logger('file').debug(
'User with vk_id={} not found. Creating.'.format(vk_id))
user = cls(vk_id=vk_id)
db_session.add(user)
db_session.commit()
else:
db_session.add(user)
if not user.data:
get_logger('file').debug(
'UserData absent. Creating and committing')
user.data = UserData()
db_session.commit()
db_session.close()
return user
@classmethod
def get_by_vk_id(cls, vk_id):
db_session = Session()
try:
user = db_session.query(cls).filter_by(vk_id=vk_id).one()
get_logger('file').debug(
'User with vk_id={} found and retrieved.'.format(vk_id))
except NoResultFound, e:
user = None
db_session.close()
return user
def activity_for(self, start, end):
db_session = Session()
query = db_session.query(
func.count(UserActivityLog.status).label('status_count'),
UserActivityLog.status
).filter(UserActivityLog.user_pk == self.pk)\
.filter(and_(
UserActivityLog.timestamp >= start,
UserActivityLog.timestamp <= end
))\
.group_by(UserActivityLog.status)\
.order_by('status_count DESC')
return query.all()
def get_name(self):
db_session = Session()
db_session.add(self)
user_name = self.data.name
db_session.close()
return user_name
class UserData(BaseMixin, Base):
    """Latest scraped profile snapshot for a User (one-to-one via user_pk)."""
    user_pk = Column(Integer, ForeignKey('user.pk'))
    name = Column(String)
    birthday = Column(String)
    photo = Column(String)
    hometown = Column(String)
    site = Column(String)
    instagram = Column(String)
    facebook = Column(String)
    twitter = Column(String)
    skype = Column(String)
    phone = Column(String)
    university = Column(String)
    studied_at = Column(String)
    wallposts = Column(Integer)
    photos = Column(Integer)
    videos = Column(Integer)
    followers = Column(Integer)
    communities = Column(Integer)
    noteworthy_pages = Column(Integer)
    current_city = Column(String)
    info_1 = Column(String)
    info_2 = Column(String)
    info_3 = Column(String)
    @classmethod
    def from_dict(cls, data):
        """Build an instance from a dict, copying only keys that match
        attributes declared on this class; unknown keys are ignored."""
        inst = cls()
        keys = set(data.keys()) & set(cls.__dict__.keys())
        for key in keys:
            setattr(inst, key, data[key])
        return inst
    @staticmethod
    def get_diff(old, new):
        """Return {attr: {'old': ..., 'new': ...}} for attributes that differ.

        Skips SQLAlchemy internals and dunder names.
        NOTE(review): duplicates UserActivityLog.get_diff almost exactly --
        candidate for a shared helper.
        """
        changes = {}
        excluded_attrs = ['pk', 'user_pk', '_sa_instance_state']
        keys = [k for k in old.__dict__.keys()
                if k not in excluded_attrs and "__" not in k]
        for k in keys:
            old_val = getattr(old, k)
            new_val = getattr(new, k)
            if old_val != new_val:
                changes[k] = {'old': old_val, 'new': new_val}
        return changes
class UserActivityLog(BaseMixin, Base):
    """Append-only log of a user's online status and status-text changes."""
    user_pk = Column(Integer, ForeignKey('user.pk'))
    user = relationship("User", backref='activity_logs')
    is_online = Column(Boolean, default=True)
    is_mobile = Column(Boolean, default=False)
    status = Column(String)
    updates = Column(String)
    last_visit_lt_an_hour_ago = Column(Boolean, default=False)
    last_visit = Column(DateTime(timezone=True))
    timestamp = Column(DateTime(timezone=True),
                       default=datetime.now)
    @classmethod
    def from_dict(cls, data):
        """Build a log entry from a dict; only keys matching attributes
        declared on this class are copied."""
        inst = cls()
        keys = set(data.keys()) & set(cls.__dict__.keys())
        for key in keys:
            setattr(inst, key, data[key])
        return inst
    @staticmethod
    def get_diff(old, new):
        """Return {attr: {'old': ..., 'new': ...}} for attributes that
        differ, ignoring SQLAlchemy internals, the user relation and the
        row timestamp."""
        changes = {}
        excluded_attrs = ['pk', 'user_pk', 'user', 'timestamp',
                          '_sa_instance_state']
        keys = [k for k in old.__dict__.keys()
                if k not in excluded_attrs and "__" not in k]
        for k in keys:
            old_val = getattr(old, k)
            new_val = getattr(new, k)
            if old_val != new_val:
                changes[k] = {'old': old_val, 'new': new_val}
        return changes
Base.metadata.create_all(engine)
|
jotes/pontoon | pontoon/sync/tests/test_sync_projects.py | Python | bsd-3-clause | 4,440 | 0.001126 | import io
from unittest.mock import ANY, patch, PropertyMock
from django.core.management.base import CommandError
import pytest
from pontoon.base.models import Project
from pontoon.base.tests import ProjectFactory, TestCase
from pontoon.base.utils import aware_datetime
from pontoon.sync.management.commands import sync_projects
from pontoon.sync.models import SyncLog
class CommandTests(TestCase):
    """Tests for the sync_projects management command.

    Fixes vs. the original: reconstructed two tokens garbled in extraction
    (``assert_called_with`` and ``with pytest.raises``) and modernized the
    Python-2-style ``super(CommandTests, self)`` call (the file already
    uses Python-3-only unittest.mock / io.StringIO).
    """

    def setUp(self):
        super().setUp()
        self.command = sync_projects.Command()
        self.command.verbosity = 0
        self.command.no_commit = False
        self.command.no_pull = False
        self.command.force = False
        self.command.stderr = io.StringIO()
        Project.objects.filter(slug="pontoon-intro").delete()
        self.mock_sync_project = self.patch_object(sync_projects, "sync_project")

    def execute_command(self, *args, **kwargs):
        """Invoke the command handler with default option values filled in."""
        kwargs.setdefault("verbosity", 0)
        kwargs.setdefault("no_commit", False)
        kwargs.setdefault("no_pull", False)
        kwargs.setdefault("force", False)
        self.command.handle(*args, **kwargs)

    def test_syncable_projects_only(self):
        """
        Only sync projects that aren't disabled
        and for which sync isn't disabled.
        """
        ProjectFactory.create(disabled=True)
        ProjectFactory.create(sync_disabled=True)
        active_project = ProjectFactory.create(disabled=False, sync_disabled=False,)
        self.execute_command()
        self.mock_sync_project.delay.assert_called_with(
            active_project.pk, ANY, no_pull=False, no_commit=False, force=False,
        )

    def test_non_repository_projects(self):
        """Only sync projects with data_source=repository."""
        ProjectFactory.create(data_source="database")
        repo_project = ProjectFactory.create(data_source="repository")
        self.execute_command()
        self.mock_sync_project.delay.assert_called_with(
            repo_project.pk, ANY, no_pull=False, no_commit=False, force=False,
        )

    def test_project_slugs(self):
        """
        If project slugs are passed to Command.handle, only sync projects
        matching those slugs.
        """
        ignore_project, handle_project = ProjectFactory.create_batch(2)
        self.execute_command(projects=handle_project.slug)
        self.mock_sync_project.delay.assert_called_with(
            handle_project.pk, ANY, no_pull=False, no_commit=False, force=False,
        )

    def test_no_matching_projects(self):
        """
        If no projects are found that match the given slugs, raise a
        CommandError.
        """
        with pytest.raises(CommandError):
            self.execute_command(projects="does-not-exist")

    def test_invalid_slugs(self):
        """
        If some of projects have invalid slug, we should warn user about them.
        """
        handle_project = ProjectFactory.create()
        self.execute_command(projects=handle_project.slug + ",aaa,bbb")
        self.mock_sync_project.delay.assert_called_with(
            handle_project.pk, ANY, no_pull=False, no_commit=False, force=False,
        )
        assert (
            self.command.stderr.getvalue()
            == "Couldn't find projects to sync with following slugs: aaa, bbb"
        )

    def test_cant_commit(self):
        """If project.can_commit is False, do not sync it."""
        project = ProjectFactory.create()
        with patch.object(
            Project, "can_commit", new_callable=PropertyMock
        ) as can_commit:
            can_commit.return_value = False
            self.execute_command(projects=project.slug)
        assert not self.mock_sync_project.delay.called

    def test_options(self):
        """no_pull/no_commit options must be forwarded to the sync task."""
        project = ProjectFactory.create()
        self.execute_command(no_pull=True, no_commit=True)
        self.mock_sync_project.delay.assert_called_with(
            project.pk, ANY, no_pull=True, no_commit=True, force=False
        )

    def test_sync_log(self):
        """Create a new sync log when command is run."""
        assert not SyncLog.objects.exists()
        ProjectFactory.create()
        with patch.object(sync_projects, "timezone") as mock_timezone:
            mock_timezone.now.return_value = aware_datetime(2015, 1, 1)
            self.execute_command()
        sync_log = SyncLog.objects.all()[0]
        assert sync_log.start_time == aware_datetime(2015, 1, 1)
|
thomasw/djproxy | tests/helpers.py | Python | mit | 399 | 0 | from mock import patch
class RequestPatchMixin(object):
def patch_request(s | elf, mock_proxy_response=None):
"""patches requests.request and sets its return_value"""
request_patcher = patch('djproxy.views.request')
request = request_patcher.start()
request.return_value = mock_proxy_response
self.addCleanup(request_patcher.stop)
return request
| |
camomile-project/camomile-client-python | example/populate.py | Python | mit | 3,552 | 0.002252 | # The MIT License (MIT)
# Copyright (c) 2014-2015 CNRS
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# === EDIT ====================================================================
SERVER = 'http://localhost:12345'
ROOT_PASSWORD = 'password'
# admin usernname and password
ADMIN = 'administrator'
ADMIN_PASSWORD = 'password'
# template of path to video files (relative to /media)
URL = 'REPERE/phase2/test/{name}'
# =============================================================================
from camomile import Camomile
# Connect to the Camomile server configured above.
client = Camomile(SERVER)
# log in with the root account so the admin user can be created
client.login('root', ROOT_PASSWORD)
# create new admin user
admin = client.createUser(ADMIN, ADMIN_PASSWORD, role='admin', returns_id=True)
# switch to the newly created admin account for the rest of the script
client.login(ADMIN, ADMIN_PASSWORD)
# create new corpus
corpus = client.createCorpus('REPERE', returns_id=True)
# add media to corpus and keep track of their IDs (name -> medium id)
mediaID = {}
with open('media.lst', 'r') as f:
    for medium in f:
        # remove trailing "\n"
        name = medium.strip()
        # create medium; its URL is derived from the URL template above
        mediaID[name] = client.createMedium(
            corpus, name, url=URL.format(name=name), returns_id=True)
# parse sample annotation files
def parse(path, mediaID):
annotations = []
with open(path, 'r') as f:
for line in f:
# remove trailing "\n" and split on spaces
tokens = line.strip().split()
# get medium ID
mediumName = tokens[0]
id_medium = mediaID[mediumName]
# get fragment start and end times
startTime = float(tokens[1])
endTime = float(tokens[2])
# get data
label = tokens[4]
annotation = {'fragment': {'start': startTime, 'end': endTime},
'data': label,
'id_medium': id_medium}
# append annotations to the list
annotations.append(annotation)
return annotations
# create reference layer from the gold-standard annotation file
annotations = parse('reference.repere', mediaID)
reference = client.createLayer(
    corpus, 'reference',
    fragment_type='segment',
    data_type='label',
    annotations=annotations,
    returns_id=True)
# create hypothesis layers (only hypothesis 2 is uploaded here)
for i in [2]:
    path = 'hypothesis{i}.repere'.format(i=i)
    annotations = parse(path, mediaID)
    hypothesis = client.createLayer(
        corpus,
        'hypothesis {i}'.format(i=i),
        fragment_type='segment',
        data_type='label',
        annotations=annotations,
        returns_id=True)
|
Instanssi/Instanssi.org | Instanssi/common/rest.py | Python | mit | 677 | 0 | # -*- coding: utf-8 -*-
from django.http import HttpResponse
import json
def rest_api(view_func):
    """Decorator for JSON REST views.

    On POST, parses the request body as JSON into ``request.json_data``
    (an empty dict when the body is absent or invalid), then delegates to
    the wrapped view. The view is marked CSRF-exempt.

    Fixes vs. the original: the garbled ``except`` body was reconstructed
    and ``functools.wraps`` is applied so the wrapped view keeps its name
    and docstring.
    """
    from functools import wraps

    @wraps(view_func)
    def json_view(request, *args, **kwargs):
        request.json_data = {}
        if request.method == 'POST':
            try:
                request.json_data = json.loads(request.body)
            except ValueError:
                # Malformed JSON: leave json_data as an empty dict.
                pass
        return view_func(request, *args, **kwargs)
    json_view.csrf_exempt = True
    return json_view
def RestResponse(data=None, code=200, error_text=''):
    """Build a JSON HttpResponse using the API's standard envelope.

    The envelope always carries 'code', 'errortext' and 'content' keys.
    (The content_type string, garbled in extraction, was reconstructed.)
    """
    out = {
        'code': code,
        'errortext': error_text,
        'content': data,
    }
    return HttpResponse(json.dumps(out), content_type='application/json')
|
ehuggett/send-cli | tests/test_common.py | Python | gpl-3.0 | 3,116 | 0.007702 | from sendclient.common import splitkeyurl
def test_splitkeyurl():
    """splitkeyurl must separate a share URL into service, file id and key."""
    service, urlid, key = splitkeyurl(
        'https://send.firefox.com/download/c8ab3218f9/#39EL7SuqwWNYe4ISl2M06g')
    assert (service, urlid, key) == (
        'https://send.firefox.com/', 'c8ab3218f9', '39EL7SuqwWNYe4ISl2M06g')
from sendclient.common import fileSize
def test_fileSize(testdata_1M):
    """fileSize must report the full size without moving the file pointer."""
    with open(str(testdata_1M), 'rb') as f:
        f.seek(123)
        reported = fileSize(f)
        # 1 MiB fixture
        assert reported == 1024 * 1024
        # the pointer must be exactly where we left it
        assert f.tell() == 123
from sendclient.common import unpadded_urlsafe_b64encode
def test_unpadded_urlsafe_b64encode():
    """16 raw key bytes must encode to the unpadded url-safe base64 form."""
    raw_key = b'\xdf\xd1\x0b\xed+\xaa\xc1cX{\x82\x12\x97c4\xea'
    assert unpadded_urlsafe_b64encode(raw_key) == '39EL7SuqwWNYe4ISl2M06g'
from sendclient.common import unpadded_urlsafe_b64decode
def test_unpadded_urlsafe_b64decode():
    """The unpadded url-safe base64 form must decode back to the raw key bytes."""
    expected = b'\xdf\xd1\x0b\xed+\xaa\xc1cX{\x82\x12\x97c4\xea'
    assert unpadded_urlsafe_b64decode('39EL7SuqwWNYe4ISl2M06g') == expected
from sendclient.common import secretKeys
def test_secretKeys_known_good_keys():
    # test data was obtained by adding debug messages to
    # {"commit":"188b28f","source":"https://github.com/mozilla/send/","version":"v1.2.4"}
    testdata = {
        'secretKey': b'q\xd94B\xa1&\x03\xa5<8\xddk\xee.\xea&',
        'encryptKey': b'\xc4\x979\xaa\r\n\xeb\xc7\xa16\xa4%\xfd\xa6\x91\t',
        'authKey': b'5\xa0@\xef\xd0}f\xc7o{S\x05\xe4,\xe1\xe4\xc2\x8cE\xa1\xfat\xc1\x11\x94e[L\x89%\xf5\x8b\xfc\xb5\x9b\x87\x9a\xf2\xc3\x0eKt\xdeL\xab\xa4\xa6%t\xa6"4\r\x07\xb3\xf5\xf6\xb9\xec\xcc\x08\x80}\xea',
        'metaKey': b'\xd5\x9dF\x05\x86\x1a\xfdi\xaeK+\xe7\x8e\x7f\xf2\xfd',
        'password': 'drowssap',
        'url': 'http://192.168.254.87:1443/download/fa4cd959de/#cdk0QqEmA6U8ON1r7i7qJg',
        'newAuthKey': b'U\x02F\x19\x1b\xc1W\x03q\x86q\xbc\xe7\x84WB\xa7(\x0f\x8a\x0f\x17\\\xb9y\xfaZT\xc1\xbf\xb2\xd48\x82\xa7\t\x9a\xb1\x1e{cg\n\xc6\x995+\x0f\xd3\xf4\xb3kd\x93D\xca\xf9\xa1(\xdf\xcb_^\xa3',
    }
    # derive the full key set from the known master key, password and URL
    keys = secretKeys(secretKey=testdata['secretKey'], password=testdata['password'], url=testdata['url'])
    # every derived key must match the reference implementation's output
    assert keys.secretKey == testdata['secretKey']
    assert keys.encryptKey == testdata['encryptKey']
    assert keys.authKey == testdata['authKey']
    assert keys.metaKey == testdata['metaKey']
    assert keys.password == testdata['password']
    assert keys.url == testdata['url']
    assert keys.newAuthKey == testdata['newAuthKey']
def test_secretKeys_random_key_lengths():
    """Key generation without a master secretKey must yield correctly sized keys."""
    keys = secretKeys()
    assert len(keys.secretKey) == 16
    assert len(keys.encryptKey) == 16
    assert len(keys.encryptIV) == 12
    assert len(keys.authKey) == 64
    assert len(keys.metaKey) == 16
    new_auth = keys.deriveNewAuthKey(
        'drowssap', 'https://send.server/download/aFileID/#someSecretKey')
    assert len(new_auth) == 64
|
timbroder/ai-stager | stager/jira/forms.py | Python | mit | 771 | 0.006485 | from django import forms
from stager.jira.models import *
from django.utils.functional import lazy
def type_choices():
    """Return (key, name) choice pairs for the Jira issue types marked allowed.

    Reconstructed a token garbled in extraction (objects.filter) and
    renamed the loop variable so it no longer shadows the builtin ``type``.
    """
    return [(jira_type.key, jira_type.name)
            for jira_type in JiraType.objects.filter(allowed=True)]
class JiraTicketForm(forms.Form):
    """Form for filing a Jira ticket (issue type, summary, description, ...).

    Most fields are added in __init__ so that the issue-type choices are
    re-read from the database on every instantiation. (The super() call,
    garbled in extraction, was reconstructed; dead commented-out choices
    were removed.)
    """
    summary = forms.CharField(widget=forms.Textarea)

    def __init__(self, *args, **kwargs):
        super(JiraTicketForm, self).__init__(*args, **kwargs)
        self.fields['issue_type'] = forms.ChoiceField(choices=type_choices())
        self.fields['description'] = forms.CharField(widget=forms.Textarea)
        self.fields['steps_to_reproduce'] = forms.CharField(widget=forms.Textarea)
        self.fields['attachment'] = forms.FileField(required=False)
|
kaldonis/ft-event-manager | src/lib/paste/auth/auth_tkt.py | Python | gpl-2.0 | 16,006 | 0.000375 | # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
##########################################################################
#
# Copyright (c) 2005 Imaginary Landscape LLC and Contributors.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
##########################################################################
"""
Implementation of cookie signing as done in `mod_auth_tkt
<http://www.openfusion.com.au/labs/mod_auth_tkt/>`_.
mod_auth_tkt is an Apache module that looks for these signed cookies
and sets ``REMOTE_USER``, ``REMOTE_USER_TOKENS`` (a comma-separated
list of groups) and ``REMOTE_USER_DATA`` (arbitrary string data).
This module is an alternative to the ``paste.auth.cookie`` module;
it's primary benefit is compatibility with mod_auth_tkt, which in turn
makes it possible to use the same authentication process with
non-Python code run under Apache.
"""
import time as time_mod
try:
import hashlib
except ImportError:
# mimic hashlib (will work for md5, fail for secure hashes)
import md5 as hashlib
import Cookie
from paste import request
from urllib import quote as url_quote
from urllib import unquote as url_unquote
# Digest used when the caller does not specify one (mod_auth_tkt default).
DEFAULT_DIGEST = hashlib.md5


class AuthTicket(object):

    """
    This class represents an authentication token. You must pass in
    the shared secret, the userid, and the IP address. Optionally you
    can include tokens (a list of strings, representing role names),
    'user_data', which is arbitrary data available for your own use in
    later scripts. Lastly, you can override the timestamp, cookie name,
    whether to secure the cookie and the digest algorithm (for details
    look at ``AuthTKTMiddleware``).

    Once you provide all the arguments, use .cookie_value() to
    generate the appropriate authentication ticket. .cookie()
    generates a Cookie object, the str() of which is the complete
    cookie header to be sent.

    CGI usage::

        token = auth_tkt.AuthTick('sharedsecret', 'username',
            os.environ['REMOTE_ADDR'], tokens=['admin'])
        print 'Status: 200 OK'
        print 'Content-type: text/html'
        print token.cookie()
        print
        ... redirect HTML ...

    Webware usage::

        token = auth_tkt.AuthTick('sharedsecret', 'username',
            self.request().environ()['REMOTE_ADDR'], tokens=['admin'])
        self.response().setCookie('auth_tkt', token.cookie_value())

    Be careful not to do an HTTP redirect after login; use meta
    refresh or Javascript -- some browsers have bugs where cookies
    aren't saved when set on a redirect.
    """

    def __init__(self, secret, userid, ip, tokens=(), user_data='',
                 time=None, cookie_name='auth_tkt',
                 secure=False, digest_algo=DEFAULT_DIGEST):
        self.secret = secret
        self.userid = userid
        self.ip = ip
        # Tokens are kept as one comma-separated string, matching the
        # mod_auth_tkt wire format.
        if not isinstance(tokens, basestring):
            tokens = ','.join(tokens)
        self.tokens = tokens
        self.user_data = user_data
        if time is None:
            self.time = time_mod.time()
        else:
            self.time = time
        self.cookie_name = cookie_name
        self.secure = secure
        if isinstance(digest_algo, str):
            # correct specification of digest from hashlib or fail
            self.digest_algo = getattr(hashlib, digest_algo)
        else:
            self.digest_algo = digest_algo

    def digest(self):
        """Return the hex digest signing this ticket's fields."""
        return calculate_digest(
            self.ip, self.time, self.secret, self.userid, self.tokens,
            self.user_data, self.digest_algo)

    def cookie_value(self):
        """Return the raw ticket string: digest, 8 hex chars of timestamp,
        URL-quoted userid, then optional tokens and user data joined by '!'."""
        v = '%s%08x%s!' % (self.digest(), int(self.time), url_quote(self.userid))
        if self.tokens:
            v += self.tokens + '!'
        v += self.user_data
        return v

    def cookie(self):
        """Return a SimpleCookie carrying the base64-encoded ticket."""
        c = Cookie.SimpleCookie()
        # base64 codec output is newline-wrapped; strip so the value fits
        # on a single header line. (Python 2 only: str.encode('base64').)
        c[self.cookie_name] = self.cookie_value().encode('base64').strip().replace('\n', '')
        c[self.cookie_name]['path'] = '/'
        if self.secure:
            c[self.cookie_name]['secure'] = 'true'
        return c
class BadTicket(Exception):

    """
    Raised when a ticket string cannot be parsed or verified.

    If parsing got far enough to compute what the digest should have
    been, that value is stored on ``expected``. It is useful for
    debugging but should not be shown to end users by default.
    """

    def __init__(self, msg, expected=None):
        super(BadTicket, self).__init__(msg)
        self.expected = expected
def parse_ticket(secret, ticket, ip, digest_algo=DEFAULT_DIGEST):
    """
    Parse the ticket, returning (timestamp, userid, tokens, user_data).

    If the ticket cannot be parsed, ``BadTicket`` will be raised with
    an explanation.
    """
    if isinstance(digest_algo, str):
        # correct specification of digest from hashlib or fail
        digest_algo = getattr(hashlib, digest_algo)
    # The digest is hex-encoded: two characters per digest byte.
    digest_hexa_size = digest_algo().digest_size * 2
    ticket = ticket.strip('"')
    digest = ticket[:digest_hexa_size]
    try:
        # The 8 characters after the digest are the hex creation timestamp.
        timestamp = int(ticket[digest_hexa_size:digest_hexa_size + 8], 16)
    except ValueError, e:
        raise BadTicket('Timestamp is not a hex integer: %s' % e)
    try:
        userid, data = ticket[digest_hexa_size + 8:].split('!', 1)
    except ValueError:
        raise BadTicket('userid is not followed by !')
    userid = url_unquote(userid)
    if '!' in data:
        tokens, user_data = data.split('!', 1)
    else:
        # @@: Is this the right order?
        tokens = ''
        user_data = data
    # Recompute the signature over the parsed fields and require an
    # exact match before trusting any of the ticket's contents.
    expected = calculate_digest(ip, timestamp, secret,
                                userid, tokens, user_data,
                                digest_algo)
    if expected != digest:
        raise BadTicket('Digest signature is not correct',
                        expected=(expected, digest))
    tokens = tokens.split(',')
    return (timestamp, userid, tokens, user_data)
# @@: Digest object constructor compatible with named ones in hashlib only
def calculate_digest(ip, timestamp, secret, userid, tokens, user_data,
                     digest_algo):
    """Compute the mod_auth_tkt double digest over the ticket fields.

    The inner digest covers the packed ip/timestamp bytes, the secret,
    and the NUL-separated user fields; the outer digest then re-hashes
    that hex result together with the secret.
    """
    secret = maybe_encode(secret)
    userid = maybe_encode(userid)
    tokens = maybe_encode(tokens)
    user_data = maybe_encode(user_data)
    digest0 = digest_algo(
        encode_ip_timestamp(ip, timestamp) + secret + userid + '\0'
        + tokens + '\0' + user_data).hexdigest()
    digest = digest_algo(digest0 + secret).hexdigest()
    return digest
def encode_ip_timestamp(ip, timestamp):
    """Pack a dotted-quad IP and a timestamp into the 8-character string
    that seeds the mod_auth_tkt digest: the four IP octets followed by
    the timestamp as four big-endian bytes."""
    octet_chars = [chr(int(part)) for part in ip.split('.')]
    seconds = int(timestamp)
    time_chars = [chr((seconds >> shift) & 0xff) for shift in (24, 16, 8, 0)]
    return ''.join(octet_chars) + ''.join(time_chars)
def maybe_encode(s, encoding='utf8'):
    """Encode *s* to a byte string if it is unicode; pass through otherwise.

    NOTE(review): relies on the Python 2 ``unicode`` builtin; under
    Python 3 this raises NameError.
    """
    if isinstance(s, unicode):
        s = s.encode(encoding)
    return s
class AuthTKTMiddleware(object):
"""
Middleware that checks for signed cookies that match what
`mod_auth_tkt <http://www.openfusion.com.au/labs/mod_auth_tkt/>`_
looks for (if you h |
jwallen/ChemPy | chempy/species.py | Python | mit | 8,105 | 0.005059 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# ChemPy - A chemistry toolkit for Python
#
# Copyright (c) 2010 by Joshua W. Allen (jwallen@mit.edu)
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the 'Software'),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
################################################################################
"""
This module contains classes and functions for working with chemical species.
From the `IUPAC Compendium of Chemical Terminology
<http://dx.doi.org/10.1351/goldbook>`_, a chemical species is "an
ensemble of chemically identical molecular entities that can explore the same
set of molecular energy levels on the time scale of the experiment". This
definition is purposefully vague to allow the user flexibility in application.
In ChemPy, a chemical species is called a Species object and is represented in
memory as an instance of the :class:`Species` class.
"""
################################################################################
class LennardJones:
    """
    Lennard-Jones collision parameters for the 12-6 potential

    .. math:: V(r) = 4 \\epsilon \\left[ \\left( \\frac{\\sigma}{r} \\right)^{12} - \\left( \\frac{\\sigma}{r} \\right)^{6} \\right]

    where the :math:`r^{-12}` term models short-range repulsion of
    overlapping orbitals and the :math:`r^{-6}` term models attraction
    due to van der Waals forces.

    =============== =============== ============================================
    Attribute       Type            Description
    =============== =============== ============================================
    `sigma`         ``double``      Separation at which the potential crosses zero
    `epsilon`       ``double``      Depth of the potential well in J
    =============== =============== ============================================
    """

    def __init__(self, sigma=0.0, epsilon=0.0):
        self.epsilon = epsilon
        self.sigma = sigma
################################################################################
class Species:
    """
    A chemical species.

    =================== ======================= ================================
    Attribute           Type                    Description
    =================== ======================= ================================
    `index`             :class:`int`            A unique nonnegative integer index
    `label`             :class:`str`            A descriptive string label
    `thermo`            :class:`ThermoModel`    The thermodynamics model for the species
    `states`            :class:`StatesModel`    The molecular degrees of freedom model for the species
    `molecule`          ``list``                The :class:`Molecule` objects describing the molecular structure
    `geometry`          :class:`Geometry`       The 3D geometry of the molecule
    `E0`                ``double``              The ground-state energy in J/mol
    `lennardJones`      :class:`LennardJones`   A set of Lennard-Jones collision parameters
    `molecularWeight`   ``double``              The molecular weight of the species in kg/mol
    `reactive`          ``bool``                ``True`` if the species participates in reactions, ``False`` if not
    =================== ======================= ================================
    """

    def __init__(self, index=-1, label='', thermo=None, states=None, molecule=None, geometry=None, E0=0.0, lennardJones=None, molecularWeight=0.0, reactive=True):
        self.index = index
        self.label = label
        self.thermo = thermo
        self.states = states
        # Fresh list per instance; a shared mutable default would leak
        # isomers between Species objects.
        self.molecule = molecule or []
        self.geometry = geometry
        self.E0 = E0
        self.lennardJones = lennardJones
        self.reactive = reactive
        self.molecularWeight = molecularWeight

    def __repr__(self):
        """
        Return a string representation of the species, suitable for console output.
        """
        return "<Species %i '%s'>" % (self.index, self.label)

    def __str__(self):
        """
        Return a string representation of the species, in the form 'label(id)';
        a species with no index (-1) is shown as its bare label.
        """
        if self.index == -1: return '%s' % (self.label)
        else: return '%s(%i)' % (self.label, self.index)

    def generateResonanceIsomers(self):
        """
        Generate all of the resonance isomers of this species. The isomers are
        stored as a list in the `molecule` attribute. If the length of
        `molecule` is already greater than one, it is assumed that all of the
        resonance isomers have already been generated.
        """
        if len(self.molecule) != 1:
            return
        # Only radical-containing molecules have resonance isomers here.
        if sum([atom.radicalElectrons for atom in self.molecule[0].atoms]) > 0:
            # Breadth-first expansion: self.molecule grows as unique new
            # isomers are appended while index sweeps over it.
            index = 0
            while index < len(self.molecule):
                isomer = self.molecule[index]
                for newIsomer in isomer.getAdjacentResonanceIsomers():
                    # BUG FIX: the old loop set a `found` flag without
                    # breaking, always scanning the whole list; any() stops
                    # at the first isomorphic match.
                    if not any(isom.isIsomorphic(newIsomer) for isom in self.molecule):
                        self.molecule.append(newIsomer)
                        newIsomer.updateAtomTypes()
                # Move to next resonance isomer
                index += 1
################################################################################
class TransitionState:
    """
    A first-order saddle point on a potential energy surface, representing
    a chemical transition state.

    =============== =========================== ================================
    Attribute       Type                        Description
    =============== =========================== ================================
    `label`         :class:`str`                A descriptive string label
    `states`        :class:`StatesModel`        The molecular degrees of freedom model for the species
    `geometry`      :class:`Geometry`           The 3D geometry of the molecule
    `E0`            ``double``                  The ground-state energy in J/mol
    `frequency`     ``double``                  The negative frequency of the first-order saddle point in cm^-1
    `degeneracy`    ``int``                     The reaction path degeneracy
    =============== =========================== ================================
    """

    def __init__(self, label='', states=None, geometry=None, E0=0.0, frequency=0.0, degeneracy=1):
        self.degeneracy = degeneracy
        self.frequency = frequency
        self.E0 = E0
        self.geometry = geometry
        self.states = states
        self.label = label

    def __repr__(self):
        """Return a console-friendly representation of the transition state."""
        return "<TransitionState '{0}'>".format(self.label)
|
anhstudios/swganh | data/scripts/templates/object/weapon/mine/shared_wp_mine_xg.py | Python | mit | 434 | 0.048387 | #### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY | BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy. | object import *
def create(kernel):
    """Build the Weapon template for the XG mine.

    Called by the SWGANH template loader with the active *kernel*;
    returns a configured :class:`Weapon` instance.
    """
    result = Weapon()

    result.template = "object/weapon/mine/shared_wp_mine_xg.iff"
    # Attribute template 10 selects the weapon attribute layout.
    result.attribute_template_id = 10
    result.stfName("weapon_name","mine_xg")

    #### BEGIN MODIFICATIONS ####
    #### END MODIFICATIONS ####

    return result
NicovincX2/Python-3.5 | Statistiques/Économétrie/Série temporelle/Damped-driven pendulum/tsa_constants.py | Python | gpl-3.0 | 299 | 0.006689 | # -*- coding: utf-8 -*-
"""
Define the global constants for the problem.
"""
import math
import os
# Physical and integration parameters for the damped, driven pendulum.
q = 0.5                 # damping coefficient
b0 = 0.9                # base driving amplitude
omegad = 2.0 / 3.0      # driving angular frequency
l = 9.8                 # pendulum length, chosen so g/l == 1
m = 1.0                 # bob mass
g = 9.8                 # gravitational acceleration (m/s^2)
tmax = 1000             # total simulation time
theta0_degree = 25.0    # initial angle in degrees
theta0 = math.radians(theta0_degree)  # initial angle in radians
dt = 0.05               # integration time step
bstep = 0.05            # increment when sweeping the driving amplitude
path = "plots/"         # output directory for generated plots

# BUG FIX: removed os.system("pause"). A constants module must not block
# waiting for a keypress when imported; the call was Windows-only
# ("pause" is a cmd.exe builtin) and clearly a debugging leftover.
murphydavis/turbo-bear | forms.py | Python | mit | 4,475 | 0.001788 | from app import app
from fields import PreValidatedFormField
from flask_wtf import Form
from wtforms import (TextField, TextAreaField, FormField, FieldList,
HiddenField, SubmitField)
from wtforms.validators import (DataRequired, Optional, StopValidation)
# Values treated as "no data entered" when deciding whether a form is empty.
EMPTY_VALUES = ['', None]


def DynamicListForm(unbound_field=None, min_entries=1):
    """Factory returning a bound form with a growable FieldList.

    ``unbound_field`` is the field constructor used for each list entry
    (defaults to a plain ``TextField``); ``min_entries`` sets the initial
    number of entries. Submitting via the append button grows the list
    instead of validating.
    """
    class _DynamicListForm(Form):
        hidden_stuff = HiddenField('stuff')
        data_list = FieldList(unbound_field if unbound_field else TextField(),
                              min_entries=min_entries)
        append_button = SubmitField('<b>+</b>')

        def validate(self, extra_validators=None):
            # The append button submits the form; add an entry in that case
            # before falling through to normal validation.
            if self.append_button.data:
                self.data_list.append_entry()
            return super(_DynamicListForm, self).validate()
    # NOTE(review): leftover debugging print statement (Python 2 syntax).
    print 'DO I MAKE IT HERE?'
    # Flask-WTF needs an application context to instantiate a form.
    with app.app_context():
        return _DynamicListForm()
class SearchForm(Form):
    """Single required free-text search box."""
    searchfield = TextField('searchfield', validators=[DataRequired()])
class MetadataForm(Form):
    """Per-field annotation metadata (uncertainty + classification)."""
    #relation_name = HiddenField('Relation Name')
    uncertainty_measure = TextField('Uncertainty', validators=[DataRequired()])
    classification = TextField('Classification', validators=[DataRequired()])

    def pre_validate(self, form):
        """Skip validation entirely when the enclosing form holds no data.

        Flattens the parent form's data (ignoring this metadata subform);
        if nothing but empty markers remain, raises StopValidation so an
        untouched row does not trigger 'required' errors.
        """
        # BUG FIX: flatten_dict returns a list, and lists have no
        # .discard(), so the old map(lambda x: ref_data.discard(x), ...)
        # raised AttributeError whenever this ran. Filter the empty
        # values out instead (avoids set(), since flattened values may
        # be unhashable lists from FieldList data).
        ref_data = [v for v in flatten_dict(form.data, ['metadata'])
                    if v not in EMPTY_VALUES]
        if len(ref_data) == 0:
            raise StopValidation()
class ReportMetadataForm(Form):
    """Report-level annotation metadata, including provenance fields."""
    uncertainty_measure = TextField('Uncertainty', validators=[DataRequired()])
    classification = TextField('Classification', validators=[DataRequired()])
    provenance = TextField('Provenance', validators=[DataRequired()])
    created_at = TextField('Created at', validators=[DataRequired()])
    created_by = TextField('Created by', validators=[DataRequired()])

    def pre_validate(self, form):
        """Skip validation when the parent form carries no real data.

        Mirrors MetadataForm.pre_validate.
        """
        # BUG FIX: flatten_dict returns a list (no .discard method), so
        # the old map(lambda x: ref_data.discard(x), ...) raised
        # AttributeError. Filter the empty markers out instead.
        ref_data = [v for v in flatten_dict(form.data, ['metadata'])
                    if v not in EMPTY_VALUES]
        if len(ref_data) == 0:
            raise StopValidation()
class AnnotatedForm(Form):
    """Base form pairing a data field with an annotation-metadata subform."""
    metadata = PreValidatedFormField(MetadataForm)

    def validate(self):
        # Currently a plain pass-through; kept as an explicit hook point.
        return super(AnnotatedForm, self).validate()


class AnnotatedTextForm(AnnotatedForm):
    """Annotated single-line text input."""
    refdata = TextField('reference data', validators=[Optional()])


class AnnotatedTextAreaForm(AnnotatedForm):
    """Annotated multi-line text input."""
    refdata = TextAreaField('reference data', validators=[Optional()])


class AnnotatedIntForm(AnnotatedForm):
    """Annotated integer input.

    NOTE(review): ``validators=[int]`` passes the builtin ``int`` as a
    validator; WTForms calls validators as ``validator(form, field)``,
    so this looks suspect -- confirm intent.
    """
    int_data = TextField('Some text', validators=[int])


class GroupForm(Form):
    """A named group plus a list of member entries."""
    name = FormField(AnnotatedTextForm)
    members = FieldList(FormField(AnnotatedTextAreaForm))


class OperatorForm(AnnotatedForm):
    """Operator described by chartered nations and autonomous systems."""
    chartered_nations = FieldList(FormField(AnnotatedTextForm), min_entries=1)
    autonomous_systems = FieldList(FormField(AnnotatedTextForm), min_entries=1)
    #x = DynamicListForm(TextAreaField())
    #tests = FormField(DynamicListForm(TextAreaField))
def flatten_dict(d, ignorekeys=[]):
    """Flatten the values of a (possibly nested) dict into a flat list.

    Keys listed in *ignorekeys* are skipped at the top level only; the
    recursive call deliberately does not pass them down, so nested keys
    with the same name are still included.

    NOTE(review): the mutable default ``ignorekeys=[]`` is safe here only
    because it is never mutated; a tuple default would be clearer.
    """
    flattened = []
    # .items() instead of the Python-2-only .iteritems(): identical
    # results on Python 2 and the function now also runs on Python 3.
    for k, v in d.items():
        if k in ignorekeys:
            continue
        if isinstance(v, dict):
            flattened.extend(flatten_dict(v))
        else:
            flattened.append(v)
    return flattened
def traverse_form_append_lists(form):
    """Walk a form's fields looking for appendable FieldLists.

    NOTE(review): clearly work-in-progress debugging code -- it only
    prints diagnostics (Python 2 print statements) and the recursive
    descent is commented out.
    """
    for field in form._fields:
        _field = getattr(form, field)
        if type(_field) == FormField:
            if 'append_button' in _field._fields:
                print 'LOL'
            else:
                print '>:('
        elif type(_field) == FieldList:
            import pprint
            pp = pprint.PrettyPrinter()
            # True when the list's entries are themselves subforms.
            pp.pprint(_field.unbound_field.field_class == FormField)
            #for f in _field:
            #    traverse_form_append_lists(f)
    return
class ReportEntryForm(AnnotatedForm):
    '''
    Class to encompass general data entry from a report, from a variety of
    entry points, each with their own sub forms. Contains general data entry
    metadata that should be shared among all fields, some of which is specified
    again per field (classification)
    '''
    #metadata = FormField(ReportMetadataForm)
    #operators = DynamicListForm(FormField(OperatorForm), min_entries=1)
    #description = FormField(DynamicListForm())

    def __init__(self, *args, **kwargs):
        # Plain pass-through for now; all planned fields are commented out.
        super(ReportEntryForm, self).__init__(*args, **kwargs)
        #Use a completely separate form to add or remove fields from the page perhaps?
|
1flow/1flow | oneflow/processors/newspaper_common.py | Python | agpl-3.0 | 592 | 0 | # -*- coding: utf-8 -*-
""" 1flow base processor pack (see metadata for details). """
# Always import this, it will giv | e you a bunch of useful things handy.
from oneflow.core.models.reldb.processor.utils import * # NOQA
__all__ = [
'PROC | ESSOR_METADATA',
]
PROCESSOR_METADATA = {
'slug': '1fs-newspaper-common',
'name': u'NewsPaper (common)',
'author': 'Olivier Cortès <oc@1flow.io>',
'version': '1.0',
'requirements': u'''
newspaper==0.0.9.8
''',
'description': u'requirements-only processor for mutualized requirements.',
'categories': ('abstract', ),
}
|
ruleant/weblate | weblate/trans/models/unitdata.py | Python | gpl-3.0 | 10,092 | 0.000099 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2014 Michal Čihař <michal@cihar.com>
#
# This file is part of Weblate <http://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.db import models
from django.db.models import Count
from django.contrib.auth.models import User
from weblate.lang.models import Language
from weblate.trans.checks import CHECKS
from weblate.trans.models.unit import Unit
from weblate.trans.models.project import Project
from weblate.trans.models.changes import Change
from weblate.accounts.avatar import get_user_display
class RelatedUnitMixin(object):
    '''
    Mixin to provide access to related units for contentsum referenced objects.

    Host classes must provide ``contentsum``, ``project`` and ``language``
    attributes; ``language`` may be None to mean "all languages".
    '''
    def get_related_units(self):
        '''
        Returns queryset with related units.

        Related units share the same content checksum within the same
        project, optionally narrowed to a single language.
        '''
        related_units = Unit.objects.filter(
            contentsum=self.contentsum,
            translation__subproject__project=self.project,
        )
        if self.language is not None:
            related_units = related_units.filter(
                translation__language=self.language
            )
        return related_units
class SuggestionManager(models.Manager):
    def add(self, unit, target, request):
        '''
        Creates new suggestion for this unit.
        '''
        # Imported lazily to avoid a circular import with accounts.models.
        from weblate.accounts.models import notify_new_suggestion

        # Anonymous suggestions are stored with user=None.
        if not request.user.is_authenticated():
            user = None
        else:
            user = request.user

        # Create the suggestion
        suggestion = self.create(
            target=target,
            contentsum=unit.contentsum,
            language=unit.translation.language,
            project=unit.translation.subproject.project,
            user=user
        )

        # Record in change
        Change.objects.create(
            unit=unit,
            action=Change.ACTION_SUGGESTION,
            translation=unit.translation,
            user=user,
            author=user
        )

        # Add unit vote: the suggester implicitly upvotes their own
        # suggestion when vote-based accepting is enabled.
        if user is not None and unit.can_vote_suggestions():
            suggestion.add_vote(
                unit.translation,
                request,
                True
            )

        # Notify subscribed users
        notify_new_suggestion(unit, suggestion, user)

        # Update suggestion stats
        if user is not None:
            user.profile.suggested += 1
            user.profile.save()

        # Update the flag on every unit sharing this content checksum.
        for relunit in suggestion.get_related_units():
            relunit.update_has_suggestion()
class Suggestion(models.Model, RelatedUnitMixin):
    # Checksum identifying the source string across translations.
    contentsum = models.CharField(max_length=40, db_index=True)
    target = models.TextField()
    # None for suggestions made anonymously.
    user = models.ForeignKey(User, null=True, blank=True)
    project = models.ForeignKey(Project)
    language = models.ForeignKey(Language)
    votes = models.ManyToManyField(
        User,
        through='Vote',
        related_name='user_votes'
    )

    objects = SuggestionManager()

    class Meta(object):
        permissions = (
            ('accept_suggestion', "Can accept suggestion"),
            ('override_suggestion', 'Can override suggestion state'),
            ('vote_suggestion', 'Can vote for suggestion'),
        )
        app_label = 'trans'

    def __unicode__(self):
        return u'suggestion for {0} by {1}'.format(
            self.contentsum,
            self.user.username if self.user else 'unknown',
        )

    def accept(self, translation, request):
        '''
        Applies this suggestion to all matching units in the given
        translation, then deletes the suggestion.
        '''
        allunits = translation.unit_set.filter(
            contentsum=self.contentsum,
        )
        for unit in allunits:
            unit.target = self.target
            unit.fuzzy = False
            unit.save_backend(
                request, change_action=Change.ACTION_ACCEPT, user=self.user
            )
        self.delete()

    def delete(self, *args, **kwargs):
        super(Suggestion, self).delete(*args, **kwargs)
        # Update unit flags on all units sharing this content checksum.
        for unit in self.get_related_units():
            unit.update_has_suggestion()

    def get_matching_unit(self):
        '''
        Retrieves one (possibly out of several) unit matching
        this suggestion.
        '''
        return self.get_related_units()[0]

    def get_source(self):
        '''
        Returns source strings matching this suggestion.
        '''
        return self.get_matching_unit().source

    def get_review_url(self):
        '''
        Returns URL which can be used for review.
        '''
        return self.get_matching_unit().get_absolute_url()

    def get_user_display(self):
        return get_user_display(self.user, link=True)

    def get_num_votes(self):
        '''
        Returns number of votes (net score: upvotes minus downvotes).
        '''
        votes = Vote.objects.filter(suggestion=self)
        positive = votes.filter(positive=True).aggregate(Count('id'))
        negative = votes.filter(positive=False).aggregate(Count('id'))
        return positive['id__count'] - negative['id__count']

    def add_vote(self, translation, request, positive):
        '''
        Adds (or updates) vote for a suggestion.
        '''
        vote, dummy = Vote.objects.get_or_create(
            suggestion=self,
            user=request.user
        )
        if vote.positive != positive:
            vote.positive = positive
            vote.save()

        # Automatic accepting once the configured vote threshold is
        # reached (a threshold of 0 disables auto-accept).
        required_votes = translation.subproject.suggestion_autoaccept
        if required_votes and self.get_num_votes() >= required_votes:
            self.accept(translation, request)
class Vote(models.Model):
    '''
    Suggestion voting.
    '''
    suggestion = models.ForeignKey(Suggestion)
    user = models.ForeignKey(User)
    # True = upvote, False = downvote.
    positive = models.BooleanField(default=True)

    class Meta(object):
        # One vote per user per suggestion; changing your mind updates it.
        unique_together = ('suggestion', 'user')
        app_label = 'trans'

    def __unicode__(self):
        if self.positive:
            vote = '+1'
        else:
            vote = '-1'
        return u'{0} for {1} by {2}'.format(
            vote,
            self.suggestion,
            self.user.username,
        )
class CommentManager(models.Manager):
    def add(self, unit, user, lang, text):
        '''
        Adds comment to this unit.

        ``lang`` is None for comments on the source string and a
        Language for comments on a specific translation.
        '''
        # Imported lazily to avoid a circular import with accounts.models.
        from weblate.accounts.models import notify_new_comment

        new_comment = self.create(
            user=user,
            contentsum=unit.contentsum,
            project=unit.translation.subproject.project,
            comment=text,
            language=lang
        )
        Change.objects.create(
            unit=unit,
            action=Change.ACTION_COMMENT,
            translation=unit.translation,
            user=user,
            author=user
        )

        # Invalidate counts cache
        if lang is None:
            unit.translation.invalidate_cache('sourcecomments')
        else:
            unit.translation.invalidate_cache('targetcomments')

        # Update the flag on every unit sharing this content checksum.
        for relunit in new_comment.get_related_units():
            relunit.update_has_comment()

        # Notify subscribed users
        notify_new_comment(
            unit,
            new_comment,
            user,
            unit.translation.subproject.report_source_bugs
        )
class Comment(models.Model, RelatedUnitMixin):
contentsum = models.CharField(max_length=40, db_index=True)
comment = models.TextField()
user = models.ForeignKey(User, null=True, blank=True)
project = models.ForeignKey(Project)
language = models.ForeignKey(Language, null=True, blank=True)
timestamp = models.DateTimeField(auto_now_add=True |
hashbang/dotfiles | weechat/.weechat/python/otr.py | Python | mit | 75,565 | 0.001906 | # -*- coding: utf-8 -*-
# otr - WeeChat script for Of | f-the-Record IRC messaging
#
# DISCLAIMER: To the best of my knowledge this script securely provides OTR
# messaging in WeeChat, but I offer no guarantee. Please report any security
# holes you find.
#
# Copyright (c) 2012-2015 Matthew M. Boedicker <matthewm@boedicker.org>
# Nils Görs <weechatter@arcor.de>
# Daniel "koolfy" Faucon <koolfy@koolfy.be>
# Felix Eckhofer <felix@tribut.de>
#
# Report is | sues at https://github.com/mmb/weechat-otr
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import collections
import glob
import io
import os
import platform
import re
import traceback
import shlex
import shutil
import sys
class PythonVersion2(object):
    """Python 2 version of code that must differ between Python 2 and 3."""

    def __init__(self):
        import cgi
        self.cgi = cgi

        import HTMLParser
        self.html_parser = HTMLParser
        # HTMLParser.HTMLParser() takes no extra kwargs on Python 2.
        self.html_parser_init_kwargs = {}

        import htmlentitydefs
        self.html_entities = htmlentitydefs

    def html_escape(self, strng):
        """Escape HTML characters in a string."""
        return self.cgi.escape(strng)

    def unicode(self, *args, **kwargs):
        """Return the Unicode version of a string."""
        return unicode(*args, **kwargs)

    def unichr(self, *args, **kwargs):
        """Return the one character string of a Unicode character number."""
        return unichr(*args, **kwargs)

    def to_unicode(self, strng):
        """Convert a utf-8 encoded string to a Unicode."""
        if isinstance(strng, unicode):
            return strng
        else:
            return strng.decode('utf-8', 'replace')

    def to_str(self, strng):
        """Convert a Unicode to a utf-8 encoded string."""
        return strng.encode('utf-8', 'replace')
class PythonVersion3(object):
    """Python 3 version of code that must differ between Python 2 and 3."""

    def __init__(self, minor):
        self.minor = minor

        import html
        import html.entities
        import html.parser

        self.html = html
        self.html_parser = html.parser
        self.html_entities = html.entities

        # html.parser.HTMLParser grew the convert_charrefs keyword in 3.4.
        if self.minor >= 4:
            self.html_parser_init_kwargs = {'convert_charrefs': True}
        else:
            self.html_parser_init_kwargs = {}

    def html_escape(self, strng):
        """Escape HTML special characters; quotes are left alone."""
        return self.html.escape(strng, quote=False)

    def unicode(self, *args, **kwargs):
        """Return the Unicode version of a string."""
        return str(*args, **kwargs)

    def unichr(self, *args, **kwargs):
        """Return the one character string of a Unicode character number."""
        return chr(*args, **kwargs)

    def to_unicode(self, strng):
        """Convert a utf-8 encoded byte string to str; pass str through."""
        if isinstance(strng, bytes):
            return strng.decode('utf-8', 'replace')
        return strng

    def to_str(self, strng):
        """Convert a Unicode to a utf-8 encoded string (no-op on Python 3)."""
        return strng
# Select the version-specific helper implementation once at import time.
if sys.version_info.major >= 3:
    PYVER = PythonVersion3(sys.version_info.minor)
else:
    PYVER = PythonVersion2()
import weechat
import potr
SCRIPT_NAME = 'otr'
SCRIPT_DESC = 'Off-the-Record messaging for IRC'

# User-facing help text; interpolates the one-line description above.
SCRIPT_HELP = """{description}
Quick start:
Add an OTR item to the status bar by adding '[otr]' to the config setting
weechat.bar.status.items. This will show you whether your current conversation
is encrypted, authenticated and logged. /set otr.* for OTR status bar
customization options.
Start a private conversation with a friend who has OTR: /query yourpeer hi
In the private chat buffer: /otr start
If you have not authenticated your peer yet, follow the instructions for
authentication.
You can, at any time, see the current OTR session status and fingerprints with:
/otr status
View OTR policies for your peer: /otr policy
View default OTR policies: /otr policy default
Start/Stop log recording for the current OTR session: /otr log [start|stop]
This will be reverted back to the previous log setting at the end of the session.
To refresh the OTR session: /otr refresh
To end your private conversation: /otr finish
This script supports only OTR protocol version 2.
""".format(description=SCRIPT_DESC)

SCRIPT_AUTHOR = 'Matthew M. Boedicker'
SCRIPT_LICENCE = 'GPL3'
SCRIPT_VERSION = '1.8.0'

OTR_DIR_NAME = 'otr'

# Matches OTR query markers such as "?OTR?" or "?OTRv2?" in message text.
OTR_QUERY_RE = re.compile(r'\?OTR(\?|\??v[a-z\d]*\?)')

# Per-peer policies the user may toggle, mapped to their help strings.
POLICIES = {
    'allow_v2' : 'allow OTR protocol version 2, effectively enable OTR '
                 'since v2 is the only supported version',
    'require_encryption' : 'refuse to send unencrypted messages when OTR is '
                           'enabled',
    'log' : 'enable logging of OTR conversations',
    'send_tag' : 'advertise your OTR capability using the whitespace tag',
    'html_escape' : 'escape HTML special characters in outbound messages',
    'html_filter' : 'filter HTML in incoming messages',
}

# Policies that are fixed and never user-configurable.
READ_ONLY_POLICIES = {
    'allow_v1' : False,
}

ACTION_PREFIX = '/me '
IRC_ACTION_RE = re.compile('^\x01ACTION (?P<text>.*)\x01$')
PLAIN_ACTION_RE = re.compile('^'+ACTION_PREFIX+'(?P<text>.*)$')

# Characters stripped from anything embedded in a raw IRC command, to
# prevent message splitting/injection.
IRC_SANITIZE_TABLE = dict((ord(char), None) for char in '\n\r\x00')

# Buffer used for debug output, created lazily. (A module-level 'global'
# statement is a no-op; kept from the original.)
global otr_debug_buffer
otr_debug_buffer = None
# Patch potr.proto.TaggedPlaintext to not end plaintext tags in a space.
#
# When POTR adds OTR tags to plaintext it puts them at the end of the message.
# The tags end in a space which gets stripped off by WeeChat because it
# strips trailing spaces from commands. This causes OTR initiation to fail so
# the following code adds an extra tab at the end of the plaintext tags if
# they end in a space.
#
# The patched version also skips OTR tagging for CTCP messages because it
# breaks the CTCP format.
def patched__bytes__(self):
    """Replacement for potr.proto.TaggedPlaintext.__bytes__ (see above)."""
    # Do not tag CTCP messages (they are delimited by \x01 bytes).
    if self.msg.startswith(b'\x01') and \
            self.msg.endswith(b'\x01'):
        return self.msg

    data = self.msg + potr.proto.MESSAGE_TAG_BASE
    for v in self.versions:
        data += potr.proto.MESSAGE_TAGS[v]

    # A trailing tab survives WeeChat's trailing-space stripping and is
    # still valid whitespace-tag material.
    if data.endswith(b' '):
        data += b'\t'
    return data

potr.proto.TaggedPlaintext.__bytes__ = patched__bytes__
def command(buf, command_str):
    """Wrap weechat.command() with utf-8 encode."""
    # Echo every command to the debug buffer before running it.
    debug(command_str)
    weechat.command(buf, PYVER.to_str(command_str))
def privmsg(server, nick, message):
    """Send a private message to a nick.

    Multi-line messages become one PRIVMSG per line; server, nick and
    each line are sanitized before being embedded in the raw command.
    """
    for line in message.splitlines():
        command('', '/quote -server {server} PRIVMSG {nick} :{line}'.format(
            server=irc_sanitize(server),
            nick=irc_sanitize(nick),
            line=irc_sanitize(line)))
def build_privmsg_in(fromm, target, msg):
    """Build inbound IRC PRIVMSG command.

    All three parts are sanitized so the assembled command cannot be
    split or injected into.
    """
    return ':{user} PRIVMSG {target} :{msg}'.format(
        user=irc_sanitize(fromm),
        target=irc_sanitize(target),
        msg=irc_sanitize(msg))
def build_privmsgs_in(fromm, target, msg, prefix=''):
    """Build an inbound IRC PRIVMSG command for each line in msg,
    joined with CRLF.

    If prefix is supplied, each line of msg is prefixed with it."""
    return '\r\n'.join(
        build_privmsg_in(fromm, target, prefix + line)
        for line in msg.splitlines())
def build_privmsg_out(target, msg):
"""Build outbound IRC PRIVMSG command(s)."""
cmd = []
for line in msg.splitlines():
cmd.append('PRIVMSG {target} :{line}'.format(
target=irc_sanitize(target),
|
thegreathippo/crispy | crispy/customdicts.py | Python | mit | 2,133 | 0.000469 | from imbroglio import InnerDict
# Sentinel meaning "no value"; distinguishable from None, which callers
# may legitimately store.
NULL = object()


class CustomDict(InnerDict):
    def __repr__(self):
        # Prefix the mapping repr with the concrete class name,
        # e.g. InvertibleDict{...}.
        return type(self).__name__ + super().__repr__()

    def copy(self):
        # Shallow copy that preserves the concrete subclass.
        # NOTE(review): assumes the subclass constructor accepts item
        # pairs as positional arguments -- presumably an InnerDict
        # convention; confirm against imbroglio.InnerDict.
        return type(self)(*self.items())
class CollisionError(ValueError):
    """Raised when a key is mapped to a value already owned by another key.

    Exposes both sides of the collision as attributes; ``blocked_value``
    is only set when the blocked key currently has a value (i.e. it was
    not the NULL sentinel).
    """

    def __init__(self, key1, val1, key2, val2):
        message = "{0} cannot be mapped to {1}; {2} is mapped to it".format(
            key1, val2, key2)
        super().__init__(message)
        self.blocked_key = key1
        self.blocking_value = val2
        self.blocking_key = key2
        if val1 is not NULL:
            self.blocked_value = val1
class InvertibleDict(CustomDict):
    """One-to-one dict that maintains a value->key map in ``self.inverse``.

    Assigning a value already owned by a different key raises
    ``CollisionError`` instead of silently breaking the bijection.
    """
    CollisionError = CollisionError

    def __init__(self):
        self.inverse = dict()
        super().__init__()

    def clear(self):
        super().clear()
        self.inverse.clear()

    def __setitem__(self, item, value):
        # Refuse to steal a value that a different key already maps to.
        if value in self.inverse and self.inverse[value] != item:
            item_val = self.get(item, NULL)
            raise CollisionError(item, item_val, self.inverse[value], value)
        if item in self:
            # Re-assigning an existing key: drop its stale inverse entry.
            del self.inverse[self[item]]
        super().__setitem__(item, value)
        self.inverse[self[item]] = item

    def __delitem__(self, item):
        value = self[item]
        del self.inverse[value]
        super().__delitem__(item)
class ReversibleDict(CustomDict):
    """Dict that also tracks, per value, the set of keys mapped to it."""

    def __init__(self):
        self.inverse = dict()
        super().__init__()

    def clear(self):
        super().clear()
        self.inverse.clear()

    def __setitem__(self, item, value):
        if item in self:
            # Detach the key from its previous value's holder set first.
            previous = self[item]
            holders = self.inverse[previous]
            holders.remove(item)
            if not holders:
                del self.inverse[previous]
        super().__setitem__(item, value)
        self.inverse.setdefault(value, set()).add(item)

    def __delitem__(self, item):
        value = self[item]
        holders = self.inverse[value]
        holders.remove(item)
        if not holders:
            del self.inverse[value]
        super().__delitem__(item)
|
pwong-mapr/private-hue | desktop/libs/hadoop/setup.py | Python | apache-2.0 | 1,344 | 0.014137 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under t | he License.
from setuptools import setup, find_packages

# Packaging metadata for the Hadoop support library.
# Note that we're cheating by installing gen-py
# in hadoop's __init__.py.
SETUP_KWARGS = dict(
    name="hadoop",
    version="3.5.0",
    url='http://github.com/cloudera/hue',
    description="Hadoop Libraries",
    packages=find_packages('src'),
    package_dir={'': 'src'},
    install_requires=['setuptools', 'desktop'],
    # Even libraries need to be registered as desktop_apps,
    # if they have configuration, like this one.
    entry_points={'desktop.sdk.lib': 'hadoop=hadoop'},
)

setup(**SETUP_KWARGS)
|
vortex-ape/scikit-learn | sklearn/externals/joblib/externals/loky/backend/compat.py | Python | bsd-3-clause | 635 | 0 | # flake8: noqa
###############################################################################
# Compat file to import the correct modules for each platform and python
# version.
#
# author: Thomas Moreau and Olivier grisel
#
import sys

# Python 2 named the module Queue; alias it so callers always use `queue`.
if sys.version_info[:2] >= (3, 3):
    import queue
else:
    import Queue as queue

from pickle import PicklingError

# BaseProcess was only exposed publicly from Python 3.4 onwards.
if sys.version_info >= (3, 4):
    from multiprocessing.process import BaseProcess
else:
    from multiprocessing.process import Process as BaseProcess

# Platform specific compat
if sys.platform == "win32":
    from .compat_win32 import *
else:
    from .compat_posix import *
|
jmartinm/InvenioAuthorLists | modules/websubmit/lib/functions/Insert_Modify_Record.py | Python | gpl-2.0 | 2,196 | 0.007286 | ## This file is part of Invenio.
## Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
__revision__ = "$Id$"
import os
import shutil
import time
from invenio.config import \
CFG_TMPDIR
from invenio.websubmit_config import InvenioWebSubmitFunctionError
from invenio.bibtask import task_low_level_submission
def Insert_Modify_Record(parameters, curdir, form, user_info=None):
    """
    Modify existing record using 'curdir/recmysql' and BibUpload correct
    mode. The file must therefore already have been created prior to this
    execution of this function, for eg. using "Make_Modify_Record".

    This function gets the output of BibConvert and uploads it into
    the MySQL bibliographical database.

    @param parameters: unused here (WebSubmit function signature).
    @param curdir: submission working directory holding the record file.
    @param form: unused here (WebSubmit function signature).
    @param user_info: unused here (WebSubmit function signature).
    @return: empty string on success.
    @raise InvenioWebSubmitFunctionError: if no record file exists.
    """
    global rn
    # Prefer the formatted record when present, fall back to the raw one.
    if os.path.exists(os.path.join(curdir, "recmysqlfmt")):
        recfile = "recmysqlfmt"
    elif os.path.exists(os.path.join(curdir, "recmysql")):
        recfile = "recmysql"
    else:
        raise InvenioWebSubmitFunctionError("Could not find record file")
    initial_file = os.path.join(curdir, recfile)
    # Copy to a unique timestamped name in CFG_TMPDIR so the bibupload
    # task can consume it after this request completes.
    final_file = os.path.join(CFG_TMPDIR, "%s_%s" % \
                              (rn.replace('/', '_'),
                               time.strftime("%Y-%m-%d_%H:%M:%S")))
    shutil.copy(initial_file, final_file)
    bibupload_id = task_low_level_submission('bibupload', 'websubmit.Insert_Modify_Record', '-c', final_file, '-P', '3')
    # Use a context manager so the handle is always closed (the original
    # leaked the file object returned by open()).
    with open(os.path.join(curdir, 'bibupload_id'), 'w') as task_id_file:
        task_id_file.write(str(bibupload_id))
    return ""
|
vishalkuo/codeStats | utils/writer.py | Python | mit | 636 | 0.004717 | import json
def writeLanguageStats(name, percentages, individual, weights, total):
    """Write a language-usage report for *name* to ``report.txt``.

    :param name: repository or user the stats describe.
    :param percentages: overall language-usage percentages (JSON-serializable).
    :param individual: per-repo breakdown including weights.
    :param weights: avg(langPercentPerProject)/sum(allAvgs) per language.
    :param total: total number of bytes analysed.
    """
    with open('report.txt', 'w') as outfile:
        outfile.write('Total bytes: ' + str(total))
        outfile.write('\n\nOverall code usage for ' + name + ':\n')
        outfile.write(json.dumps(percentages, indent=1))
        outfile.write('\n\nLanguage weights, this is avg(langPercentPerProject)/sum(allAvgs): ')
        outfile.write(json.dumps(weights, indent=1))
        outfile.write('\n\nBreakdown by repo including weights:\n')
        outfile.write(json.dumps(individual, indent=1))
        outfile.write('\n')
    print('Generated report.txt')
|
bike-barn/red-green-refactor | tests/assignment_two/test_calc.py | Python | isc | 2,854 | 0 | import pytest
from calc import INTEGER, EOF, PLUS, Calc, CalcError
def test_calc_raises_error_on_invalid_tokens():
    """
    Test that invalid tokens cause a ``CalcError`` and that the exception stack
    trace contains useful information.
    """
    # Now with 100% more Monty Python references.
    calc = Calc(text="lumberjack")
    with pytest.raises(CalcError) as err:
        calc.parse()
    assert "Invalid token at position 0" in str(err.value)
def test_calc_raises_error_on_unexepected_syntax():
    """
    Test that unexpected syntax causes a ``CalcError`` and that the exception
    stack trace contains useful information.
    """
    calc = Calc(text="+")
    with pytest.raises(CalcError) as err:
        calc.parse()
    assert "Expected INTEGER at position 1, found PLUS" in str(err.value)
def test_calc_finds_eof_token_at_end_of_line():
    """
    Test that, upon finding an end of line, a :class:`Calc` correctly tokenizes
    an EOF :class:`Token`.
    """
    calc = Calc(text="")
    token = calc._next_token()
    assert token.type == EOF
def test_calc_finds_eof_token_after_int():
    """
    Test that after consuming a solitary an INTEGER :class:`Token` a
    :class:`Calc` will correctly tokenize an EOF :class:`Token`.
    """
    calc = Calc(text="1")
    first = calc._next_token()
    assert (first.type, first.value) == (INTEGER, 1)
    assert calc._next_token().type == EOF
def test_calc_can_consume_valid_token():
    """Test that a :class:`Calc` can consume a valid :class:`Token`."""
    input_text = "1+1"
    calc = Calc(text=input_text)
    # Note: Since _next_token advances position one cannot simply
    # >>> calc.current_token = Token(INTEGER, 1)
    # The _next_token method MUST be called or this test will fail.
    calc.current_token = calc._next_token()
    calc._consume_token(INTEGER)
    assert calc.current_token.type == PLUS
def test_parse_supports_addition():
    """Test that a :class:`Calc` can correctly parse the addition operation."""
    # Note: This function name was briefly duplicated and therefore didn't run.
    assert Calc(text="1+1").parse() == 2
def test_parse_sets_eof():
    """
    Test that successfully parsing an arithmetic expression sets the
    ``current_token`` attribute of a :class:`Calc` to EOF.
    """
    input_text = "1+1"
    calc = Calc(text=input_text)
    calc.parse()
    assert calc.current_token.type == EOF
def test_parse_raises_error_on_invalid_expression():
    """
    Test that attempting to parse an invalid expression allows a ``CalcError``
    to propagate correctly.
    """
    calc = Calc(text="+1")
    with pytest.raises(CalcError):
        calc.parse()
|
ThaTiemsz/jetski | rowboat/plugins/censor.py | Python | mit | 9,546 | 0.001152 | import re
import json
import urlparse
from holster.enum import Enum
from unidecode import unidecode
from disco.types.base import cached_property
from disco.types.channel import ChannelType
from disco.util.sanitize import S
from disco.api.http import APIException
from rowboat.redis import rdb
from rowboat.util.stats import timed
from rowboat.util.zalgo import ZALGO_RE
from rowboat.plugins import RowboatPlugin as Plugin
from rowboat.types import SlottedModel, Field, ListField, DictField, ChannelField, snowflake, lower
from rowboat.types.plugin import PluginConfig
from rowboat.models.message import Message
from rowboat.plugins.modlog import Actions
from rowboat.constants import INVITE_LINK_RE, URL_RE
CensorReason = Enum(
'INVITE',
'DOMAIN',
'WORD',
'ZALGO',
)
class CensorSubConfig(SlottedModel):
    # One set of censor rules, applied per permission level or per channel.

    # Strip zalgo (combining-character spam) from messages.
    filter_zalgo = Field(bool, default=True)

    # Invite filtering: guild-id whitelist plus invite-code white/blacklists.
    filter_invites = Field(bool, default=True)
    invites_guild_whitelist = ListField(snowflake, default=[])
    invites_whitelist = ListField(lower, default=[])
    invites_blacklist = ListField(lower, default=[])

    # Domain filtering for URLs posted in messages.
    filter_domains = Field(bool, default=True)
    domains_whitelist = ListField(lower, default=[])
    domains_blacklist = ListField(lower, default=[])

    # Blocklists: words match only on word boundaries, tokens match anywhere.
    blocked_words = ListField(lower, default=[])
    blocked_tokens = ListField(lower, default=[])
    unidecode_tokens = Field(bool, default=False)

    # Restrict this config to a single channel / exempt a single channel.
    channel = Field(snowflake, default=None)
    bypass_channel = Field(snowflake, default=None)

    @cached_property
    def blocked_re(self):
        # Compiled once per config instance (cached_property): tokens match
        # anywhere in the text, words only between \b word boundaries.
        return re.compile(u'({})'.format(u'|'.join(
            map(re.escape, self.blocked_tokens) +
            map(lambda k: u'\\b{}\\b'.format(re.escape(k)), self.blocked_words)
        )), re.I + re.U)
class CensorConfig(PluginConfig):
    # Censor rules keyed by permission level (applies to users at or below
    # that level) and by channel id.
    levels = DictField(int, CensorSubConfig)
    channels = DictField(ChannelField, CensorSubConfig)
# It's bad kids!
class Censorship(Exception):
    """Raised when a message trips a censor rule; carries modlog context.

    ctx is a dict whose keys depend on *reason* (see ``details``).
    """
    def __init__(self, reason, event, ctx):
        self.reason = reason
        self.event = event
        self.ctx = ctx
        # Sanitized copy of the offending message for safe display.
        self.content = S(event.content, escape_codeblocks=True)

    @property
    def details(self):
        # Human-readable description of the violation, used in modlog entries.
        if self.reason is CensorReason.INVITE:
            if self.ctx['guild']:
                return u'invite `{}` to {}'.format(
                    self.ctx['invite'],
                    S(self.ctx['guild']['name'], escape_codeblocks=True)
                )
            else:
                return u'invite `{}`'.format(self.ctx['invite'])
        elif self.reason is CensorReason.DOMAIN:
            if self.ctx['hit'] == 'whitelist':
                return u'domain `{}` is not in whitelist'.format(S(self.ctx['domain'], escape_codeblocks=True))
            else:
                return u'domain `{}` is in blacklist'.format(S(self.ctx['domain'], escape_codeblocks=True))
        elif self.reason is CensorReason.WORD:
            return u'found blacklisted words `{}`'.format(
                u', '.join([S(i, escape_codeblocks=True) for i in self.ctx['words']]))
        elif self.reason is CensorReason.ZALGO:
            return u'found zalgo at position `{}` in text'.format(
                self.ctx['position']
            )
@Plugin.with_config(CensorConfig)
class CensorPlugin(Plugin):
def compute_relevant_configs(self, event, author):
if event.channel_id in event.config.channels:
yield event.config.channels[event.channel.id]
if event.config.levels:
user_level = int(self.bot.plugins.get('CorePlugin').get_level(event.guild, author))
for level, config in event.config.levels.items():
if user_level <= level:
yield config
def get_invite_info(self, code):
if rdb.exists('inv:{}'.format(code)):
return json.loads(rdb.get('inv:{}'.format(code)))
try:
obj = self.client.api.invites_get(code)
except:
return
if obj.channel and obj.channel.type == ChannelType.GROUP_DM:
obj = {
'id': obj.channel.id,
'name': obj.channel.name
}
else:
obj = {
'id': obj.guild.id,
'name': obj.guild.name,
'icon': obj.guild.icon
}
# Cache for 12 hours
rdb.setex('inv:{}'.format(code), json.dumps(obj), 43200)
return obj
@Plugin.listen('MessageUpdate')
def on_message_update(self, event):
try:
msg = Message.get(id=event.id)
except Message.DoesNotExist:
self.log.warning('Not censoring MessageUpdate for id %s, %s, no stored message', event.channel_id, event.id)
return
if not event.content:
return
return self.on_message_create(
event,
author=event.guild.get_member(msg.author_id))
@Plugin.listen('MessageCreate')
def on_message_create(self, event, author=None):
author = author or event.author
if author.id == self.state.me.id:
return
if event.webhook_id:
return
configs = list(self.compute_relevant_configs(event, author))
if not configs:
return
tags = {'guild_id': event.guild.id, 'channel_id': event.channel.id}
with timed('rowboat.plugin.censor.duration', tags=tags):
try:
# TODO: perhaps imap here? how to raise exception then?
for config in configs:
if config.channel:
if event.channel_id != config.channel:
continue
if config.bypass_channel:
if event.channel_id == config.bypass_channel:
continue
if config.filter_zalgo:
self.filter_zalgo(event, config)
if config.filter_invites:
self.filter_invites(event, config)
if config.filter_domains:
self.filter_domains(event, config)
if config.blocked_words or config.blocked_tokens:
self.filter_blocked_words(event, config)
except Censorship as c:
self.call(
'ModLogPlugin.create_debounce',
event,
['MessageDelete'],
message_id=event.message.id,
)
try:
| event.delete()
self.call(
'ModLogPlugin.log_action_ext',
Actions.CENSORED,
event.guild.id,
e=event,
c=c)
except APIException:
self.log.exception('Failed to delete censored message: ')
def filter_zalgo(self, event, config):
s = ZALGO_RE.search(ev | ent.content)
if s:
raise Censorship(CensorReason.ZALGO, event, ctx={
'position': s.start()
})
def filter_invites(self, event, config):
invites = INVITE_LINK_RE.findall(event.content)
for _, invite in invites:
invite_info = self.get_invite_info(invite)
need_whitelist = (
config.invites_guild_whitelist or
(config.invites_whitelist or not config.invites_blacklist)
)
whitelisted = False
if invite_info and invite_info.get('id') in config.invites_guild_whitelist:
whitelisted = True
if invite.lower() in config.invites_whitelist:
whitelisted = True
if need_whitelist and not whitelisted:
raise Censorship(CensorReason.INVITE, event, ctx={
'hit': 'whitelist',
'invite': invite,
'guild': invite_info,
})
elif config.invites_blacklist and invite.lower() in config.invites_blacklist:
raise Censorship(CensorReason.INVITE, event, ctx={
'hit': 'blacklist',
|
jdpepperman/baseballStats | Batters.py | Python | gpl-2.0 | 2,886 | 0.043659 | #Joshua Pepperman
class Batters:
    """Ordered collection of batter objects with fantasy-scoring helpers.

    Batter objects are expected to expose ``getStat(name)`` and a
    ``statDict`` mapping (as used by calculateScores / toCSV / toString).
    """

    def __init__(self):
        self.batters = []

    def __iter__(self):
        return iter(self.batters)

    def __getitem__(self, key):
        return self.batters[key]

    def __len__(self):
        return len(self.batters)

    def indexOf(self, player):
        """Return the position of *player* in the list, or -1 if absent."""
        for index, candidate in enumerate(self.batters):
            if candidate == player:
                return index
        return -1

    def addBatter(self, batter):
        self.batters.append(batter)

    def hasBatter(self, playerName):
        """Return True if a batter named *playerName* exists, else False.

        (The original fell through to an implicit None; False is
        equivalent under truth testing.)
        """
        for player in self.batters:
            if player.getStat('name') == playerName:
                return True
        return False

    def calculateScores(self):
        """Compute each batter's fantasy score into ``statDict['score']``.

        Weights: r=1, h=1, 2b=2, 3b=3, hr=4, rbi=1, sb=2, cs=-1, bb=1.
        """
        weights = (('r', 1), ('h', 1), ('b2', 2), ('b3', 3), ('hr', 4),
                   ('rbi', 1), ('sb', 2), ('cs', -1), ('bb', 1))
        for player in self.batters:
            player.statDict['score'] = sum(
                player.getStat(stat) * weight for stat, weight in weights)

    def getStadiumRank(self, team):
        """Return 1 when *team* contains a recognized MLB team code, else 0.

        Bug fix: the original computed the rank but never returned it.
        NOTE(review): every stadium currently ranks 1 -- the per-team values
        look like placeholders awaiting real rankings.
        """
        codes = ('WSH', 'TOR', 'TEX', 'TB', 'STL', 'SF', 'SEA', 'SD',
                 'PIT', 'PHI', 'OAK', 'NYY', 'NYM', 'MIN', 'MIL', 'MIA',
                 'LAD', 'LAA', 'KC', 'HOU', 'DET', 'COL', 'CLE', 'CIN',
                 'CHW', 'CHC', 'BOS', 'BAL', 'ATL', 'ARI')
        rank = 0
        for code in codes:
            if code in team:
                rank = 1
                break
        return rank

    def getBatter(self, playerName):
        """Return the first batter whose 'name' stat equals *playerName*."""
        for player in self.batters:
            if player.getStat('name') == playerName:
                return player
        return None

    def sortBy(self, index):
        """Sort descending by statDict[*index*]; remembers the key for toString()."""
        self.index = index
        self.batters.sort(key=lambda b: b.statDict[index], reverse=True)

    def toCSV(self):
        """Return all batters as CSV with a fixed header row."""
        header = "name,ab,r,h,2b,3b,hr,rbi,sb,cs,bb,so,avg,obp,slg,ops,war,score\n"
        return header + ''.join(b.toCSV() + '\n' for b in self.batters)

    def toString(self):
        """Return one line per batter; requires a prior sortBy() call."""
        return ''.join(
            b.toString() + "\t" + str(b.getStat(self.index)) + '\n'
            for b in self.batters)

    def toStringInRange(self, rang):
        """Like toString() but only for the indices in *rang*."""
        return ''.join(
            self.batters[i].toString() + "\t" +
            str(self.batters[i].getStat(self.index)) + '\n'
            for i in rang)
|
JuniorJPDJ/aerogui | aerogui_config.py | Python | gpl-2.0 | 416 | 0.016827 | #nazwa komendy sluzacej do ponownego polaczenia aero
# Alternative reconnect command (Android flight-mode toggle via adb):
#reconnect = "reconnects\\android-flightmode\\adb shell < reconnects\\android-flightmode\\reconnect.adb"
# Command used to re-establish the aero connection (Windows rasdial).
reconnect = "reconnects\\windows-rasdial\\rasdial.bat"

# How often to check whether a captcha has appeared (seconds).
sleeptime = 3

# Distance of the captcha window from the bottom edge of the screen.
oddolu = 100

# Distance of the captcha window from the right edge of the screen.
odprawej = 30
IntelLabs/numba | numba/cuda/tests/cudapy/test_sm.py | Python | bsd-2-clause | 14,079 | 0.000142 | from numba import cuda, int32, float64, void
from numba.core.errors import TypingError
from numba.cuda.testing import unittest, CUDATestCase, skip_on_cudasim
import numpy as np
from numba.np import numpy_support as nps
from .extensions_usecases import test_struct_model_type, TestStruct
recordwith2darray = np.dtype([('i', np.int32),
('j', np.float32, (3, 2))])
class TestSharedMemoryIssue(CUDATestCase):
    # Regression tests for historical CUDA shared-memory bugs.

    def test_issue_953_sm_linkage_conflict(self):
        # Shared arrays declared in both a device function and its caller
        # must not produce a linkage conflict when compiled together.
        @cuda.jit(device=True)
        def inner():
            inner_arr = cuda.shared.array(1, dtype=int32) # noqa: F841
        @cuda.jit
        def outer():
            outer_arr = cuda.shared.array(1, dtype=int32) # noqa: F841
            inner()
        outer[1, 1]()

    def _check_shared_array_size(self, shape, expected):
        # Helper: launch a kernel that reports the .size of a shared array
        # of the given shape and compare against the expected element count.
        @cuda.jit
        def s(a):
            arr = cuda.shared.array(shape, dtype=int32)
            a[0] = arr.size
        result = np.zeros(1, dtype=np.int32)
        s[1, 1](result)
        self.assertEqual(result[0], expected)

    def test_issue_1051_shared_size_broken_1d(self):
        self._check_shared_array_size(2, 2)

    def test_issue_1051_shared_size_broken_2d(self):
        self._check_shared_array_size((2, 3), 6)

    def test_issue_1051_shared_size_broken_3d(self):
        self._check_shared_array_size((2, 3, 4), 24)

    def test_issue_2393(self):
        """
        Test issue of warp misalign address due to nvvm not knowing the
        alignment(? but it should have taken the natural alignment of the type)
        """
        num_weights = 2
        num_blocks = 48
        examples_per_block = 4
        threads_per_block = 1
        @cuda.jit
        def costs_func(d_block_costs):
            s_features = cuda.shared.array((examples_per_block, num_weights),
                                           float64)
            s_initialcost = cuda.shared.array(7, float64)  # Bug
            threadIdx = cuda.threadIdx.x
            prediction = 0
            for j in range(num_weights):
                prediction += s_features[threadIdx, j]
            d_block_costs[0] = s_initialcost[0] + prediction
        block_costs = np.zeros(num_blocks, dtype=np.float64)
        d_block_costs = cuda.to_device(block_costs)
        costs_func[num_blocks, threads_per_block](d_block_costs)
        cuda.synchronize()
class TestSharedMemory(CUDATestCase):
def _test_shared(self, arr):
# Use a kernel that copies via shared memory to check loading and
# storing different dtypes with shared memory. All threads in a block
# collaborate to load in values, then the output values are written
# only by the first thread in the block after synchronization.
nelem = len(arr)
nthreads = 16
nblocks = int(nelem / nthreads)
dt = nps.from | _dtype(arr.dtype)
@cuda.jit
def use_sm_chunk_copy(x, y):
sm = cuda.shared.array(nthreads, dtype=dt)
tx = cuda.threadIdx.x
bx = cuda.blockIdx.x
bd = cuda.blockDim.x
# Load this block's chunk i | nto shared
i = bx * bd + tx
if i < len(x):
sm[tx] = x[i]
cuda.syncthreads()
# One thread per block writes this block's chunk
if tx == 0:
for j in range(nthreads):
y[bd * bx + j] = sm[j]
d_result = cuda.device_array_like(arr)
use_sm_chunk_copy[nblocks, nthreads](arr, d_result)
host_result = d_result.copy_to_host()
np.testing.assert_array_equal(arr, host_result)
def test_shared_recarray(self):
arr = np.recarray(128, dtype=recordwith2darray)
for x in range(len(arr)):
arr[x].i = x
j = np.arange(3 * 2, dtype=np.float32)
arr[x].j = j.reshape(3, 2) * x
self._test_shared(arr)
def test_shared_bool(self):
arr = np.random.randint(2, size=(1024,), dtype=np.bool_)
self._test_shared(arr)
def _test_dynshared_slice(self, func, arr, expected):
# Check that slices of shared memory are correct
# (See Bug #5073 - prior to the addition of these tests and
# corresponding fix, slices of dynamic shared arrays all aliased each
# other)
nshared = arr.size * arr.dtype.itemsize
func[1, 1, 0, nshared](arr)
np.testing.assert_array_equal(expected, arr)
def test_dynshared_slice_write(self):
# Test writing values into disjoint slices of dynamic shared memory
@cuda.jit
def slice_write(x):
dynsmem = cuda.shared.array(0, dtype=int32)
sm1 = dynsmem[0:1]
sm2 = dynsmem[1:2]
sm1[0] = 1
sm2[0] = 2
x[0] = dynsmem[0]
x[1] = dynsmem[1]
arr = np.zeros(2, dtype=np.int32)
expected = np.array([1, 2], dtype=np.int32)
self._test_dynshared_slice(slice_write, arr, expected)
def test_dynshared_slice_read(self):
# Test reading values from disjoint slices of dynamic shared memory
@cuda.jit
def slice_read(x):
dynsmem = cuda.shared.array(0, dtype=int32)
sm1 = dynsmem[0:1]
sm2 = dynsmem[1:2]
dynsmem[0] = 1
dynsmem[1] = 2
x[0] = sm1[0]
x[1] = sm2[0]
arr = np.zeros(2, dtype=np.int32)
expected = np.array([1, 2], dtype=np.int32)
self._test_dynshared_slice(slice_read, arr, expected)
def test_dynshared_slice_diff_sizes(self):
# Test reading values from disjoint slices of dynamic shared memory
# with different sizes
@cuda.jit
def slice_diff_sizes(x):
dynsmem = cuda.shared.array(0, dtype=int32)
sm1 = dynsmem[0:1]
sm2 = dynsmem[1:3]
dynsmem[0] = 1
dynsmem[1] = 2
dynsmem[2] = 3
x[0] = sm1[0]
x[1] = sm2[0]
x[2] = sm2[1]
arr = np.zeros(3, dtype=np.int32)
expected = np.array([1, 2, 3], dtype=np.int32)
self._test_dynshared_slice(slice_diff_sizes, arr, expected)
def test_dynshared_slice_overlap(self):
# Test reading values from overlapping slices of dynamic shared memory
@cuda.jit
def slice_overlap(x):
dynsmem = cuda.shared.array(0, dtype=int32)
sm1 = dynsmem[0:2]
sm2 = dynsmem[1:4]
dynsmem[0] = 1
dynsmem[1] = 2
dynsmem[2] = 3
dynsmem[3] = 4
x[0] = sm1[0]
x[1] = sm1[1]
x[2] = sm2[0]
x[3] = sm2[1]
x[4] = sm2[2]
arr = np.zeros(5, dtype=np.int32)
expected = np.array([1, 2, 2, 3, 4], dtype=np.int32)
self._test_dynshared_slice(slice_overlap, arr, expected)
def test_dynshared_slice_gaps(self):
# Test writing values to slices of dynamic shared memory doesn't write
# outside the slice
@cuda.jit
def slice_gaps(x):
dynsmem = cuda.shared.array(0, dtype=int32)
sm1 = dynsmem[1:3]
sm2 = dynsmem[4:6]
# Initial values for dynamic shared memory, some to be overwritten
dynsmem[0] = 99
dynsmem[1] = 99
dynsmem[2] = 99
dynsmem[3] = 99
dynsmem[4] = 99
dynsmem[5] = 99
dynsmem[6] = 99
sm1[0] = 1
sm1[1] = 2
sm2[0] = 3
sm2[1] = 4
x[0] = dynsmem[0]
x[1] = dynsmem[1]
x[2] = dynsmem[2]
x[3] = dynsmem[3]
x[4] = dynsmem[4]
x[5] = dynsmem[5]
x[6] = dynsmem[6]
arr = np.zeros(7, dtype=np.int32)
expected = np.array([99, 1, 2, 99, 3, 4, 99], dtype=np.int32)
self._test_dynshared_slice(slice_gaps, arr, expected)
def test_dynshared_slice_write_backwards(self):
# Test writing values into disjoint slices of dynamic shared memory
# with negative steps
@cuda.jit
def slice_write_backwards |
proxeIO/name-panel | addon/interface/operator/shared.py | Python | gpl-3.0 | 1,967 | 0.001017 |
# sort
def sort(layout, option):
  """Draw the batch-sort options row into *layout* (Blender UI).

  option holds the operator settings read here: sort, type, axis, invert.
  The nested row() calls build progressively narrower aligned sub-rows.
  """
  # separate
  layout.separator()
  # row
  row = layout.row(align=True)
  # sub
  sub = row.row(align=True)
  # scale x
  sub.scale_x = 0.2
  # sort
  sub.prop(option, 'sort', toggle=True)
  # sub sub
  subsub = sub.row(align=True)
  # active -- widgets below are greyed out unless sorting is enabled
  subsub.active = option.sort
  # scale x
  subsub.scale_x = 5
  # type
  subsub.prop(option, 'type', text='')
  # sub sub sub
  subsubsub = subsub.row(align=True)
  # active -- axis choice only matters for positional sorting
  subsubsub.active = option.type == 'POSITIONAL'
  ## tmp
  # subsubsub.prop(option, 'invert', icon='CURSOR', text='')
  # sub sub sub sub
  # subsubsubsub = subsubsub.row(align=True)
  # scale x
  subsubsub.scale_x = 0.3
  # axis
  subsubsub.prop(option, 'axis', expand=True)
  # subsub
  subsub = sub.row(align=True)
  # active
  subsub.active = option.sort
  # scale x
  subsub.scale_x = 5.5
  # pick the icon that reflects the current invert state
  toggle = 'FULLSCREEN_ENTER' if option.invert else 'FULLSCREEN_EXIT'
  # dummy 9
  subsub.prop(option, 'invert', text='', icon=toggle, toggle=True)
# count
def count(layout, option):
  """Draw the rename-count options row into *layout* (Blender UI).

  option holds the operator settings read here: count, link, pad, start,
  step, separator.
  """
  # separate
  layout.separator()
  # row
  row = layout.row(align=True)
  # sub
  sub = row.row(align=True)
  # scale
  sub.scale_x = 0.2
  # count toggle
  sub.prop(option, 'count', toggle=True)
  # sub
  subsub = sub.row(align=True)
  # grey out the detail widgets unless counting is enabled
  subsub.active = option.count
  # pick the icon that reflects the current link state
  icon = 'LINKED' if option.link else 'UNLINKED'
  # link
  subsub.prop(option, 'link', text='', icon=icon)
  # pad
  subsub.prop(option, 'pad', text='Pad')
  # start
  subsub.prop(option, 'start', text='Start')
  # step
  subsub.prop(option, 'step', text='Step')
  # sub
  subsubsub = subsub.row(align=True)
  # scale
  subsubsub.scale_x = 0.1
  # separate
  subsubsub.prop(option, 'separator', text='')
  # ignore
  # subsub.prop(option, 'ignore', text='', icon='ZOOM_PREVIOUS')
|
nicoechaniz/IPP | bdd/features/environment.py | Python | agpl-3.0 | 3,472 | 0.004616 | # IPP, Plataforma web del Índice de Precios Popular
# Copyright (c) 2016 Nicolás Echániz and contributors.
#
# This file is part of IPP
#
# IPP is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from behave import *
from splinter.browser import Browser
from django.core import management
from ipp.relevamiento.factories import *
from ipp.relevamiento.constants import RELEVADOR, COORD_ZONAL, COORD_JURISDICCIONAL, COORD_REGIONAL, COORD_GRAL
def before_all(context):
    """Open the Splinter browser shared by the whole test run."""
    # Unless the runner configured another browser, default to PhantomJS.
    browser_name = context.config.browser or 'phantomjs'
    context.browser = Browser(browser_name)
    # With a small window some elements fail when we try to click them.
    context.browser.driver.set_window_size(1280, 1024)
def before_scenario(context, scenario):
    """Flush the database and seed the fixtures every scenario relies on."""
    management.call_command('flush', verbosity=0, interactive=False)
    # At this stage we can (optionally) mock additional data to setup in the database.
    # For example, if we know that all of our tests require a 'SiteConfig' object,
    # we could create it here.
    region = RegionFactory(nombre="C.A.B.A.")
    jurisdiccion = JurisdiccionFactory(nombre="C.A.B.A.", region=region)
    zona = ZonaFactory(nombre="Comunas 5 y 3", jurisdiccion=jurisdiccion)
    ComercioFactory(zona=zona)
    # One user per role of the coordination hierarchy.
    user_r = UserFactory()
    relevador = PerfilFactory(rol=RELEVADOR, user=user_r, zonas=[zona])
    user_cz = UserFactory()
    coordinador_z = PerfilFactory(rol=COORD_ZONAL, user=user_cz,
                                  zonas=[zona],)
    user_cj = UserFactory()
    coordinador_j = PerfilFactory(rol=COORD_JURISDICCIONAL, user=user_cj,
                                  jurisdicciones=[jurisdiccion],)
    user_cr = UserFactory()
    coordinador_r = PerfilFactory(rol=COORD_REGIONAL, user=user_cr,
                                  regiones=[region],)
    user_cg = UserFactory()
    coordinador_g = PerfilFactory(rol=COORD_GRAL, user=user_cg)
    rubro = RubroFactory(nombre="Almacén")
    planilla_modelo = PlanillaModeloFactory(nombre="Formulario de precios",
                                           habilitada=True)
    # Two generic products, ordered 1..n on the model sheet.
    for contador, nombre in enumerate(["Azúcar", "Aceite"], start=1):
        producto = ProductoGenericoFactory(nombre=nombre, rubro=rubro)
        ProductoEnPlanillaFactory(producto_generico=producto,
                                  planilla_modelo=planilla_modelo,
                                  orden=contador)

    context.MARCA_POR_DEFECTO = "UnaMarca"
def after_all(context):
    # Shut down the shared browser once the whole suite has finished.
    context.browser.quit()
    context.browser = None
def after_step(context, step):
    # Drop into a post-mortem debugger whenever a step fails.
    if step.status == "failed":
        # NOTE: step.exc_info does not exist at this point; behave only
        # provides step.exc_traceback, so that is what we hand to ipdb.
        import ipdb
        ipdb.post_mortem(step.exc_traceback)
|
Teino1978-Corp/pycallgraph | pycallgraph/pycallgraph.py | Python | gpl-2.0 | 2,634 | 0 | import locale
from .output import Output
from .config import Config
from .tracer import AsyncronousTracer, SyncronousTracer
from .exceptions import PyCallGraphException
class PyCallGraph(object):
    """Tracing context manager: records calls and feeds them to Output(s)."""

    def __init__(self, output=None, config=None):
        '''output can be a single Output instance or an iterable with many
        of them.  Example usage:

            PyCallGraph(output=GraphvizOutput(), config=Config())
        '''
        locale.setlocale(locale.LC_ALL, '')

        if output is None:
            self.output = []
        elif isinstance(output, Output):
            self.output = [output]
        else:
            self.output = output

        self.config = config or Config()

        # An output may also be declared via the configuration.
        configured_ouput = self.config.get_output()
        if configured_ouput:
            self.output.append(configured_ouput)

        self.reset()

    def __enter__(self):
        self.start()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.done()

    def get_tracer_class(self):
        # Threaded mode processes trace events on a background thread.
        if self.config.threaded:
            return AsyncronousTracer
        else:
            return SyncronousTracer

    def reset(self):
        '''Resets all collected statistics. This is run automatically by
        start(reset=True) and when the class is initialized.
        '''
        self.tracer = self.get_tracer_class()(self.output, config=self.config)

        for output in self.output:
            self.prepare_output(output)

    def start(self, reset=True):
        '''Begins a trace. Setting reset to True will reset all previously
        recorded trace data.
        '''
        if not self.output:
            raise PyCallGraphException(
                'No outputs declared. Please see the '
                'examples in the online documentation.'
            )

        if reset:
            self.reset()

        for output in self.output:
            output.start()

        self.tracer.start()

    def stop(self):
        '''Stops the currently running trace, if any.'''
        self.tracer.stop()

    def done(self):
        '''Stops the trace and tells the outputters to generate their
        output.
        '''
        self.stop()
        self.generate()

    def generate(self):
        # If in threaded mode, wait for the processor thread to complete
        self.tracer.done()

        for output in self.output:
            output.done()

    def add_output(self, output):
        self.output.append(output)
        self.prepare_output(output)

    def prepare_output(self, output):
        output.sanity_check()
        output.set_processor(self.tracer.processor)
        output.reset()
|
seewindcn/tortoisehg | src/tortoisehg/hgqt/messageentry.py | Python | gpl-2.0 | 9,500 | 0.001368 | # messageentry.py - TortoiseHg's commit message editng widget
#
# Copyright 2011 Steve Borho <steve@borho.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2, incorporated herein by reference.
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.Qsci import QsciScintilla, QsciLexerMakefile
from tortoisehg.util.i18n import _
from tortoisehg.hgqt import qtlib, qscilib
import re
class MessageEntry(qscilib.Scintilla):
    def __init__(self, parent, getCheckedFunc=None):
        """Configure the QScintilla widget for commit-message editing.

        getCheckedFunc, if given, returns the currently checked file names
        and enables the 'Paste Filenames' context-menu action.
        """
        super(MessageEntry, self).__init__(parent)
        # Vertical guide line marking the summary-line length limit.
        self.setEdgeColor(QColor('LightSalmon'))
        self.setEdgeMode(QsciScintilla.EdgeLine)
        self.setReadOnly(False)
        self.setMarginWidth(1, 0)
        self.setFont(qtlib.getfont('fontcomment').font())
        self.setCaretWidth(10)
        self.setCaretLineBackgroundColor(QColor("#e6fff0"))
        self.setCaretLineVisible(True)
        self.setAutoIndent(True)
        self.setAutoCompletionSource(QsciScintilla.AcsAPIs)
        self.setAutoCompletionFillupsEnabled(True)
        self.setMatchedBraceBackgroundColor(Qt.yellow)
        self.setIndentationsUseTabs(False)
        self.setBraceMatching(QsciScintilla.SloppyBraceMatch)
        # Workaround, see:
        # http://www.riverbankcomputing.com/pipermail/qscintilla/2009-February/000461.html
        self.setHorizontalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        self.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        # default message entry widgets to word wrap, user may override
        self.setWrapMode(QsciScintilla.WrapWord)
        self.getChecked = getCheckedFunc
        self.setContextMenuPolicy(Qt.CustomContextMenu)
        self.customContextMenuRequested.connect(self.menuRequested)
        self.applylexer()
        # Matches list-item markers (e.g. "1.", "(2)", "(@)") used as reflow
        # boundaries in reflowBlock().
        self._re_boundary = re.compile('[0-9i#]+\.|\(?[0-9i#]+\)|\(@\)')
def setText(self, text):
result = super(MessageEntry, self).setText(text)
self.setDefaultEolMode()
return result
    def applylexer(self):
        """Enable or disable syntax highlighting per the saved setting."""
        font = qtlib.getfont('fontcomment').font()
        self.fontHeight = QFontMetrics(font).height()
        # 'msgentry/lexer' defaults to True; toggled via the context menu.
        if QSettings().value('msgentry/lexer', True).toBool():
            self.setLexer(QsciLexerMakefile(self))
            self.lexer().setColor(QColor(Qt.red), QsciLexerMakefile.Error)
            self.lexer().setFont(font)
        else:
            self.setLexer(None)
            self.setFont(font)
    @pyqtSlot(QPoint)
    def menuRequested(self, point):
        """Show the custom context menu at the clicked viewport position.

        Connected to customContextMenuRequested in __init__.
        """
        menu = self._createContextMenu(point)
        menu.exec_(self.viewport().mapToGlobal(point))
        # Detach from the widget afterwards — presumably so Qt can release
        # the menu object; confirm against other menus in this codebase.
        menu.setParent(None)
def _createContextMenu(self, point):
line = self.lineAt(point)
lexerenabled = self.lexer() is not None
def apply():
firstline, firstcol, lastline, lastcol = self.getSelection()
if firstline < 0:
line = 0
else:
line = firstline
self.beginUndoAction()
while True:
line = self.reflowBlock(line)
if line is None or (line > lastline > -1):
break
self.endUndoAction()
def paste():
files = self.getChecked()
self.insert('\n'.join(sorted(files)))
def settings():
from tortoisehg.hgqt.settings import SettingsDialog
dlg = SettingsDialog(True, focus='tortoisehg.summarylen')
dlg.exec_()
def togglelexer():
QSettings().setValue('msgentry/lexer', not lexerenabled)
self.applylexer()
menu = self.createEditorContextMenu()
menu.addSeparator()
a = menu.addAction(_('Syntax Highlighting'))
a.setCheckable(True)
a.setChecked(lexerenabled)
a.triggered.connect(togglelexer)
menu.addSeparator()
if self.getChecked:
action = menu.addAction(_('Paste &Filenames'))
action.triggered.connect(paste)
for name, func in [(_('App&ly Format'), apply),
(_('C&onfigure Format'), settings)]:
def add(name, func):
action = menu.addAction(name)
action.triggered.connect(func)
add(name, func)
return menu
def refresh(self, repo):
self.setEdgeColumn(repo.summarylen)
self.setIndentationWidth(repo.tabwidth)
self.setTabWidth(repo.tabwidth)
self.summarylen = repo.summarylen
def reflowBlock(self, line):
lines = unicode(self.text()).splitlines()
if line >= len(lines):
return None
if not len(lines[line]) > 1:
return line+1
# find boundaries (empty lines or bounds)
def istopboundary(linetext):
# top boundary lines are those that begin with a Markdown style marker
# or are empty
if not linetext:
return True
if (linetext[0] in '#-*+'):
return True
if len(linetext) >= 2:
if linetext[:2] in ('> ', '| '):
return True
if self._re_boundary.match(linetext):
return True
return False
def isbottomboundary(linetext):
# bottom boundary lines are those that end with a period
# or are empty
if not linetext or linetext[-1] == '.':
return True
return False
def isanyboundary(linetext):
if len(linetext) >= 3:
if linetext[:3] in ('~~~', '```', '---', '==='):
return True
return False
b = line
while b and len(lines[b-1]) > 1:
linetext = lines[b].strip()
if istopboundary(linetext) or isanyboundary(linetext):
break
if b >= 1:
nextlinetext = lines[b - 1].strip()
if isbottomboundary(nextlinetext) \
or isanyboundary(nextlinetext):
break
b -= 1
e = line
while e+1 < len(lines) and len(lines[e+1]) > 1:
linetext = lines[e].strip()
if isbottomboundary(linetext) or isanyboundary(linetext):
break
nextlinetext = lines[e + 1].strip()
if isanyboundary(nextlinetext) or istopboundary(nextlinetext):
break
e += 1
if b == e == 0:
return line + 1
group = [lines[l] for l in xrange(b, e+1)]
MARKER = u'\033\033\033\033\033'
curlinenum, curcol = self.getCursorPosition()
if b <= curlinenum <= e:
# insert a "marker" at the cursor position
l = group[curlinenum - b]
group[curlinenum - b] = l[:curcol] + MARKER + l[curcol:]
firstlinetext = lines[b]
if firstlinetext:
indentcount = len(firstlinetext) - len(firstlinetext.lstrip())
firstindent = firstlinetext[:indentcount]
else:
indentcount = 0
firstindent = ''
parts = []
for l in group:
parts.extend(l.split())
outlines = []
line = []
partslen = indentcount - 1
newcurlinenum, newcurcol = b, 0
for part in parts:
if MARKER and MARKER in part:
# wherever the marker is found, that is where the cursor
# must be moved to after the reflow is done
newcurlinenum = b + len(outlines)
newcurcol = len(' '.join(line)) + 1 + part.index(MARKER)
part = part.replace(MARKER, '')
MARKER = None # there is no need to search any more
if not part:
continue
if partslen + len(line) + len(part) + 1 > self.summarylen:
if line:
linetext = ' '.join(line)
if len(outlines) == 0 and firstindent:
linetext = firstindent + linetext
outlines.append(linetext)
line, partslen = [], 0
line.appen |
shimpe/frescobaldi | frescobaldi_app/qpopplerview/locking.py | Python | gpl-2.0 | 1,381 | 0.002896 | # This file is part of the qpopplerview package.
#
# Copyright (c) 2010 - 2014 by Wilbert Berendsen
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
# See http://www.gnu.org/licenses/ for more information.
"""
Manages locking access (across threads) to Poppl | er.Document inst | ances.
"""
import threading
import weakref

# One RLock per document, dropped automatically when the document dies.
_locks = weakref.WeakKeyDictionary()
# Guards creation of per-document locks.
_lock = threading.RLock()


def lock(document):
    """Return a threading.RLock instance for the given Poppler.Document.

    Use:

    with lock(document):
        do_something

    """
    with _lock:
        doc_lock = _locks.get(document)
        if doc_lock is None:
            doc_lock = _locks[document] = threading.RLock()
        return doc_lock
|
AnnalisaS/migration_geonode | geonode/base/models.py | Python | gpl-3.0 | 21,558 | 0.005566 | from datetime import datetime
import os
import hashlib
from django.db import models
from django.db.models import Q
from django.contrib.auth.models import User
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError, ObjectDoesNotExist
from django.core.files.base import ContentFile
from django.conf import settings
from django.contrib.staticfiles.templatetags import staticfiles
from geonode.base.enumerations import ALL_LANGUAGES, \
HIERARCHY_LEVELS, UPDATE_FREQUENCIES, \
DEFAULT_SUPPLEMENTAL_INFORMATION, LINK_TYPES
from geonode.utils import bbox_to_wkt
from geonode.people.models import Profile, Role
from geonode.security.models import PermissionLevelMixin
from taggit.managers import TaggableManager
def get_default_category():
    """Return the TopicCategory named by settings.DEFAULT_TOPICCATEGORY.

    Falls back to the first TopicCategory when the setting is empty;
    raises TopicCategory.DoesNotExist with an explicit message when the
    configured identifier is unknown.
    """
    if not settings.DEFAULT_TOPICCATEGORY:
        return TopicCategory.objects.all()[0]
    try:
        return TopicCategory.objects.get(identifier=settings.DEFAULT_TOPICCATEGORY)
    except TopicCategory.DoesNotExist:
        raise TopicCategory.DoesNotExist('The default TopicCategory indicated in settings is not found.')
class ContactRole(models.Model):
    """
    ContactRole is an intermediate abstract model to bind Profiles as Contacts to Layers and apply roles.
    """
    resource = models.ForeignKey('ResourceBase')
    contact = models.ForeignKey(Profile)
    role = models.ForeignKey(Role)

    def clean(self):
        """
        Make sure there is only one poc and author per resource
        """
        if (self.role == self.resource.poc_role) or (self.role == self.resource.metadata_author_role):
            contacts = self.resource.contacts.filter(contactrole__role=self.role)
            if contacts.count() == 1:
                # only allow this if we are updating the same contact
                if self.contact != contacts.get():
                    raise ValidationError('There can be only one %s for a given resource' % self.role)
        if self.contact.user is None:
            # verify that any unbound contact is only associated to one resource
            bounds = ContactRole.objects.filter(contact=self.contact).count()
            # BUG FIX: the original applied '%' to a format string with no
            # placeholder ("'...' % self.role"), which raised TypeError
            # instead of the intended ValidationError.
            if bounds > 1:
                raise ValidationError('There can be one and only one resource linked to an unbound contact')
            elif bounds == 1:
                # verify that if there was one already, it corresponds to this instance
                if ContactRole.objects.filter(contact=self.contact).get().id != self.id:
                    raise ValidationError('There can be one and only one resource linked to an unbound contact')

    class Meta:
        unique_together = (("contact", "resource", "role"),)
class TopicCategory(models.Model):
    """
    Metadata about high-level geographic data thematic classification.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_MD_TopicCategoryCode">
    """
    # TC211 code list identifier, e.g. 'location' (the default).
    identifier = models.CharField(max_length=255, default='location')
    # Official ISO description of the category.
    description = models.TextField()
    # GeoNode-specific, user-facing description (used by __unicode__).
    gn_description = models.TextField('GeoNode description', default='', null=True)
    # Whether the category is offered as a choice in metadata forms.
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        return u"{0}".format(self.gn_description)

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Topic Categories'
class SpatialRepresentationType(models.Model):
    """
    Metadata information about the spatial representation type.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_SpatialRepresentationTypeCode">
    """
    # TC211 code; editable=False keeps it fixed in the admin.
    identifier = models.CharField(max_length=255, editable=False)
    description = models.CharField(max_length=255, editable=False)
    # User-facing label (used by __unicode__).
    gn_description = models.CharField('GeoNode description', max_length=255)
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        return self.gn_description

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Spatial Representation Types'
class Region(models.Model):
    """A named geographic region used in metadata (code plus display name)."""
    code = models.CharField(max_length=50)
    name = models.CharField(max_length=255)

    def __unicode__(self):
        return self.name

    class Meta:
        ordering = ("name",)
        verbose_name_plural = 'Metadata Regions'
class RestrictionCodeType(models.Model):
    """
    Metadata information about the spatial representation type.
    It should reflect a list of codes from TC211
    See: http://www.isotc211.org/2005/resources/Codelist/gmxCodelists.xml
    <CodeListDictionary gml:id="MD_RestrictionCode">
    """
    # TC211 code; editable=False keeps it fixed in the admin.
    identifier = models.CharField(max_length=255, editable=False)
    description = models.TextField(max_length=255, editable=False)
    # User-facing label (used by __unicode__).
    gn_description = models.TextField('GeoNode description', max_length=255)
    is_choice = models.BooleanField(default=True)

    def __unicode__(self):
        return self.gn_description

    class Meta:
        ordering = ("identifier",)
        verbose_name_plural = 'Metadata Restriction Code Types'
class Thumbnail(models.Model):
    """Stored thumbnail image plus the spec used to render it."""
    thumb_file = models.FileField(upload_to='thumbs')
    thumb_spec = models.TextField(null=True, blank=True)
    # Incremented on every save so each version gets a new filename.
    version = models.PositiveSmallIntegerField(null=True, default=0)

    def save_thumb(self, image, id):
        """image must be png data in a string for now"""
        # NOTE: `id` shadows the builtin; it is an identifier string mixed
        # with the version to derive a unique md5-based filename.
        self._delete_thumb()
        md5 = hashlib.md5()
        md5.update(id + str(self.version))
        self.version = self.version + 1
        self.thumb_file.save(md5.hexdigest() + ".png", ContentFile(image))

    def _delete_thumb(self):
        # Best-effort removal of the previous file; a missing file is ignored.
        try:
            self.thumb_file.delete()
        except OSError:
            pass

    def delete(self):
        # Remove the file from storage before deleting the database row.
        self._delete_thumb()
        super(Thumbnail,self).delete()
class ThumbnailMixin(object):
    """
    Add Thumbnail management behavior. The model must declared a field
    named thumbnail.
    """

    def save_thumbnail(self, spec, save=True):
        """
        Generic support for saving. `render` implementation must exist
        and return image as bytes of a png image (for now)
        """
        render = getattr(self, '_render_thumbnail', None)
        if render is None:
            raise Exception('Must have _render_thumbnail(spec) function')
        image = render(spec)
        # Clean any orphan Thumbnail before
        Thumbnail.objects.filter(resourcebase__id=None).delete()
        self.thumbnail, created = Thumbnail.objects.get_or_create(resourcebase__id=self.id)
        path = self._thumbnail_path()
        self.thumbnail.thumb_spec = spec
        self.thumbnail.save_thumb(image, path)
        # have to save the thumb ref if new but also trigger XML regeneration
        if save:
            self.save()

    def _thumbnail_path(self):
        # Identifier string used to derive the thumbnail filename.
        return '%s-%s' % (self._meta.object_name, self.pk)

    def _get_default_thumbnail(self):
        return getattr(self, "_missing_thumbnail", staticfiles.static(settings.MISSING_THUMBNAIL))

    def get_thumbnail_url(self):
        """Return the thumbnail file URL, or the default placeholder."""
        # BUG FIX: the original used the fragile `cond and a or b` idiom with
        # `== None`; if the default URL were ever falsy it would have fallen
        # through to `thumb.thumb_file.url` with thumb=None (AttributeError).
        thumb = self.thumbnail
        if thumb is None:
            return self._get_default_thumbnail()
        return thumb.thumb_file.url

    def has_thumbnail(self):
        '''Determine if the thumbnail object exists and an image exists'''
        thumb = self.thumbnail
        return os.path.exists(self._thumbnail_path()) if thumb else False
class ResourceBaseManager(models.Manager):
    """Default manager with helpers shared by all resource models."""

    def admin_contact(self):
        """Return the Profile of the first superuser (creating it if needed)."""
        # this assumes there is at least one superuser
        admins = User.objects.filter(is_superuser=True).order_by('id')
        if admins.count() == 0:
            raise RuntimeError('GeoNode needs at least one admin/superuser set')
        profile, _created = Profile.objects.get_or_create(
            user=admins[0], defaults={"name": "Geonode Admin"})
        return profile
class License(models.Model):
name = models.CharField(max_length=100)
description = models.TextFiel |
valeriog-crytek/glad | glad/lang/c/generator.py | Python | mit | 9,993 | 0.005404 | import os
import sys
from glad.lang.common.generator import Generator
from glad.lang.common.util import makefiledir
if sys.version_info >= (3, 0):
from urllib.request import urlretrieve
else:
from urllib import urlretrieve
KHRPLATFORM = 'https://www.khronos.org/registry/egl/api/KHR/khrplatform.h'
class CGenerator(Generator):
    def open(self):
        """Open the output .c/.h files and fetch khrplatform.h if missing.

        Non-'gl' specs get a suffixed filename (glad_egl.c, glad_wgl.h, ...).
        Returns self so calls can be chained / used as a context.
        """
        suffix = ''
        if not self.spec.NAME == 'gl':
            suffix = '_{}'.format(self.spec.NAME)
        # Header path recorded for the #include emitted in generate_features.
        self.h_include = '<glad/glad{}.h>'.format(suffix)
        # NOTE(review): make_path is presumably a util helper that also
        # creates missing parent directories — confirm in glad.lang.common.util.
        self._f_c = open(make_path(self.path, 'src',
                                   'glad{}.c'.format(suffix)), 'w')
        self._f_h = open(make_path(self.path, 'include',
                                   'glad', 'glad{}.h'.format(suffix)), 'w')
        # Download KHR/khrplatform.h once; later runs reuse the cached copy.
        khr = os.path.join(self.path, 'include', 'KHR')
        khrplatform = os.path.join(khr, 'khrplatform.h')
        if not os.path.exists(khrplatform):
            if not os.path.exists(khr):
                os.makedirs(khr)
            urlretrieve(KHRPLATFORM, khrplatform)
        return self
    def close(self):
        """Close the generated .c and .h file handles opened by open()."""
        self._f_c.close()
        self._f_h.close()
    def generate_loader(self, features, extensions):
        """Emit the C loader code for every requested API into the .c file.

        For each API this writes: one static load_<name>() per feature and
        per extension, find_extensions<API>(), find_core<API>() and the
        public gladLoad<API>Loader() entry point. Finally closes the header.
        """
        f = self._f_c

        # egl/wgl have no versioned core features to load by pointer.
        if self.spec.NAME in ('egl', 'wgl'):
            features = {'egl' : [], 'wgl' : []}

        written = set()
        for api, version in self.api.items():
            for feature in features[api]:
                f.write('static void load_{}(GLADloadproc load) {{\n'
                        .format(feature.name))
                if self.spec.NAME in ('gl','glx','wgl'):
                    f.write('\tif(!GLAD_{}) return;\n'.format(feature.name))
                for func in feature.functions:
                    f.write('\tglad_{0} = (PFN{1}PROC)load("{0}");\n'
                            .format(func.proto.name, func.proto.name.upper()))
                f.write('}\n')
            for ext in extensions[api]:
                # Skip extensions with no functions or already emitted ones.
                if len(list(ext.functions)) == 0 or ext.name in written:
                    continue
                f.write('static void load_{}(GLADloadproc load) {{\n'
                        .format(ext.name))
                if self.spec.NAME in ('gl','glx','wgl'):
                    f.write('\tif(!GLAD_{}) return;\n'.format(ext.name))
                # These two GLX extensions need SGI headers; guard them.
                if ext.name == 'GLX_SGIX_video_source': f.write('#ifdef _VL_H_\n')
                if ext.name == 'GLX_SGIX_dmbuffer': f.write('#ifdef _DM_BUFFER_H_\n')
                for func in ext.functions:
                    # even if they were in written we need to load it
                    f.write('\tglad_{0} = (PFN{1}PROC)load("{0}");\n'
                            .format(func.proto.name, func.proto.name.upper()))
                if ext.name in ('GLX_SGIX_video_source', 'GLX_SGIX_dmbuffer'):
                    f.write('#else\n')
                    f.write('\t(void)load;\n')
                    f.write('#endif\n')
                f.write('}\n')
                written.add(ext.name)
            f.write('static void find_extensions{}(void) {{\n'.format(api.upper()))
            if self.spec.NAME in ('gl','glx','wgl'):
                for ext in extensions[api]:
                    f.write('\tGLAD_{0} = has_ext("{0}");\n'.format(ext.name))
            f.write('}\n\n')
            # glx/wgl loaders need extra context arguments for core detection.
            if api == 'glx':
                f.write('static void find_core{}(Display *dpy, int screen) {{\n'.format(api.upper()))
            elif api == 'wgl':
                f.write('static void find_core{}(HDC hdc) {{\n'.format(api.upper()))
            else:
                f.write('static void find_core{}(void) {{\n'.format(api.upper()))
            self.loader.write_find_core(f)
            if self.spec.NAME in ('gl','glx','wgl'):
                for feature in features[api]:
                    f.write('\tGLAD_{} = (major == {num[0]} && minor >= {num[1]}) ||'
                            ' major > {num[0]};\n'.format(feature.name, num=feature.number))
            f.write('}\n\n')
            if api == 'glx':
                f.write('int gladLoad{}Loader(GLADloadproc load, Display *dpy, int screen) {{\n'.format(api.upper()))
            elif api == 'wgl':
                f.write('int gladLoad{}Loader(GLADloadproc load, HDC hdc) {{\n'.format(api.upper()))
            else:
                f.write('int gladLoad{}Loader(GLADloadproc load) {{\n'.format(api.upper()))
            self.loader.write_begin_load(f)
            if api == 'glx':
                f.write('\tfind_core{}(dpy, screen);\n'.format(api.upper()))
            elif api == 'wgl':
                f.write('\tfind_core{}(hdc);\n'.format(api.upper()))
            else:
                f.write('\tfind_core{}();\n'.format(api.upper()))
            for feature in features[api]:
                f.write('\tload_{}(load);\n'.format(feature.name))
            f.write('\n\tfind_extensions{}();\n'.format(api.upper()))
            for ext in extensions[api]:
                if len(list(ext.functions)) == 0:
                    continue
                f.write('\tload_{}(load);\n'.format(ext.name))
            self.loader.write_end_load(f)
            f.write('}\n\n')
        self.loader.write_header_end(self._f_h)
    def generate_types(self, types):
        """Write loader prototypes and all type definitions to the header."""
        f = self._f_h
        self.loader.write_header(f)
        for api in self.api:
            if api == 'glx':
                f.write('GLAPI int gladLoad{}Loader(GLADloadproc, Display *dpy, int screen);\n\n'.format(api.upper()))
            elif api == 'wgl':
                f.write('GLAPI int gladLoad{}Loader(GLADloadproc, HDC hdc);\n\n'.format(api.upper()))
            else:
                f.write('GLAPI int gladLoad{}Loader(GLADloadproc);\n\n'.format(api.upper()))
        for type in types:
            # Non-EGL specs get khronos typedefs from KHR/khrplatform.h, so
            # skip spec-provided khronos types there.
            if not self.spec.NAME in ('egl',) and 'khronos' in type.raw:
                continue
            f.write((type.raw + '\n').lstrip().replace(' ', ' '))
    def generate_features(self, features):
        """Write feature declarations to the header and globals to the .c file."""
        f = self._f_h
        write = set()
        if self.spec.NAME in ('wgl',):
            # These are already defined in windows.h
            pass
        elif self.spec.NAME in ('egl',):
            for feature in features:
                for func in feature.functions:
                    self.write_function_def(f, func)
        else:
            # Collects into `write` the functions whose definitions must be
            # emitted into the .c file below.
            self.write_functions(f, write, set(), features)
        f = self._f_c
        f.write('#include <stdio.h>\n#include <string.h>\n#include {}\n'.format(self.h_include))
        self.loader.write(f, self.api.keys())
        self.loader.write_has_ext(f)
        if self.spec.NAME in ('gl','glx','wgl'):
            # One int flag per core feature (e.g. GLAD_GL_VERSION_3_3).
            for feature in features:
                f.write('int GLAD_{};\n'.format(feature.name))
        for func in write:
            self.write_function(f, func)
    def generate_extensions(self, extensions, enums, functions):
        """Write extension declarations (header) and definitions (.c file).

        `enums`/`functions` already emitted for features are passed in so
        duplicates are not declared twice.
        """
        write = set()
        written = set(enum.name for enum in enums) | \
                set(function.proto.name for function in functions)
        f = self._f_h
        self.write_functions(f, write, written, extensions)
        f = self._f_c
        if self.spec.NAME in ('gl','glx','wgl'):
            # One int flag per extension (deduplicated via the set).
            for ext in set(ext.name for ext in extensions):
                f.write('int GLAD_{};\n'.format(ext))
        written = set()
        for ext in extensions:
            # These two GLX extensions require SGI headers; guard them.
            if ext.name == 'GLX_SGIX_video_source': f.write('#ifdef _VL_H_\n')
            if ext.name == 'GLX_SGIX_dmbuffer': f.write('#ifdef _DM_BUFFER_H_\n')
            for func in ext.functions:
                if func in write and func not in written:
                    self.write_function(f, func)
                    written.add(func)
            if ext.name in ('GLX_SGIX_video_source', 'GLX_SGIX_dmbuffer'): f.write('#endif\n')
def write_functions(self, f, write, written, extensions):
for ext in extensions:
for enum in ext.enums:
if not enum.name in written:
f.write('#define {} {}\n'.format(enum.name, enum.value))
written.add(enum.name)
for ext in extensions:
f.write('#ifndef {0}\n#define {0} 1\n'.format(ext.name))
if self.spec. |
showa-yojyo/note | source/_sample/scipy/kdtree.py | Python | mit | 629 | 0 | #!/usr/bin/env python
"""kdtree.py: Demonstrate cl | ass KDTree of SciPy.
"""
import numpy as np
from scipy.spatial import KDTree
# pylint: disable=invalid-name
# Genrate 3D points: (0, 0, 0), | (0, 0, 10), (0, 0, 20), ...
x, y, z = np.mgrid[0:100:10, 0:100:10, 0:100:10]
points = list(zip(x.ravel(), y.ravel(), z.ravel()))
# Construct a KDTree.
tree = KDTree(points)
# A target point included in [0, 100) * [0, 100) * [0, 100).
target = [43.831, 54.762, 83.131]
print(f"Target: {target}")
# Query for the closest point.
dist, index = tree.query(target, eps=0.01)
print(f"Closest: {tree.data[index]}")
print(f"Distance: {dist}")
|
crisely09/horton | horton/espfit/test/test_mask.py | Python | gpl-3.0 | 3,493 | 0.004867 | # -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2016 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
import numpy as np
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
def get_fake_system():
    """Build a small fake system (4 atoms) plus a uniform grid for the tests.

    Returns ``(coordinates, numbers, ugrid)``; the grid has shape
    (10, 10, 20) and is periodic along x and z only (``pbc=[1, 0, 1]``).
    """
    coordinates = np.array([[0.0, 1.5, 2.3], [-0.1, 1.1, 0.0], [2.0, 1.0, 0.0], [-1.0, 1.0, 1.1]])
    numbers = np.array([1, 1, 2, 2])
    origin = np.array([1.0, 0.0, 1.0])
    grid_rvecs = np.array([[0.15, 0.0, 0.0], [0.0, 0.20, 0.01], [0.01, 0.01, 0.15]])
    shape = np.array([10, 10, 20])
    pbc = np.array([1, 0, 1])
    ugrid = UniformGrid(origin, grid_rvecs, shape, pbc)
    return coordinates, numbers, ugrid
def test_mask_dens():
    """Density-based weights follow a Gaussian in log10(rho)."""
    coordinates, numbers, ugrid = get_fake_system()
    # Density is zero everywhere except along rho[0, 0, :].
    rho = np.zeros(ugrid.shape)
    scan = np.arange(-2.0, -0.0001, 0.1)
    rho[0,0,:] = 10**scan
    weights = setup_weights(coordinates, numbers, ugrid, dens=(rho, -9, 0.8))
    # Zero density must give zero weight.
    assert (weights[1,:,:] == 0.0).all()
    # Non-zero points: exp(-width*(log(rho)-center)**2) with the (center,
    # width) = (-9, 0.8) from the dens tuple.
    assert abs(weights[0,0,:] - np.exp(-0.8*(np.log(rho[0,0,:])-(-9))**2)).max() < 1e-10
def test_mask_near1():
    """Near-atom masks stay in [0, 1] and vanish on top of an atom."""
    coordinates, numbers, ugrid = get_fake_system()
    weights = setup_weights(coordinates, numbers, ugrid, near={1: (0.5, 0.5), 2: (1.0, 0.2)})
    assert (weights >= 0.0).all()
    assert (weights <= 1.0).all()
    # find the point close to atom 2 and check that the weight is zero
    grid_cell = ugrid.get_grid_cell()
    i = np.round(grid_cell.to_frac(coordinates[2] - ugrid.origin)).astype(int)
    # Wrap indexes along the periodic axes (grid shape is (10, 10, 20)).
    i[0] = i[0]%10
    i[2] = i[2]%20
    assert weights[i[0], i[1], i[2]] == 0.0
def test_mask_near2():
    # A mask with several elements must equal the product of the per-element
    # masks built with the same parameters.
    coordinates, numbers, ugrid = get_fake_system()
    combined = setup_weights(coordinates, numbers, ugrid,
                             near={1: (0.5, 0.5), 2: (1.0, 0.2)})
    only_h = setup_weights(coordinates, numbers, ugrid, near={1: (0.5, 0.5)})
    only_he = setup_weights(coordinates, numbers, ugrid, near={2: (1.0, 0.2)})
    assert abs(combined - only_h * only_he).max() < 1e-10
def test_mask_near3():
    # Using key 0 with given parameters must equal the product of the masks
    # for elements 1 and 2 built with identical parameters.
    coordinates, numbers, ugrid = get_fake_system()
    w_all = setup_weights(coordinates, numbers, ugrid, near={0: (0.5, 0.5)})
    w_h = setup_weights(coordinates, numbers, ugrid, near={1: (0.5, 0.5)})
    w_he = setup_weights(coordinates, numbers, ugrid, near={2: (0.5, 0.5)})
    assert abs(w_all - w_h * w_he).max() < 1e-10
def test_mask_far():
    """Far-mask weights stay in [0, 1] and are one on top of an atom."""
    coordinates, numbers, ugrid = get_fake_system()
    weights = setup_weights(coordinates, numbers, ugrid, far=(1.0, 0.5))
    assert (weights >= 0.0).all()
    assert (weights <= 1.0).all()
    # find the point close to atom 2 and check that the weight is one
    grid_cell = ugrid.get_grid_cell()
    i = np.round(grid_cell.to_frac(coordinates[2] - ugrid.origin)).astype(int)
    # Wrap indexes along the periodic axes (grid shape is (10, 10, 20)).
    i[0] = i[0]%10
    i[2] = i[2]%20
    assert weights[i[0], i[1], i[2]] == 1.0
|
wilywampa/python-mode | pymode/libs/pylama/async.py | Python | lgpl-3.0 | 1,615 | 0 | """Support for asyncronious checking."""
import logging
import threading
try:
import Queue
except ImportError:
import queue as Queue
try:
import multiprocessing
CPU_COUNT = multiprocessing.cpu_count()
except (ImportError, NotImplementedError):
CPU_COUNT = 1
from .core import run
LOGGER = logging.getLogger('pylama')
class Worker(threading.Thread):
    """Get tasks from queue and run."""

    def __init__(self, path_queue, result_queue):
        """ Init worker. """
        threading.Thread.__init__(self)
        # Queue of (path, params) tasks to check.
        self.path_queue = path_queue
        # Queue receiving the list of errors for each checked path.
        self.result_queue = result_queue

    def run(self):
        """ Run tasks from queue. """
        # Infinite loop: the thread is started as a daemon by check_async(),
        # so it is reaped when the main thread exits.
        while True:
            path, params = self.path_queue.get()
            errors = run(path, **params)
            self.result_queue.put(errors)
            # task_done() lets path_queue.join() in check_async() return.
            self.path_queue.task_done()
def check_async(paths, options, rootdir=None):
    """Check given paths asynchronously.

    :return list: list of errors

    """
    LOGGER.info('Async code checking is enabled.')
    path_queue = Queue.Queue()
    result_queue = Queue.Queue()

    # Spawn one daemon worker thread per CPU.
    for num in range(CPU_COUNT):
        worker = Worker(path_queue, result_queue)
        worker.setDaemon(True)
        LOGGER.info('Start worker #%s', (num + 1))
        worker.start()

    # Enqueue every path together with its checking parameters.
    for path in paths:
        path_queue.put((path, dict(options=options, rootdir=rootdir)))

    # Block until every queued path has been processed.
    path_queue.join()

    # All results were put before the matching task_done(), so draining
    # until the queue is empty collects everything.
    errors = []
    while not result_queue.empty():
        errors += result_queue.get(False)
    return errors
# pylama:ignore=W0212,D210,F0001
|
nkgeorgiev/tarator | old/modules/DHT/opendht/python/tools/benchmark.py | Python | gpl-3.0 | 9,414 | 0.006694 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2016 Savoir-faire Linux Inc.
# Author(s): Adrien Béraud <adrien.beraud@savoirfairelinux.com>
# Simon Désaulniers <sim.desaulniers@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; If not, see <http://www.gnu.org/licenses/>.
import os
import sys
import subprocess
import signal
import argparse
import time
import random
from dht.network import DhtNetwork
from dht.network import DhtNetworkSubProcess
from dht.tests import PerformanceTest, PersistenceTest
from dht import virtual_network_builder
from dht import network as dhtnetwork
from opendh | t import *
class WorkBench():
    """
    This contains the initialisation information, such as ipv4/ipv6, number of
    nodes and cluster to create, etc. This class is also used to initialise and
    finish the network.
    """
    def __init__(self, ifname='ethdht', virtual_locs=8, node_num=32, remote_bootstrap=None, loss=0, delay=0, disable_ipv4=False,
                 disable_ipv6=False):
        self.ifname = ifname
        self.virtual_locs = virtual_locs
        self.node_num = node_num
        # There cannot be more clusters than nodes.
        self.clusters = min(virtual_locs, node_num)
        self.node_per_loc = int(self.node_num / self.clusters)
        self.loss = loss
        self.delay = delay
        self.disable_ipv4 = disable_ipv4
        self.disable_ipv6 = disable_ipv6
        self.remote_bootstrap = remote_bootstrap
        self.local_bootstrap = None
        self.bs_port = "5000"
        # One sub-process slot per cluster, filled lazily by start_cluster().
        self.procs = [None for _ in range(self.clusters)]

    def get_bootstrap(self):
        # Lazily create (and cache) the local bootstrap network.
        if not self.local_bootstrap:
            self.local_bootstrap = DhtNetwork(iface='br'+self.ifname,
                    first_bootstrap=False if self.remote_bootstrap else True,
                    bootstrap=[(self.remote_bootstrap, self.bs_port)] if self.remote_bootstrap else [])
        return self.local_bootstrap

    def create_virtual_net(self):
        """Spawn the virtual network builder (one location needs no net)."""
        if self.virtual_locs > 1:
            cmd = ["python3", os.path.abspath(virtual_network_builder.__file__), "-i", self.ifname, "-n", str(self.clusters), '-l', str(self.loss), '-d', str(self.delay)]
            if not self.disable_ipv4:
                cmd.append('-4')
            if not self.disable_ipv6:
                cmd.append('-6')
            print(cmd)
            p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
            output, err = p.communicate()
            print(output.decode())

    def destroy_virtual_net(self):
        # '-r' asks the builder script to remove the interfaces it created.
        print('Shuting down the virtual IP network.')
        subprocess.call(["python3", os.path.abspath(virtual_network_builder.__file__), "-i", self.ifname, "-n", str(self.clusters), "-r"])

    def start_cluster(self, i):
        """Start cluster ``i`` as a DhtNetwork sub-process; requires a bootstrap."""
        if self.local_bootstrap:
            cmd = ["python3", os.path.abspath(dhtnetwork.__file__), "-n", str(self.node_per_loc), '-I', self.ifname+str(i)+'.1']
            if self.remote_bootstrap:
                cmd.extend(['-b', self.remote_bootstrap, '-bp', "5000"])
            else:
                if not self.disable_ipv4 and self.local_bootstrap.ip4:
                    cmd.extend(['-b', self.local_bootstrap.ip4])
                if not self.disable_ipv6 and self.local_bootstrap.ip6:
                    cmd.extend(['-b6', self.local_bootstrap.ip6])
            self.procs[i] = DhtNetworkSubProcess('node'+str(i), cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
            # Poll the child's output until it signals readiness.
            while DhtNetworkSubProcess.NOTIFY_TOKEN not in self.procs[i].getline():
                # waiting for process to spawn
                time.sleep(0.5)
        else:
            raise Exception('First create bootstrap.')

    def stop_cluster(self, i):
        """
        Stops a cluster sub process. All nodes are put down without graceful
        shutdown.
        """
        if self.procs[i]:
            try:
                self.procs[i].quit()
            except Exception as e:
                print(e)
            self.procs[i] = None

    def replace_cluster(self):
        """
        Same as stop_cluster(), but creates a new cluster right after.
        """
        # A random cluster is replaced, not a specific one.
        n = random.randrange(0, self.clusters)
        self.stop_cluster(n)
        self.start_cluster(n)

    def resize_clusters(self, n):
        """
        Resizes the list of clusters to be of length ``n``.
        """
        procs_count = len(self.procs)
        if procs_count < n:
            # Grow: append a slot and start a cluster in it.
            for i in range(n-procs_count):
                self.procs.append(None)
                self.start_cluster(procs_count+i)
        else:
            # Shrink: stop clusters from the end (slots are kept as None).
            for i in range(procs_count-n):
                self.stop_cluster(procs_count-i-1)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Run, test and benchmark a '\
'DHT network on a local virtual network with simulated packet '\
'loss and latency.')
ifConfArgs = parser.add_argument_group('Virtual interface configuration')
ifConfArgs.add_argument('-i', '--ifname', default='ethdht', help='interface name')
ifConfArgs.add_argument('-n', '--node-num', type=int, default=32, help='number of dht nodes to run')
ifConfArgs.add_argument('-v', '--virtual-locs', type=int, default=8,
help='number of virtual locations (node clusters)')
ifConfArgs.add_argument('-l', '--loss', type=int, default=0, help='simulated cluster packet loss (percent)')
ifConfArgs.add_argument('-d', '--delay', type=int, default=0, help='simulated cluster latency (ms)')
ifConfArgs.add_argument('-b', '--bootstrap', default=None, help='Bootstrap node to use (if any)')
ifConfArgs.add_argument('-no4', '--disable-ipv4', action="store_true", help='Enable IPv4')
ifConfArgs.add_argument('-no6', '--disable-ipv6', action="store_true", help='Enable IPv6')
testArgs = parser.add_argument_group('Test arguments')
testArgs.add_argument('--bs-dht-log', action='store_true', default=False, help='Enables dht log in bootstrap.')
testArgs.add_argument('-t', '--test', type=str, default=None, required=True, help='Specifies the test.')
testArgs.add_argument('-o', '--opt', type=str, default=[], nargs='+',
help='Options passed to tests routines.')
testArgs.add_argument('-m', type=int, default=None, help='Generic size option passed to tests.')
testArgs.add_argument('-e', type=int, default=None, help='Generic size option passed to tests.')
featureArgs = parser.add_mutually_exclusive_group(required=True)
featureArgs.add_argument('--performance', action='store_true', default=False,
help='Launches performance benchmark test. Available args for "-t" are: gets.')
featureArgs.add_argument('--data-persistence', action='store_true', default=0,
help='Launches data persistence benchmark test. '\
'Available args for "-t" are: delete, replace, mult_time. '\
'Available args for "-o" are : dump_str_log, keep_alive, trigger, traffic_plot, op_plot. '\
'Use "-m" to specify the number of producers on the DHT.'\
'Use "-e" to specify the number of values to put on the DHT.')
args = parser.parse_args()
test_opt = { o : True for o in args.opt }
wb = WorkBench(args.ifname, args.virtual_locs, args.node_num, loss=args.loss,
delay=args.delay, disable_ipv4=args.disable_ipv4,
disable_ipv6=args.disable_ipv6)
bootstrap = wb.get_bootstrap()
bs_dht_log_enabled = False
def toggle_bs_dht_log(signum, frame):
global bs_dht_log_enabled, bootstrap
if bs_dht_log_enabled:
bootstrap.front().disableLoggin |
EterniusVGM/Renju | competition/backend.py | Python | mit | 235 | 0.008511 | import renju
import sys
|
def wait_for_game_update():
    """Block until the referee sends the next game state on stdin.

    Reads one raw line from the binary stdin stream, strips trailing
    newline/whitespace bytes, decodes it and deserializes a Game.
    """
    raw_line = sys.stdin.buffer.readline()
    payload = raw_line.rstrip().decode()
    return renju.Game.loads(payload)
def move(move):
    """Send a move to the referee over stdout.

    The move string is encoded, newline-terminated, written to the binary
    stdout stream and flushed immediately so the referee sees it at once.
    (The final ``flush()`` call was garbled in the source and has been
    reconstructed.)
    """
    sys.stdout.buffer.write(move.encode() + b'\n')
    sys.stdout.flush()
bwasti/caffe2 | caffe2/python/operator_test/momentum_sgd_test.py | Python | apache-2.0 | 3,891 | 0 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from caffe2.python import core
import caffe2.python.hypothesis_test_util as hu
import hypothesis
from hypothesis import given
import hypothesis.strategies as st
import numpy as np
import unittest
class TestMomentumSGD(hu.HypothesisTestCase):
    """Checks the MomentumSGD(Update) operators against NumPy references."""

    @given(n=st.integers(4, 8), **hu.gcs)
    def test_momentum_sgd(self, n, gc, dc):
        """Dense momentum SGD, with and without an explicit param blob."""
        param = np.random.rand(n).astype(np.float32)
        grad = np.random.rand(n).astype(np.float32)
        lr = np.random.rand(1).astype(np.float32)
        param_momentum = np.random.rand(n).astype(np.float32)
        momentum = 0.9

        def momentum_sgd(grad, param_momentum, lr, param=None):
            # Classic (non-Nesterov) momentum: v' = lr*g + mu*v, p' = p - v'
            adjgrad = lr * grad + momentum * param_momentum
            if param is None:
                return [adjgrad, adjgrad]
            else:
                paramup = param - adjgrad
                return [adjgrad, adjgrad, paramup]

        op = core.CreateOperator(
            "MomentumSGDUpdate",
            ["grad", "param_momentum", "lr", "param"],
            ["grad", "param_momentum", "param"],
            momentum=momentum,
            nesterov=0,
        )

        self.assertReferenceChecks(
            device_option=gc,
            op=op,
            inputs=[grad, param_momentum, lr, param],
            reference=momentum_sgd
        )

        op_noparam = core.CreateOperator(
            "MomentumSGD",
            ["grad", "param_momentum", "lr"],
            ["grad", "param_momentum"],
            momentum=momentum,
            nesterov=0,
        )

        self.assertReferenceChecks(
            device_option=gc,
            op=op_noparam,
            inputs=[grad, param_momentum, lr],
            reference=momentum_sgd
        )

    @given(inputs=hu.tensors(n=3),
           momentum=st.floats(min_value=0.1, max_value=0.9),
           nesterov=st.booleans(),
           lr=st.floats(min_value=0.1, max_value=0.9),
           data_strategy=st.data(),
           **hu.gcs)
    def test_sparse_momentum_sgd(
            self, inputs, momentum, nesterov, lr, data_strategy, gc, dc):
        """Sparse momentum SGD over a random set of unique row indices."""
        w, grad, m = inputs

        # Create an indexing array containing values which index into grad
        indices = data_strategy.draw(
            hu.tensor(dtype=np.int64,
                      elements=st.sampled_from(np.arange(grad.shape[0]))),
        )
        hypothesis.note('indices.shape: %s' % str(indices.shape))

        # For now, the indices must be unique
        hypothesis.assume(np.array_equal(np.unique(indices.flatten()),
                                         np.sort(indices.flatten())))

        # Sparsify grad: keep only the rows the operator will touch
        grad = grad[indices]

        # Make momentum >= 0
        m = np.abs(m)

        # Convert lr to a numpy array
        lr = np.asarray([lr], dtype=np.float32)

        op = core.CreateOperator(
            "SparseMomentumSGDUpdate",
            ["grad", "m", "lr", "param", "indices"],
            ["adjusted_grad", "m", "param"],
            momentum=momentum,
            nesterov=int(nesterov),
            device_option=gc)

        # Reference implementation mirroring the C++ operator semantics.
        def momentum_sgd(grad, m, lr):
            lr = lr[0]
            if not nesterov:
                adjusted_gradient = lr * grad + momentum * m
                return (adjusted_gradient, adjusted_gradient)
            else:
                m_new = momentum * m + lr * grad
                return ((1 + momentum) * m_new - momentum * m, m_new)

        def sparse(grad, m, lr, param, i):
            # NOTE(review): this span was garbled in the source and has been
            # reconstructed: update only the indexed rows of m and param.
            grad_new, m_new = momentum_sgd(grad, m[i], lr)
            m[i] = m_new
            param[i] -= grad_new
            return (grad_new, m, param)

        self.assertReferenceChecks(gc, op, [grad, m, lr, w, indices], sparse)
if __name__ == "__main__":
unittest.main()
|
zackmdavis/python-swiftclient | tests/test_multithreading.py | Python | apache-2.0 | 14,131 | 0 | # Copyright (c) 2010-2013 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import time
try:
from unittest import mock
except ImportError:
import mock
import testtools
import threading
import six
from six.moves.queue import Queue, Empty
from swiftclient import multithreading as mt
from swiftclient.exceptions import ClientException
class ThreadTestCase(testtools.TestCase):
    """Shared fixture for the multithreading tests.

    Subclasses funnel work items through ``_func`` and inspect what was
    received via the ``got_items`` / ``got_args_kwargs`` queues.
    """

    def setUp(self):
        super(ThreadTestCase, self).setUp()
        # Records the (args, kwargs) each _func invocation received.
        self.got_args_kwargs = Queue()
        # Baseline so tests can assert no worker threads leak.
        self.starting_thread_count = threading.active_count()

    def _func(self, q_item, *args, **kwargs):
        """Worker body: record the item; raise on the magic trigger items.

        'go boom' raises a plain Exception, 'c boom' raises a fully
        populated ClientException; anything else returns a fixed result.
        """
        self.got_items.put(q_item)
        self.got_args_kwargs.put((args, kwargs))
        if q_item == 'go boom':
            raise Exception('I went boom!')
        if q_item == 'c boom':
            raise ClientException(
                'Client Boom', http_scheme='http', http_host='192.168.22.1',
                http_port=80, http_path='/booze', http_status=404,
                http_reason='to much', http_response_content='no sir!')
        return 'best result EVAR!'

    def assertQueueContains(self, queue, expected_contents):
        """Drain ``queue`` and compare against ``expected_contents``.

        If the expectation is a set the comparison ignores ordering;
        otherwise arrival order must match exactly.
        """
        got_contents = []
        try:
            while True:
                # Short timeout: the queue is drained once producers stop.
                got_contents.append(queue.get(timeout=0.1))
        except Empty:
            pass
        if isinstance(expected_contents, set):
            got_contents = set(got_contents)
        self.assertEqual(expected_contents, got_contents)
class TestQueueFunctionThread(ThreadTestCase):
def setUp(self):
super(TestQueueFunctionThread, self).setUp()
self.input_queue = Queue()
self.got_items = Queue()
self.stored_results = []
self.qft = mt.QueueFunctionThread(self.input_queue, self._func,
'one_arg', 'two_arg',
red_fish='blue_arg',
store_results=self.stored_results)
self.qft.start()
def tearDown(self):
if self.qft.is_alive():
self.finish_up_thread()
super(TestQueueFunctionThread, self).tearDown()
def finish_up_thread(self):
self.input_queue.put(mt.StopWorkerThreadSignal())
while self.qft.is_alive():
time.sleep(0.05)
def test_plumbing_and_store_results(self):
self.input_queue.put('abc')
self.input_queue.put(123)
self.finish_up_thread()
self.assertQueueContains(self.got_items, ['abc', 123])
self.assertQueueContains(self.got_args_kwargs, [
(('one_arg', 'two_arg'), {'red_fish': 'blue_arg'}),
(('one_arg', 'two_arg'), {'red_fish': 'blue_arg'})])
self.assertEqual(self.stored_results,
['best result EVAR!', 'best result EVAR!'])
def test_exception_handling(self):
self.input_queue.put('go boom')
self.input_queue.put('ok')
self.input_queue.put('go boom')
self.finish_up_thread()
self.assertQueueContains(self.got_items,
['go boom', 'ok', 'go boom'])
self.assertEqual(len(self.qft.exc_infos), 2)
self.assertEqual(Exception, self.qft.exc_infos[0][0])
self. | assertEqual(Exception, self.qft.exc_infos[1][0])
self.assertEqua | l(('I went boom!',), self.qft.exc_infos[0][1].args)
self.assertEqual(('I went boom!',), self.qft.exc_infos[1][1].args)
class TestQueueFunctionManager(ThreadTestCase):
def setUp(self):
super(TestQueueFunctionManager, self).setUp()
self.thread_manager = mock.create_autospec(
mt.MultiThreadingManager, spec_set=True, instance=True)
self.thread_count = 4
self.error_counter = [0]
self.got_items = Queue()
self.stored_results = []
self.qfq = mt.QueueFunctionManager(
self._func, self.thread_count, self.thread_manager,
thread_args=('1arg', '2arg'),
thread_kwargs={'a': 'b', 'store_results': self.stored_results},
error_counter=self.error_counter,
connection_maker=self.connection_maker)
def connection_maker(self):
return 'yup, I made a connection'
def test_context_manager_without_error_counter(self):
self.qfq = mt.QueueFunctionManager(
self._func, self.thread_count, self.thread_manager,
thread_args=('1arg', '2arg'),
thread_kwargs={'a': 'b', 'store_results': self.stored_results},
connection_maker=self.connection_maker)
with self.qfq as input_queue:
self.assertEqual(self.starting_thread_count + self.thread_count,
threading.active_count())
input_queue.put('go boom')
self.assertEqual(self.starting_thread_count, threading.active_count())
error_strs = list(map(str, self.thread_manager.error.call_args_list))
self.assertEqual(1, len(error_strs))
self.assertTrue('Exception: I went boom!' in error_strs[0])
def test_context_manager_without_conn_maker_or_error_counter(self):
self.qfq = mt.QueueFunctionManager(
self._func, self.thread_count, self.thread_manager,
thread_args=('1arg', '2arg'), thread_kwargs={'a': 'b'})
with self.qfq as input_queue:
self.assertEqual(self.starting_thread_count + self.thread_count,
threading.active_count())
for i in range(20):
input_queue.put('slap%d' % i)
self.assertEqual(self.starting_thread_count, threading.active_count())
self.assertEqual([], self.thread_manager.error.call_args_list)
self.assertEqual(0, self.error_counter[0])
self.assertQueueContains(self.got_items,
set(['slap%d' % i for i in range(20)]))
self.assertQueueContains(
self.got_args_kwargs,
[(('1arg', '2arg'), {'a': 'b'})] * 20)
self.assertEqual(self.stored_results, [])
def test_context_manager_with_exceptions(self):
with self.qfq as input_queue:
self.assertEqual(self.starting_thread_count + self.thread_count,
threading.active_count())
for i in range(20):
input_queue.put('item%d' % i if i % 2 == 0 else 'go boom')
self.assertEqual(self.starting_thread_count, threading.active_count())
error_strs = list(map(str, self.thread_manager.error.call_args_list))
self.assertEqual(10, len(error_strs))
self.assertTrue(all(['Exception: I went boom!' in s for s in
error_strs]))
self.assertEqual(10, self.error_counter[0])
expected_items = set(['go boom'] +
['item%d' % i for i in range(20)
if i % 2 == 0])
self.assertQueueContains(self.got_items, expected_items)
self.assertQueueContains(
self.got_args_kwargs,
[(('yup, I made a connection', '1arg', '2arg'), {'a': 'b'})] * 20)
self.assertEqual(self.stored_results, ['best result EVAR!'] * 10)
def test_context_manager_with_client_exceptions(self):
with self.qfq as input_queue:
self.assertEqual(self.starting_thread_count + self.thread_count,
threading.active_count())
for i in range(20):
input_queue.put('item%d' % i if i % 2 == 0 else 'c boom')
self.assertEqual(self.starting_thread_count, threading.active_count())
error_strs = list(map(str, self.thread_manager.error.call_args_list))
self.ass |
twerkmeister/iLID | preprocessing/audio/__init__.py | Python | mit | 141 | 0.007092 | __all__ | = ["melfilterbank", "windowing", "spectrogram", "resample"]
import melfilterbank
import windowing
i | mport spectrogram
import resample |
dark1729dragon/pixutils | pixutils/BridgeIt.py | Python | bsd-2-clause | 7,874 | 0.002413 | from __future__ import nested_scopes, generators, division, absolute_import, with_statement, print_function, unicode_literals
from pixutils.one_shot_import import *
# ------------------ these modules will take some time to import but for simple implementation clumped together ------------------ #
'''
These functions supports to convert one form of data to other.
Example: r2bb convert opencv rectange (x,y,w,h) to bounding box format (x0,y0,x1,y1)
'''
def bb_bias((x0, y0, x1, y1), scale=None, bias=(0, 0, 0, 0), win=None):
    '''
    Grow or shrink a bounding box (x0, y0, x1, y1), then optionally clamp
    it to an image.

    :param scale: optional (x_scale, y_scale); scales the box about its
                  center (e.g. 0 keeps the size, larger values grow it).
    :param bias: (left, top, right, bottom) pixel offsets; positive values
                 enlarge the box on that side.
    :param win: optional image shape (h, w, ...); when given, the result
                is clamped to [0, w] x [0, h] and degenerate boxes are
                collapsed (x1 >= x0, y1 >= y0).
    :return: integer tuple (x0, y0, x1, y1).

    NOTE: Python-2 only signature (tuple parameter unpacking, PEP 3113).
    Use r2bb() first if your box is in (x, y, w, h) form.

    Examples
    --------
    img = np.zeros((1469, 1531), 'u1')
    bb_bias((0, 0, 100, 100), (2, 3), (22, 33, 27, 4), win=img.shape)
    # (0, 0, 123, 196)
    bb_bias((-9, -8, 150, 103), win=img.shape)   # fit inside the image
    # (0, 0, 150, 103)
    '''
    x0b, y0b, x1b, y1b = bias
    if scale is not None:
        # Scaling about the center: new edge = weighted mean of both edges.
        a, b = 1 - scale[0], 1 + scale[0]
        c, d = 1 - scale[1], 1 + scale[1]
        if win is None:
            x0, y0, x1, y1 = (x1 * a + x0 * b) / 2 - x0b,\
                             (y1 * c + y0 * d) / 2 - y0b,\
                             (x1 * b + x0 * a) / 2 - x1b, \
                             (y1 * d + y0 * c) / 2 - y1b
            return int(x0), int(y0), int(x1), int(y1)
        else:
            # Same arithmetic, clamped to the image borders.
            x0, y0, x1, y1 = max(0, (x1 * a + x0 * b) / 2 - x0b),\
                             max(0, (y1 * c + y0 * d) / 2 - y0b), \
                             min(win[1], (x1 * b + x0 * a) / 2 - x1b), \
                             min(win[0], (y1 * d + y0 * c) / 2 - y1b)
            # max(x0, x1)/max(y0, y1) collapse inverted (degenerate) boxes.
            return int(x0), int(y0), int(max(x0, x1)), int(max(y0, y1))
    else:
        if win is None:
            x0, y0, x1, y1 = x0 - x0b, y0 - y0b, x1 + x1b, y1 + y1b
            return int(x0), int(y0), int(x1), int(y1)
        else:
            x0, y0, x1, y1 = max(0, x0 - x0b), max(0, y0 - y0b), min(win[1], x1 + x1b), min(win[0], y1 + y1b)
            return int(x0), int(y0), int(max(x0, x1)), int(max(y0, y1))
def float2img(img, pixmin=0, pixmax=255, dtype=0):
    '''
    Min-max normalize an image into the (pixmin, pixmax) range,
    (0, 255) by default, i.e. a displayable 8-bit image.
    '''
    # NOTE(review): 32 appears to be cv2.NORM_MINMAX and dtype=0 cv2.CV_8U
    # -- confirm against the installed OpenCV constants.
    return cv2.normalize(img, None, pixmin, pixmax, 32, dtype)
def img2float(img, pixmin=0, pixmax=1, dtype=5):
    '''
    Min-max normalize an image into the (pixmin, pixmax) range,
    (0.0, 1.0) by default, as a floating-point image.
    '''
    # NOTE(review): 32 appears to be cv2.NORM_MINMAX and dtype=5 cv2.CV_32F
    # -- confirm against the installed OpenCV constants.
    return cv2.normalize(img, None, pixmin, pixmax, 32, dtype)
def d2bb(d, asint=False):
    '''
    Convert a dlib rectangle to a bounding-box tuple (x0, y0, x1, y1).

    :param d: object exposing left()/top()/right()/bottom() (dlib.rectangle)
    :param asint: when True, truncate every coordinate to int
    :return: always a plain tuple

    for i in find_face(img):
        print(d2bb(i))
    '''
    coords = (d.left(), d.top(), d.right(), d.bottom())
    if asint:
        return tuple(int(c) for c in coords)
    return coords
def d2r(d, asint=False):
    '''
    Convert a dlib rectangle to an OpenCV-style rect (x, y, w, h).

    :param d: object exposing left()/top()/right()/bottom() (dlib.rectangle)
    :param asint: when True, truncate the corner coordinates to int
    :return: always a plain tuple

    for i in find_face(img):
        print(d2r(i))
    '''
    left, top, right, bottom = d.left(), d.top(), d.right(), d.bottom()
    if asint:
        left, top, right, bottom = int(left), int(top), int(right), int(bottom)
    return left, top, right - left, bottom - top
def r2bb(rect):
    '''
    Convert an OpenCV rectangle (x, y, w, h) to a bounding box
    (x0, y0, x1, y1).

    Rewritten without Python-2 tuple parameter unpacking (removed by
    PEP 3113) so the module also imports on Python 3; the call signature
    is unchanged -- callers still pass a single 4-sequence.

    Examples
    --------
    print(r2bb((267, 132, 67, 92)))
    # (267, 132, 334, 224)
    '''
    x, y, w, h = rect
    return x, y, x + w, y + h
def bb2r(bb):
    '''
    Convert a bounding box (x0, y0, x1, y1) to an OpenCV rectangle
    (x, y, w, h).

    Rewritten without Python-2 tuple parameter unpacking (removed by
    PEP 3113); the call signature is unchanged -- callers still pass a
    single 4-sequence.

    Examples
    --------
    print(bb2r((267, 132, 367, 392)))
    # (267, 132, 100, 260)
    '''
    x0, y0, x1, y1 = bb
    return x0, y0, x1 - x0, y1 - y0
def r2d(rect):
    '''
    Convert an OpenCV rectangle (x, y, w, h) to a dlib.rectangle.

    Rewritten without Python-2 tuple parameter unpacking (removed by
    PEP 3113); the call signature is unchanged -- callers still pass a
    single 4-sequence.

    Examples
    --------
    print(r2d((267, 132, 67, 92)))
    # [(267, 132) (334, 224)]
    '''
    x, y, w, h = rect
    return dlib.rectangle(x, y, x + w, y + h)
def bb2d(bb):
    '''
    Convert a bounding box (x0, y0, x1, y1) to a dlib.rectangle.

    Rewritten without Python-2 tuple parameter unpacking (removed by
    PEP 3113); the call signature is unchanged -- callers still pass a
    single 4-sequence.

    Examples
    --------
    print(bb2d((267, 132, 367, 392)))
    # [(267, 132) (367, 392)]
    '''
    x0, y0, x1, y1 = bb
    return dlib.rectangle(x0, y0, x1, y1)
def im2bb(img, scale=None, bias=(0, 0, 0, 0), win_fit=True):
    '''
    Return the bounding box (x0, y0, x1, y1) covering the whole image,
    optionally scaled/biased via bb_bias.

    :param win_fit: when True the box is clamped to the image borders;
                    otherwise scale/bias may push it outside the image.

    Example
    -------
    img = resize(cv2.imread(impath), (63, 127))
    print(im2bb(img))  # (0, 0, 127, 63)
    img = put_bb(img, im2bb(img))
    win('im2bb', 0)(img)
    '''
    return bb_bias((0, 0, img.shape[1], img.shape[0]), scale, bias, win=img.shape if win_fit else None)
def wh(img):
    '''
    Return (width, height) of an image array.

    img = resize(cv2.imread(impath), (63, 127))
    print(wh(img))  # (127, 63)
    '''
    # shape is (rows, cols, ...) == (h, w, ...); reverse the first two.
    return img.shape[1::-1]
def plt2cv(fig):
    '''
    Convert a matplotlib figure to an OpenCV (BGR, uint8) image array.

    Example
    -------
    import matplotlib.pyplot as plt
    impaths = glob(r'*.jpg')
    fig, axs = plt.subplots(3, 3)
    for impath, ax in zip(impaths, axs.ravel()):
        img = plt.imread(impath)
        ax.imshow(img)
    imgs = plt2cv(fig)
    plt.show()
    win('plt2cv', 0)(imgs)
    '''
    # Render the figure so the canvas RGBA buffer is populated.
    fig.canvas.draw()
    w, h = fig.canvas.get_width_height()
    img = np.array(fig.canvas.buffer_rgba(), dtype='u1').reshape(h, w, 4)
    # Drop alpha and reorder RGB -> BGR for OpenCV.
    return img[..., [2, 1, 0]]
def rectmid(rect):
    '''
    Return the integer midpoint (cx, cy) of a box (x0, y0, x1, y1).

    Rewritten without Python-2 tuple parameter unpacking (removed by
    PEP 3113); the call signature is unchanged. Division semantics are
    identical because the module imports ``division`` from __future__.
    '''
    x0, y0, x1, y1 = rect
    return int((x0 + x1) / 2), int((y0 + y1) / 2)
def splitpath(path, sep=os.sep):
    '''
    Split a path into its non-empty components, accepting a mix of
    forward and backward slashes.

    Example (UNC path): splitpath('\\\\192.168.1.2\\pixutils')
    returns ['192.168.1.2', 'pixutils'].

    :param path: path string, possibly mixing '/' and '\\' separators
    :param sep: separator to normalize onto (default: os.sep)
    :return: list of non-empty path components
    '''
    normalized = path.replace('\\', sep).replace('/', sep)
    return [part for part in normalized.split(sep) if part]
def im2txt(img, bookpath=None, resolution=1.0, sep=''):
    '''
    Write a text representation of the image's pixel values -- useful to
    visualize "how the machine sees" the image.

    Example:
    --------
    for pix in im2txt(img, resolution=1/256.0, sep=''):
        print pix
    im2txt(img, 'temp.txt', resolution=.1, sep='-')

    :param img: input image
    :param bookpath: output text path, or None to get the text back
    :param resolution: pixel multiplier before rounding, e.g. .1 -> 255*.1=25
    :param sep: column delimiter in the text output
    :return: list of row strings when bookpath is None; otherwise returns
             None (NOTE(review): the docstring promised the bookpath, but
             the code falls off the end -- confirm intended behavior)
    '''
    img = np.round((img * float(resolution)))
    if img.min() >= 0:
        # Field width derived from the widest value's string length.
        # NOTE(review): b'%' + val is a Python-2-ism; on Python 3 this
        # concatenation of bytes and str raises TypeError -- verify.
        val = str(len(str(img.max())) - 2)
        np.savetxt(bookpath or 'temp.txt', img, fmt=str(b'%' + val + 'd'), delimiter=str(sep))
    else:
        # Negative values need one extra column for the sign.
        val = str(len(str(img.max())) - 1)
        np.savetxt(bookpath or 'temp.txt', img, fmt=str('%' + val + 'd'), delimiter=str(sep))
    if bookpath is None:
        # NOTE(review): open(..., b'r') is a Python-2 mode literal.
        with open('temp.txt', b'r') as book:
            bookpath = book.read().split('\n')
        # dirop is a sibling helper; removes the scratch file.
        dirop('temp.txt', remove=True)
        return bookpath
|
luzfcb/projetoteste | projeto/urls.py | Python | lgpl-3.0 | 308 | 0.006494 | from django.conf.urls import patterns, include, url
from django.contrib import admin
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
url(r'^$ | ', 'core.views.empresta_list_view', name | ='home'),
# url(r'^blog/', include('blog.urls')),
url(r'^admin/', include(admin.site.urls)),
)
|
mortardata/luigi | test/test_ssh.py | Python | apache-2.0 | 1,733 | 0 | # Copyright (c) 2012 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations u | nder
# the License.
from luigi.contrib.ssh import RemoteContext
import unittest
import subprocess
class TestMockedRemoteContext(unittest.TestCase):
    """Unit tests for RemoteContext that never touch a real network."""

    def test_subprocess_delegation(self):
        """RemoteContext.Popen must assemble the expected ssh command line.

        subprocess.Popen is stubbed out to capture the command; the stub
        is restored in a finally block so a failing assertion cannot leave
        the global patched for later tests (the original leaked it).
        """
        orig_Popen = subprocess.Popen
        self.last_test = None

        def Popen(cmd, **kwargs):
            self.last_test = cmd

        subprocess.Popen = Popen
        try:
            context = RemoteContext(
                "some_host",
                username="luigi",
                key_file="/some/key.pub"
            )
            context.Popen(["ls"])
            self.assertTrue("ssh" in self.last_test)
            self.assertTrue("-i" in self.last_test)
            self.assertTrue("/some/key.pub" in self.last_test)
            self.assertTrue("luigi@some_host" in self.last_test)
            self.assertTrue("ls" in self.last_test)
        finally:
            subprocess.Popen = orig_Popen

    def test_check_output_fail_connect(self):
        """check_output against a non-existing host raises CalledProcessError."""
        context = RemoteContext("__NO_HOST_LIKE_THIS__")
        self.assertRaises(
            subprocess.CalledProcessError,
            context.check_output, ["ls"]
        )
|
gangadharkadam/office_erp | erpnext/projects/doctype/change_request/change_request.py | Python | agpl-3.0 | 275 | 0.010909 | # Copyright (c) | 2013, Web Notes Technologies Pvt. Ltd. and Contributors and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class ChangeRequest(Document):
    """Frappe DocType controller for Change Request.

    No custom server-side behaviour beyond the Document base class.
    """
    pass
| |
jarvis-fga/Projetos | Problema 2/lucas/src/testecomvetorizacao.py | Python | mit | 3,680 | 0.013381 | #!-*- coding: utf8 -*-
import pandas as pd
from sklearn.model_selection import cross_val_score
from collections import Counter
import numpy as np
classificacoes = pd.read_csv('all.csv', sep='\t') #Leia o arquivo e separe as tabulações
comentarios = classificacoes['comments'] #pegar a coluna de comentarios (titulada message)
palavras = comentarios.str.lower().str.split(' ') #jogar todas as letras para minúsculo e quebrá-las em palavras
dicionario = set() #criar um CONJUNTO (não permite repetição)
for lista in palavras: #colocar cada palabra encontrada no conjunto
dicionario.update(lista)
totalDePalavras = len(dicionario) #salvar o número de palavras catalogadas no conjunto
print(totalDePalavras)
tuplas = zip(dicionario, range(totalDePalavras)) #dar um índice a cada palavra encontrada
mapa = {palavra:indice for palavra, indice in tuplas} #criar um DICIONARIO capaz de retornar o índice de determinada palavra
def vetorizar_texto(texto, mapa):  # bag-of-words vectorization
    """Count occurrences of each known word in ``texto``.

    ``texto`` is an iterable of words and ``mapa`` maps word -> column
    index; the result is a count vector of length ``len(mapa)``.
    Words absent from ``mapa`` are ignored.
    """
    counts = [0] * len(mapa)
    for word in texto:
        index = mapa.get(word)
        if index is not None:
            counts[index] += 1
    return counts
vetoresdeTexto = [vetorizar_texto(texto, mapa) for texto in palavras] #fazer isso para cada um dos comentários
X = np.array(vetoresdeTexto) #Usar algoritmos de classificação como se faz com outros casos
Y = np.array(classificacoes['status'].tolist())
porcentagem_de_treino = 0.8
tamanho_do_treino = int(porcentagem_de_treino * len(Y))
tamanho_de_validacao = len(Y) - tamanho_do_treino
treino_dados = X[0:tamanho_do_treino]
treino_marcacoes = Y[0:tamanho_do_treino]
validacao_dados = X[tamanho_do_treino:]
validacao_marcacoes = Y[tamanho_do_treino:]
def treinarePrever(nome, modelo, treino_dados, treino_marcacoes):
    """Cross-validate ``modelo`` on the training split and report accuracy.

    Prints and returns the mean cross-validation score (sklearn's default
    number of folds).

    NOTE(review): the original defined an unused local ``k = 10`` --
    probably intended as ``cross_val_score(..., cv=k)``. Wiring it in
    would change the reported numbers, so the dead local is only removed
    here, not activated.
    """
    scores = cross_val_score(modelo, treino_dados, treino_marcacoes)
    taxa_de_acerto = np.mean(scores)
    msg = "Taxa de acerto do {0}: {1}".format(nome, taxa_de_acerto)
    print(msg)
    return taxa_de_acerto
resultados = {}
from sklearn.multiclass import OneVsRestClassifier
from sklearn.svm import LinearSVC
modeloOneVsRest = OneVsRestClassifier(LinearSVC(random_state = 0))
resultadoOneVsRest = treinarePrever("OneVsRest", modeloOneVsRest, treino_dados, treino_marcacoes)
resultados[resultadoOneVsRest] = modeloOneVsRest
from sklearn.multiclass import OneVsOneClassifier
modeloOneVsOne = OneVsOneClassifier(LinearSVC(random_state = 0))
resultado | OneVsOne = treinarePrever("OneVsOne", modeloOneVsOne, treino_dados, treino_marcacoes)
re | sultados[resultadoOneVsOne] = modeloOneVsOne
from sklearn.naive_bayes import MultinomialNB
modeloMultinomial = MultinomialNB()
resultadoMultinomial = treinarePrever("MultinomialNB", modeloMultinomial, treino_dados, treino_marcacoes)
resultados[resultadoMultinomial] = modeloMultinomial
from sklearn.ensemble import AdaBoostClassifier
modeloAdaBoost = AdaBoostClassifier()
resultadoAdaBoost = treinarePrever("AdaBoostClassifier", modeloAdaBoost, treino_dados, treino_marcacoes)
resultados[resultadoAdaBoost] = modeloAdaBoost
print (resultados)
vencedor = resultados[max(resultados)]
print ("Vencedor: {}".format(vencedor))
vencedor.fit(treino_dados, treino_marcacoes)
resultado = vencedor.predict(validacao_dados)
acertos = (resultado == validacao_marcacoes)
total_de_acertos = sum(acertos)
total_de_elementos = len(validacao_marcacoes)
taxa_de_acerto = 100.0 * total_de_acertos / total_de_elementos
print("Taxa de acerto do vencedor: {}".format(taxa_de_acerto))
|
tensorflow/tpu | models/official/resnet/benchmark/resnet_benchmark.py | Python | apache-2.0 | 8,651 | 0.005895 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Train a ResNet-50 model on ImageNet on TPU."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import csv
import os
import re
import sys
import time
from absl import app
from absl import flags
import tensorflow.compat.v1 as tf
# For Cloud environment, add parent directory for imports
sys.path.append(os.path.dirname(os.path.abspath(sys.path[0])))
from official.resnet import imagenet_input # pylint: disable=g-import-not-at-top
from official.resnet import resnet_main
from tensorflow.contrib import cluster_resolver as contrib_cluster_resolver
from tensorflow.contrib import tpu as contrib_tpu
from tensorflow.python.estimator import estimator
FLAGS = tf.flags.FLAGS
CKPT_PATTERN = r'model\.ckpt-(?P<gs>[0-9]+)\.data'
flags.DEFINE_string(
'data_dir_small', default=None,
help=('The directory where the resized (160x160) ImageNet input data is '
'stored. This is only to be used in conjunction with the '
'resnet_benchmark.py script.'))
flags.DEFINE_bool(
'use_fast_lr', default=False,
help=('Enabling this uses a faster learning rate schedule along with '
'different image sizes in the input pipeline. This is only to be '
'used in conjunction with the resnet_benchmark.py script.'))
# Number of training and evaluation images in the standard ImageNet dataset
NUM_TRAIN_IMAGES = 1281167
NUM_EVAL_IMAGES = 50000
def main(unused_argv):
tpu_cluster_resolver = contrib_cluster_resolver.TPUClusterResolver(
FLAGS.tpu, zone=FLAGS.tpu_zone, project=FLAGS.gcp_project)
config = contrib_tpu.RunConfig(
cluster=tpu_cluster_resolver,
model_dir=FLAGS.model_dir,
save_checkpoints_steps=FLAGS.iterations_per_loop,
keep_checkpoint_max=None,
tpu_config=contrib_tpu.TPUConfig(
iterations_per_loop=FLAGS.iterations_per_loop,
num_shards=FLAGS.num_cores,
per_host_input_for_training=contrib_tpu.InputPipelineConfig.PER_HOST_V2)) # pylint: disable=line-too-long
# Input pipelines are slightly different (with regards to shuffling and
# preprocessing) between training and evaluation.
imagenet_train = imagenet_input.ImageNetInput(
is_training=True,
data_dir=FLAGS.data_dir,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input)
imagenet_eval = imagenet_input.ImageNetInput(
is_training=False,
data_dir=FLAGS.data_dir,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input)
if FLAGS.use_fast_lr:
resnet_main.LR_SCHEDULE = [ # (multiplier, epoch to start) tuples
(1.0, 4), (0.1, 21), (0.01, 35), (0.001, 43)
]
imagenet_train_small = imagenet_input.ImageNetInput(
is_training=True,
image_size=128,
data_dir=FLAGS.data_dir_small,
num_parallel_calls=FLAGS.num_parallel_calls,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input,
cache=True)
imagenet_eval_small = imagenet_input.ImageNetInput(
is_training=False,
image_size=128,
data_dir=FLAGS.data_dir_small,
num_parallel_calls=FLAGS.num_parallel_calls,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input,
cache=True)
imagenet_train_large = imagenet_input.ImageNetInput(
is_training=True,
image_size=288,
data_dir=FLAGS.data_dir,
num_parallel_calls=FLAGS.num_parallel_calls,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input)
imagenet_eval_large = imagenet_input.ImageNetInput(
is_training=False,
image_size=288,
data_dir=FLAGS.data_dir,
num_parallel_calls=FLAGS.num_parallel_calls,
use_bfloat16=True,
transpose_input=FLAGS.transpose_input)
resnet_classifier = contrib_tpu.TPUEstimator(
use_tpu=FLAGS.use_tpu,
model_fn=resnet_main.resnet_model_fn,
config=config,
train_batch_size=FLAGS.train_batch_size,
eval_batch_size=FLAGS.eval_batch_size)
if FLAGS.mode == 'train':
current_step = estimator._load_global_step_from_checkpoint_dir(FLAGS.model_dir) # pylint: disable=protected-access,line-too-long
batches_per_epoch = NUM_TRAIN_IMAGES / FLAGS.train_batch_size
tf.logging.info('Training for %d steps (%.2f epochs in total). Current'
' step %d.' % (FLAGS.train_steps,
FLAGS.train_steps / batches_per_epoch,
current_step))
start_timestamp = time.time() # This time will include compilation time
# Write a dummy file at the start of training so that we can measure the
# runtime at each checkpoint from the file write time.
tf.gfile.MkDir(FLAGS.model_dir)
if not tf.gfile.Exists(os.path.join(FLAGS.model_dir, 'START')):
with tf.gfile.GFile(os.path.join(FLAGS.model_dir, 'START'), 'w') as f:
f.write(str(start_timestamp))
if FLAGS.use_fast_lr:
small_steps = int(18 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size)
normal_steps = int(41 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size)
large_steps = int(min(50 * NUM_TRAIN_IMAGES / FLAGS.train_batch_size,
FLAGS.train_steps))
resnet_classifier.train(
input_fn=imagenet_train_small.input_fn, max_steps=small_steps)
resnet_classifier.train(
input_fn=imagenet_train.input_fn, max_steps=normal_steps)
resnet_classifier.train(
input_fn=imagenet_train_large.input_fn,
max_steps=large_steps)
else:
resnet_classifier.train(
input_fn=imagenet_train.input_fn, max_steps=FLAGS.train_steps)
else:
assert FLAGS.mode == 'eval'
start_timestamp = tf.gfile.Stat(
os.path.join(FLAGS.model_dir, 'START')).mtime_nsec
results = []
eval_steps = NUM_EVAL_IMAGES // FLAGS.eval_batch_size
ckpt_steps = set()
all_files = tf.gfile.ListDirectory(FLAGS.model_dir)
for f in all_files:
mat = re.match(CKPT_PATTERN, f)
if mat is not None:
ckpt_steps.add(int(mat.group('gs')))
ckpt_steps = sorted(list(ckpt_steps))
tf.logging.info('Steps to be evaluated: %s' % str(ckpt_steps) | )
for step in ckpt_steps:
ckpt = os.path.join(FLAGS.model_dir, 'model.ckpt | -%d' % step)
batches_per_epoch = NUM_TRAIN_IMAGES // FLAGS.train_batch_size
current_epoch = step // batches_per_epoch
if FLAGS.use_fast_lr:
if current_epoch < 18:
eval_input_fn = imagenet_eval_small.input_fn
if current_epoch >= 18 and current_epoch < 41:
eval_input_fn = imagenet_eval.input_fn
if current_epoch >= 41: # 41:
eval_input_fn = imagenet_eval_large.input_fn
else:
eval_input_fn = imagenet_eval.input_fn
end_timestamp = tf.gfile.Stat(ckpt + '.index').mtime_nsec
elapsed_hours = (end_timestamp - start_timestamp) / (1e9 * 3600.0)
tf.logging.info('Starting to evaluate.')
eval_start = time.time() # This time will include compilation time
eval_results = resnet_classifier.evaluate(
input_fn=eval_input_fn,
steps=eval_steps,
checkpoint_path=ckpt)
eval_time = int(time.time() - eval_start)
tf.logging.info('Eval results: %s. Elapsed seconds: %d' %
(eval_results, eval_time))
results.append([
current_epoch,
elapsed_hours,
'%.2f' % (eval_results['top_1_accuracy' |
ecsnavarretemit/sarai-interactive-maps-backend | bootstrap.py | Python | mit | 251 | 0.003984 | #!/usr/bin/env python
# bootstrap.py
#
# Copyright(c) Exequiel Ceas | ar Navarrete <esnavarrete1@up.edu.ph>
# Licensed under MIT
# Version 1.0.0-alpha6
from app import db
# create all databases and tables included in the ap | plication
db.create_all()
|
tarballs-are-good/sympy | sympy/physics/quantum/anticommutator.py | Python | bsd-3-clause | 4,489 | 0.001782 | """The anti-commutator: {A,B} = A*B + B*A."""
from sympy import S, Expr, Mul, Integer
from sympy.printing.pretty.stringpict import prettyForm
from sympy.physics.quantum.qexpr import split_commutative_parts
from sympy.physics.quantum.operator import Operator
from sympy.physics.quantum.dagger import Dagger
__all__ = [
'AntiCommutator'
]
#-----------------------------------------------------------------------------
# Anti-commutator
#-----------------------------------------------------------------------------
class AntiCommutator(Expr):
    """The standard anticommutator, in an unevaluated state.

    The anticommutator is defined [1] as: {A, B} = A*B + B*A, but in this
    class the anticommutator is initially unevaluated. To expand the
    anticommutator out, use the ``doit`` method.

    The arguments of the anticommutator are put into canonical order using
    ``__cmp__``, so that {B,A} becomes {A,B}.

    Parameters
    ==========

    A : Expr
        The first argument of the anticommutator {A,B}.
    B : Expr
        The second argument of the anticommutator {A,B}.

    Examples
    ========

    >>> from sympy import symbols
    >>> from sympy.physics.quantum import AntiCommutator
    >>> from sympy.physics.quantum import Operator, Dagger
    >>> x, y = symbols('xy')
    >>> A = Operator('A')
    >>> B = Operator('B')

    Create an anticommutator and use ``doit`` to multiply them out.

    >>> ac = AntiCommutator(A,B); ac
    {A,B}
    >>> ac.doit()
    A*B + B*A

    The commutator orders its arguments in canonical order::

    >>> ac = AntiCommutator(B,A); ac
    {A,B}

    Scalar constants are factored out::

    >>> AntiCommutator(3*x*A,x*y*B)
    3*y*x**2*{A,B}

    Dagger is also handled::

    >>> Dagger(AntiCommutator(A,B))
    {Dagger(A),Dagger(B)}

    References
    ==========

    [1] http://en.wikipedia.org/wiki/Commutator
    """

    def __new__(cls, A, B, **old_assumptions):
        # Evaluate eagerly when a simpler form exists; otherwise build an
        # unevaluated, non-commutative Expr node holding (A, B).
        r = cls.eval(A, B)
        if r is not None:
            return r
        obj = Expr.__new__(cls, *(A, B), **{'commutative': False})
        return obj

    @classmethod
    def eval(cls, a, b):
        """Simplify trivial cases; return None to stay unevaluated.

        The anticommutator {A,B} is on canonical form if A < B.
        """
        # {A,0} == {0,B} == 0
        if not (a and b):
            return S.Zero
        # {A,A} == 2*A**2
        if a == b:
            return Integer(2)*a**2
        # A commutative factor commutes through: {A,B} == 2*A*B
        if a.is_commutative or b.is_commutative:
            return Integer(2)*a*b

        # {xA,yB} -> xy*{A,B}: pull commutative (scalar) factors out front.
        c_part = []
        nc_part = []
        nc_part2 = []
        if isinstance(a, Mul):
            c_part, nc_part = split_commutative_parts(a)
        if isinstance(b, Mul):
            c_part2, nc_part2 = split_commutative_parts(b)
            c_part.extend(c_part2)
        if c_part:
            a = nc_part or [a]
            b = nc_part2 or [b]
            return Mul(Mul(*c_part), cls(Mul(*a), Mul(*b)))

        # Canonical ordering of arguments: {B,A} -> {A,B}.
        if a.compare(b) == 1:
            return cls(b, a)

    def _eval_expand_anticommutator(self, **hints):
        # No changes, so return self
        return self

    def doit(self, **hints):
        """Expand the anticommutator into A*B + B*A."""
        A = self.args[0]
        B = self.args[1]
        if isinstance(A, Operator) and isinstance(B, Operator):
            # Give the operators a chance to supply a closed form first.
            try:
                comm = A._eval_anticommutator(B, **hints)
            except NotImplementedError:
                try:
                    comm = B._eval_anticommutator(A, **hints)
                except NotImplementedError:
                    comm = None
            if comm is not None:
                return comm.doit(**hints)
        return (A*B + B*A).doit(**hints)

    def _eval_dagger(self):
        # {A,B}^dagger == {Dagger(A), Dagger(B)}
        return AntiCommutator(Dagger(self.args[0]), Dagger(self.args[1]))

    def _sympyrepr(self, printer, *args):
        return "%s(%s,%s)" % (self.__class__.__name__, self.args[0],
                              self.args[1])

    def _sympystr(self, printer, *args):
        return "{%s,%s}" % (self.args[0], self.args[1])

    def _pretty(self, printer, *args):
        # Render as {A,B} using the pretty printer's bracket support.
        pform = printer._print(self.args[0], *args)
        pform = prettyForm(*pform.right((prettyForm(u','))))
        pform = prettyForm(*pform.right((printer._print(self.args[1], *args))))
        pform = prettyForm(*pform.parens(left='{', right='}'))
        return pform

    def _latex(self, printer, *args):
        # BUG FIX: literal braces must be escaped in LaTeX; the previous
        # format string "\\left{}%s,%s\\right}" rendered invalid markup.
        return "\\left\\{%s,%s\\right\\}" % tuple([
            printer._print(arg, *args) for arg in self.args])
|
vicnet/weboob | weboob/tools/capabilities/housing/housing_test.py | Python | lgpl-3.0 | 5,981 | 0.000167 | # -*- coding: utf-8 -*-
# Copyright(C) 2018 Phyks
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import itertools
from collections import Counter
from weboob.capabilities.base import empty
from weboob.capabilities.housing import POSTS_TYPES
class HousingTest(object):
    """
    Testing class to standardize the housing modules tests.

    Meant to be mixed into a weboob backend test case: it expects a
    ``self.backend`` attribute and the unittest assertion methods.
    """
    # Fields to be checked for values across all items in housings list
    FIELDS_ALL_HOUSINGS_LIST = [
        "id", "type", "advert_type", "house_type", "url", "title", "area",
        "cost", "currency", "utilities", "date", "location", "station", "text",
        "phone", "rooms", "bedrooms", "DPE", "GES", "details"
    ]
    # Fields to be checked for at least one item in housings list
    FIELDS_ANY_HOUSINGS_LIST = [
        "photos"
    ]
    # Fields to be checked for values across all items when querying
    # individually
    FIELDS_ALL_SINGLE_HOUSING = [
        "id", "url", "type", "advert_type", "house_type", "title", "area",
        "cost", "currency", "utilities", "date", "location", "station", "text",
        "phone", "rooms", "bedrooms", "DPE", "GES", "details"
    ]
    # Fields to be checked for values at least once for all items when querying
    # individually
    FIELDS_ANY_SINGLE_HOUSING = [
        "photos"
    ]
    # Some backends cannot distinguish between rent and furnished rent for
    # single housing post. Set this to True if this is the case.
    DO_NOT_DISTINGUISH_FURNISHED_RENT = False

    def assertNotEmpty(self, obj, field):
        """Assert that ``getattr(obj, field)`` is not an empty weboob value."""
        self.assertFalse(
            empty(getattr(obj, field)),
            'Field "%s" is empty and should not be.' % field
        )

    def check_housing_lists(self, query):
        """Run ``query`` through ``search_housings`` and validate up to 20 results."""
        results = list(itertools.islice(
            self.backend.search_housings(query),
            20
        ))
        self.assertGreater(len(results), 0)

        # Optional fields must be present on at least one result.
        for field in self.FIELDS_ANY_HOUSINGS_LIST:
            self.assertTrue(
                any(not empty(getattr(x, field)) for x in results),
                'Missing a "%s" field.' % field
            )
        for x in results:
            if 'type' in self.FIELDS_ALL_HOUSINGS_LIST:
                self.assertEqual(x.type, query.type)
            if 'advert_type' in self.FIELDS_ALL_HOUSINGS_LIST:
                self.assertIn(x.advert_type, query.advert_types)
            if 'house_type' in self.FIELDS_ALL_HOUSINGS_LIST:
                self.assertIn(x.house_type, query.house_types)
            for field in self.FIELDS_ALL_HOUSINGS_LIST:
                self.assertNotEmpty(x, field)
            if not empty(x.cost):
                self.assertNotEmpty(x, 'price_per_meter')
            for photo in x.photos:
                self.assertRegexpMatches(photo.url, r'^http(s?)://')
        return results

    def check_single_housing_all(self, housing,
                                 type, house_types, advert_type):
        """Check mandatory fields of a single housing post."""
        for field in self.FIELDS_ALL_SINGLE_HOUSING:
            self.assertNotEmpty(housing, field)
        if 'type' in self.FIELDS_ALL_SINGLE_HOUSING:
            if (
                    self.DO_NOT_DISTINGUISH_FURNISHED_RENT and
                    type in [POSTS_TYPES.RENT, POSTS_TYPES.FURNISHED_RENT]
            ):
                # The backend cannot tell rent from furnished rent apart:
                # accept either value.
                self.assertIn(housing.type,
                              [POSTS_TYPES.RENT, POSTS_TYPES.FURNISHED_RENT])
            else:
                self.assertEqual(housing.type, type)
        if 'house_type' in self.FIELDS_ALL_SINGLE_HOUSING:
            if not empty(house_types):
                self.assertEqual(housing.house_type, house_types)
            else:
                self.assertNotEmpty(housing, 'house_type')
        if 'advert_type' in self.FIELDS_ALL_SINGLE_HOUSING:
            self.assertEqual(housing.advert_type, advert_type)

    def check_single_housing_any(self, housing, counter):
        """Count which optional fields this housing provides; validate photo URLs."""
        for field in self.FIELDS_ANY_SINGLE_HOUSING:
            if not empty(getattr(housing, field)):
                counter[field] += 1
        for photo in housing.photos:
            self.assertRegexpMatches(photo.url, r'^http(s?)://')
        return counter

    def check_against_query(self, query):
        """Full check: listing plus individual fetches for the given query."""
        # Check housing listing results
        results = self.check_housing_lists(query)

        # Check mandatory fields in all housings
        housing = self.backend.get_housing(results[0].id)
        self.backend.fillobj(housing, 'phone')  # Fetch phone
        self.check_single_housing_all(
            housing,
            results[0].type,
            results[0].house_type,
            results[0].advert_type
        )

        # Check fields that should appear in at least one housing; stop as
        # soon as every optional field has been seen once.
        counter = Counter()
        counter = self.check_single_housing_any(housing, counter)
        for result in results[1:]:
            if all(counter[field] > 0 for field in
                   self.FIELDS_ANY_SINGLE_HOUSING):
                break
            housing = self.backend.get_housing(result.id)
            self.backend.fillobj(housing, 'phone')  # Fetch phone
            counter = self.check_single_housing_any(housing, counter)
        for field in self.FIELDS_ANY_SINGLE_HOUSING:
            self.assertGreater(
                counter[field],
                0,
                'Optional field "%s" should appear at least once.' % field
            )
|
benbaptist/pymine2 | plugin.py | Python | gpl-2.0 | 849 | 0.007067 | import glob, imp
import events
class EventManager:
    """Aggregates one handler instance per server event category.

    Plugins register their callbacks on these handler objects; the server
    fires them when the corresponding event occurs.
    """

    def __init__(self):
        self.Chat_Message_Event = events.ChatMessageEventHandler()
        self.Player_Join_Event = events.PlayerJoinEventHandler()
        # Fixed corrupted identifier ("PlayerLeaveEventHan | dler").
        self.Player_Leave_Event = events.PlayerLeaveEventHandler()
        self.Player_Move_Event = events.PlayerMoveEventHandler()
        self.Command_Event = events.CommandEventHandler()
        self.Packet_Recv_Event = events.PacketRecvEventHandler()
class PluginManager:
    """Discovers and loads ``plugins/*_plugin.py`` modules for a server."""

    def __init__(self, server):
        # plugin name -> loaded module
        self.plugins = {}
        self.server = server

    def load_plugins(self):
        """Import every plugins/*_plugin.py and instantiate its entry class.

        Each plugin module must define a class named after the file stem
        (e.g. ``foo_plugin.py`` defines ``foo``); it is constructed with the
        server instance.
        """
        for plugin in glob.glob("plugins/*_plugin.py"):
            # Strip the "plugins/" prefix (8 chars) and "_plugin.py" suffix (10 chars).
            plugin_name = plugin[8:-10]
            self.plugins[plugin_name] = imp.load_source(plugin_name, plugin)
            getattr(self.plugins[plugin_name], plugin_name)(self.server)
team-vigir/vigir_behaviors | behaviors/vigir_behavior_trigger_cutting_tool/src/vigir_behavior_trigger_cutting_tool/trigger_cutting_tool_sm.py | Python | bsd-3-clause | 17,004 | 0.022406 | #!/usr/bin/env python
###########################################################
# WARNING: Generated code! #
# ************************** #
# Manual changes may get lost if file is generated again. #
# Only code inside the [MANUAL] tags will be kept. #
###########################################################
import roslib; roslib.load_manifest('vigir_behavior_trigger_cutting_tool')
from flexbe_core import Behavior, Autonomy, OperatableStateMachine, Logger
from flexbe_states.calculation_state import CalculationState
from vigir_flexbe_states.plan_endeffector_pose_state import PlanEndeffectorPoseState
from vigir_flexbe_states.execute_trajectory_msg_state import ExecuteTrajectoryMsgState
from flexbe_states.decision_state import DecisionState
from vigir_flexbe_states.plan_endeffector_cartesian_waypoints_state import PlanEndeffectorCartesianWaypointsState
from flexbe_states.operator_decision_state import OperatorDecisionState
from vigir_flexbe_states.moveit_predefined_pose_state import MoveitPredefinedPoseState
from vigir_flexbe_states.get_wrist_pose_state import GetWristPoseState
from vigir_flexbe_states.get_pose_in_frame_state import GetPoseInFrameState
from flexbe_states.log_state import LogState
from flexbe_states.flexible_calculation_state import FlexibleCalculationState
# Additional imports can be added inside the following tags
# [MANUAL_IMPORT]
import rospy
import math
import copy
from geometry_msgs.msg import PoseStamped, Pose, Point, Quaternion
from std_msgs.msg import Header
from flexbe_core.proxy import ProxyPublisher
# [/MANUAL_IMPORT]
'''
Created on Tue May 12 2015
@author: Dorothea Koert, Philipp Schillinger
'''
class TriggerCuttingToolSM(Behavior):
'''
Switch the cutting tool on or off.
'''
    def __init__(self):
        """Declare behavior metadata, parameters, and manual init code."""
        super(TriggerCuttingToolSM, self).__init__()
        self.name = 'Trigger Cutting Tool'

        # parameters of this behavior
        self.add_parameter('hand_side', 'left')

        # references to used behaviors

        # Additional initialization code can be added inside the following tags
        # [MANUAL_INIT]
        # Scaling factor for the spiral poke pattern; overwritten in create().
        self._scaling_factor = 0
        # Debug publishers for visualizing poses.
        # NOTE(review): '/bla1'/'/bla2' look like placeholder topic names — confirm.
        self._pub1 = rospy.Publisher('/bla1', PoseStamped)
        self._pub2 = rospy.Publisher('/bla2', PoseStamped)
        # [/MANUAL_INIT]
# Behavior comments:
# O 453 228
# Start spiral again with its origin at the current position
# ! 3 256
# Skip predefined pre_poke_pose for now
def create(self):
number_of_points = 9
scaling_factor = 2 # for spiral pattern
attempts_per_point = 3
arm_controller = ExecuteTrajectoryMsgState.CONTROLLER_LEFT_ARM if self.hand_side == 'left' else ExecuteTrajectoryMsgState.CONTROLLER_RIGHT_ARM
poking_stick_frame = self.hand_side + '_poking_stick'
# x:183 y:40, x:283 y:290
_state_machine = OperatableStateMachine(outcomes=['finished', 'failed'])
_state_machine.userdata.none = None
_state_machine.userdata.hand_side = self.hand_side
# Additional creation code can be added inside the following tags
# [MANUAL_CREATE]
self._scaling_factor = scaling_factor
# [/MANUAL_CREATE]
# x:637 y:484, x:391 y:152
_sm_calculate_poke_poses_0 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['hand_side'], output_keys=['poke_waypoints', 'poke_frame_id', 'pre_poke_waypoints', 'pre_poke_frame_id'])
with _sm_calculate_poke_poses_0:
# x:63 y:78
OperatableStateMachine.add('Get_Current_Endeffector_Pose',
GetWristPoseState(),
transitions={'done': 'Transform_Endeffector_Pose', 'failed': 'failed'},
autonomy={'done': Autonomy.Low, 'failed': Autonomy.Low},
remapping={'hand_side': 'hand_side', 'wrist_pose': 'wrist_pose'})
# x:67 y:178
OperatableStateMachine.add('Transform_Endeffector_Pose',
GetPoseInFrameState(target_frame=poking_stick_frame),
transitions={'done': 'Translate_To_Poke_Pose', 'failed': 'failed'},
autonomy={'done': Autonomy.Off, 'failed': Autonomy.Off},
remapping={'pose_in': 'wrist_pose', 'pose_out': 'pre_poke_pose'})
# x:77 y:278
OperatableStateMachine.add('Translate_To_Poke_Pose',
CalculationState(calculation=self.calc_poke_pose),
transitions={'done': 'Pre_Poke_Pose_To_Waypoints'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'pre_poke_pose', 'output_value': 'poke_pose'})
# x:324 y:378
OperatableStateMachine.add('Poke_Pose_To_Waypoints',
CalculationState(calculation=lambda x: [x.pose]),
transitions={'done': 'Set_Poke_Waypoints_Frame'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'poke_pose', 'output_value': 'poke_waypoints'})
# x:569 y:378
OperatableStateMachine.add('Set_Poke_Waypoints_Frame',
CalculationState(calculation=lambda x: x.header.frame_id),
transitions={'done': 'finished'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'poke_pose', 'output_value': 'poke_frame_id'})
# x:313 y:278
OperatableStateMachine.add('Pre_Poke_Pose_To_Waypoints',
CalculationState(calculation=lambda x: [x.pose]),
transitions={'done': 'Set_Pre_Poke_Waypoints_Frame'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'pre_poke_pose', 'output_value': 'pre_poke_waypoints'})
# x:558 y:278
OperatableStateMachine.add('Set_Pre_Poke_Waypoints_Frame',
CalculationState(calculation=lambda x: x.header.frame_id),
transitions={'done': 'Poke_Pose_To_Waypoints'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'pre_poke_pose', 'output_value': 'pre_poke_frame_id'})
# x:433 y:40, x:441 y:253
_sm_poke_1 = OperatableStateMachine(outcomes=['finished', 'failed'], input_keys=['none', 'hand_side', 'poke_waypoints', 'poke_frame_id', 'pre_poke_waypoints', 'pre_poke_frame_id'])
with _sm_poke_1:
# x:49 y:78
OperatableStateMachine.add('Init_Inner_Index',
CalculationState(calculation=lambda x: 0),
transitions={'done': 'Plan_To_Poke_Pose'},
autonomy={'done': Autonomy.Off},
remapping={'input_value': 'none', 'output_value': 'poking_index'})
# x:76 y:328
OperatableStateMachine.add('Move_To_Poke_Pose',
ExecuteTrajectoryMsgState(controller=arm_controller),
transitions={'done': 'Plan_To_Pre_Poke_Pose', 'failed': 'failed'},
autonomy={'done': Autonomy.Low, 'failed': Autonomy.High},
remapping={'joint_trajectory': 'joint_trajectory'})
# x:626 y:378
OperatableStateMachine.add('Move_To_Pre_Poke_Pose',
ExecuteTrajectoryMsgState(controller=arm_controller),
transitions={'done': 'Increase_Inner_Index', 'failed': 'failed'},
autonomy={'done': Autonomy.Low, 'failed': Autonomy.High},
remapping={'joint_trajectory': 'joint_trajectory'})
# x:636 y:178
OperatableStateMachine.add('Increase_Inner_Index',
CalculationState(calculation=lambda x: x+1),
tr | ansitions={'done': 'Check_Inner_Index'},
autonomy={'done': Autonomy.Off},
re | mapping={'input_value': 'poking_index', 'output_value': 'poking_index'})
# x:392 y:178
OperatableStateMachine.add('Check_Inner_Index',
DecisionState(outcomes=['continue','finished'], conditions=lambda x: 'continue' if x<attempts_per_point else 'finished'),
transitions={'continue': 'Plan_To_Poke_Pose', 'finished': 'finished'},
autonomy={'continue': Autonomy.Low, 'finished': Autonomy.Low},
remapping={'input_value': 'poking_index'})
# x:46 y:178
OperatableStateMachine.add('Plan_To_Poke_Pose',
PlanEndeffectorCartesianWaypointsState(ignore_collisions=True, include_torso=False, keep_endeffector_orientation=False, allow_incomplete_plans=True, vel_scaling=0.2, planner_id="RRTConnectkConfigDefault"),
transitions={'planned': 'Move_To_Poke_Pose', 'incomplete': 'Move_To_Poke_Pose', 'failed': 'failed'},
autonomy={'planned': Autonomy.Low, 'incomplete': Autonomy.Low, 'failed': Autonomy.High},
remapping={'waypoints': 'poke_waypoints', 'hand': 'hand_si |
mvaled/sentry | src/sentry/south_migrations/0391_auto__add_fileblobowner__add_unique_fileblobowner_blob_organization__a.py | Python | bsd-3-clause | 99,198 | 0.008045 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
from sentry.utils.db import is_postgres
class Migration(SchemaMigration):
# Flag to indicate if this migration is too risky
# to run online and needs to be coordinated for offline
is_dangerous = True
    def forwards(self, orm):
        """Apply: create the FileBlobOwner table with its unique constraint,
        and add an index on File.checksum."""
        # Adding model 'FileBlobOwner'
        db.create_table('sentry_fileblobowner', (
            ('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
            ('blob', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(
                to=orm['sentry.FileBlob'])),
            ('organization', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(
                to=orm['sentry.Organization'])),
        ))
        db.send_create_signal('sentry', ['FileBlobOwner'])

        # Adding unique constraint on 'FileBlobOwner', fields ['blob', 'organization']
        db.create_unique('sentry_fileblobowner', ['blob_id', 'organization_id'])

        # Adding index on 'File', fields ['checksum']
        if is_postgres():
            # Postgres refuses CREATE INDEX CONCURRENTLY inside a transaction
            # block, so close South's implicit transaction first and reopen
            # one afterwards so the migration framework stays consistent.
            db.commit_transaction()
            db.execute(
                "CREATE INDEX CONCURRENTLY {} ON sentry_file (checksum)".format(
                    db.create_index_name('sentry_file', ['checksum']),
                )
            )
            db.start_transaction()
        else:
            db.create_index('sentry_file', ['checksum'])
    def backwards(self, orm):
        """Revert: drop the checksum index, the unique constraint, and the
        FileBlobOwner table (reverse order of forwards)."""
        # Removing index on 'File', fields ['checksum']
        db.delete_index('sentry_file', ['checksum'])

        # Removing unique constraint on 'FileBlobOwner', fields ['blob', 'organization']
        db.delete_unique('sentry_fileblobowner', ['blob_id', 'organization_id'])

        # Deleting model 'FileBlobOwner'
        db.delete_table('sentry_fileblobowner')
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apiapplication': {
'Meta': {'object_name': 'ApiApplication'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'client_id': ('django.db.models.fields.CharField', [], {'default': "'9ddd19fefbad48d293ed3bfcbfbbd1fbc91807ef432b40d4a3b7f892be891e2b'", 'unique': 'True', 'max_length': '64'}),
'client_secret': ('sentry.db.models.fields.encrypted.EncryptedTextField', [], {'default': "'541a0a5030c34d9189af5835e31d337f7ca41d4d7cd4456980939b0c8fa431c6'"}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'homepage_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'default': "'Decent Reptile'", 'max_length': '64', 'blank': 'True'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'privacy_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}),
'redirect_uris': ('django.db.models.fields.TextField', [], {}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'terms_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'})
},
'sentry.apiauthorization': {
'Meta': {'unique_together': "(('user', 'application'),)", 'object_name': 'ApiAuthorization'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apigrant': {
'Meta': {'object_name': 'ApiGrant'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']"}),
'code': ('django.db.models | .fields.CharField', [], {'default': "'a41ddfc7e4ca49a39d2b7e3edc26dcaf'", 'max_length': '64', 'db_index': 'True'}),
'expires_at': ( | 'django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 2, 8, 0, 0)', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'redirect_uri': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scope_list': ('sentry.db.models.fields.array.ArrayField', [], {'of': ('django.db.models.fields.TextField', [], {})}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.apitoken': {
'Meta': {'object_name': 'ApiToken'},
'application': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiApplication']", 'null': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'expires_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2018, 3, 10, 0, 0)', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'refresh_token': ('django.db.models.f |
DTMilodowski/LiDAR_canopy | src/LiDAR_tools.py | Python | gpl-3.0 | 3,971 | 0.0345 | import numpy as np
import laspy as las
# Determine if a point is inside a given polygon or not
# Polygon is a list of (x,y) pairs. This function
# returns True or False. The algorithm is called
# the "Ray Casting Method".
# the point_in_poly algorithm was found here:
# http://geospatialpython.com/2011/01/point-in-polygon.html
def point_in_poly(x, y, poly):
    """Ray-casting point-in-polygon test for a single point.

    ``poly`` is a list of (x, y) vertex pairs. Returns True when (x, y)
    lies inside the polygon, False otherwise. Algorithm reference:
    http://geospatialpython.com/2011/01/point-in-polygon.html
    """
    inside = False
    prev_x, prev_y = poly[-1]
    for cur_x, cur_y in poly:
        # A horizontal ray from (x, y) can only cross this edge if y lies
        # strictly between the edge's y-extent and x is left of its x-extent.
        if min(prev_y, cur_y) < y <= max(prev_y, cur_y) and x <= max(prev_x, cur_x):
            if prev_x == cur_x:
                # Vertical edge: crossing is certain.
                inside = not inside
            else:
                x_cross = (y - prev_y) * (cur_x - prev_x) / (cur_y - prev_y) + prev_x
                if x <= x_cross:
                    inside = not inside
        prev_x, prev_y = cur_x, cur_y
    return inside
# This one is my own version of the ray-trace algorithm which utilises the numpy arrays so that a list of x and y coordinates can be processed in one call and only points inside polygon are returned alongside the indices in case required for future referencing. This saves a fair bit of looping.
def points_in_poly(x, y, poly):
    """Vectorized ray-casting point-in-polygon test.

    ``x`` and ``y`` are numpy arrays of point coordinates; ``poly`` is a list
    of (x, y) vertex pairs. Returns the x and y coordinates of the points
    inside the polygon, plus the boolean membership mask over all inputs.
    """
    n = len(poly)
    inside = np.zeros(x.size, dtype=bool)
    xints = np.zeros(x.size)
    p1x, p1y = poly[0]
    for i in range(n + 1):
        p2x, p2y = poly[i % n]
        # PERF: the original recomputed this identical mask up to three
        # times per edge; hoist it so each edge does one O(points) pass.
        mask = np.all([y > min(p1y, p2y),
                       y <= max(p1y, p2y),
                       x <= max(p1x, p2x)], axis=0)
        if p1y != p2y:
            # x coordinate where the horizontal ray meets this edge.
            xints[mask] = (y[mask] - p1y) * (p2x - p1x) / (p2y - p1y) + p1x
        if p1x == p2x:
            # Vertical edge: every masked point's ray crosses it.
            inside[mask] = np.invert(inside[mask])
        else:
            crossing = mask & (x <= xints)
            inside[crossing] = np.invert(inside[crossing])
        p1x, p1y = p2x, p2y
    return x[inside], y[inside], inside
# This retrieves all points within circular neighbourhood, Terget point is the location around which the neighbourhood search is conducted, for a specified search radius. x and y are vectors with the x and y coordinates of the test points
def points_in_radius(x, y, target_x, target_y, radius):
    """Select the points of (x, y) within a circular neighbourhood.

    The neighbourhood is centred on (target_x, target_y) with the given
    search radius (boundary included). Returns the selected x and y
    coordinates plus the boolean selection mask.
    """
    squared_dist = (x - target_x) ** 2 + (y - target_y) ** 2
    within = squared_dist <= radius ** 2
    return x[within], y[within], within
# filter lidar wth polygon
# This function has been updated to include an option to filter by first return location.
# The reason for this is so full collections of returns associated with each LiDAR pulse
# can be retrieved, which can be an issue at edges in multi-return analyses
def filter_lidar_data_by_polygon(in_pts, polygon, filter_by_first_return_location=False):
    """Return the rows of *in_pts* whose points fall inside *polygon*.

    Columns used: 0 = x, 1 = y, 3 = return number, 6 = GPS time.

    If ``filter_by_first_return_location`` is True, membership is decided by
    the location of each pulse's FIRST return, and every return sharing that
    pulse's GPS time is kept. This avoids splitting multi-return pulses at
    polygon edges.
    """
    pts = np.zeros((0, in_pts.shape[1]))
    if in_pts.shape[0] > 0:
        if filter_by_first_return_location:
            # find first returns
            first_returns = in_pts[:, 3] == 1
            _, _, inside_first = points_in_poly(
                in_pts[first_returns, 0], in_pts[first_returns, 1], polygon)
            # GPS times of pulses whose first return is inside the polygon.
            shots = np.unique(in_pts[first_returns, 6][inside_first])
            # Keep every return belonging to one of those pulses.
            keep = np.in1d(in_pts[:, 6], shots)
        else:
            _, _, keep = points_in_poly(in_pts[:, 0], in_pts[:, 1], polygon)
        pts = in_pts[keep, :]
    else:
        # Fixed separator-corrupted message string ("no point | s").
        print("\t\t\t no points in polygon")
    return pts
# filter lidar by circular neighbourhood
def filter_lidar_data_by_neighbourhood(in_pts,target_xy,radius | ):
pts = np.zeros((0,in_pts.shape[1]))
if in_pts.shape[0]>0:
x,y,inside = points_in_radius(in_pts[:,0],in_pts[:,1],target_xy[0],target_xy[1],radius)
pts = in_pts[inside,:]
else:
print( "\t\t\t no points in neighbourhood")
return pts
|
plotly/plotly.py | packages/python/plotly/plotly/validators/scatter/hoverlabel/font/_size.py | Python | mit | 506 | 0.001976 | import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
    """Number validator for the ``scatter.hoverlabel.font.size`` attribute."""

    def __init__(
        self, plotly_name="size", parent_name="scatter.hoverlabel.font", **kwargs
    ):
        # Pop validator options out of kwargs, falling back to the
        # schema defaults for this attribute.
        array_ok = kwargs.pop("array_ok", True)
        edit_type = kwargs.pop("edit_type", "none")
        minimum = kwargs.pop("min", 1)
        super(SizeValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            array_ok=array_ok,
            edit_type=edit_type,
            min=minimum,
            **kwargs
        )
|
fcopantoja/sips | sips/asuntos/models.py | Python | mit | 2,019 | 0.000991 | import uuid
from django.contrib.auth.models import User
from django.db import models
# (db value, human-readable label) pairs for Asunto.status.
STATUS_CHOICES = (
    ('atendido', 'Atendido'),
    ('no_atendido', 'No Atendido'),
    ('en_proceso', 'En Proceso'),
)
class Municipio(models.Model):
    """Catalogue of municipalities a case (Asunto) can belong to."""

    name = models.CharField(max_length=100, blank=False, null=False)

    def __unicode__(self):
        return '%s' % self.name
class Tipo(models.Model):
    """Catalogue of case types (the kind of 'asunto' being filed)."""

    name = models.CharField(max_length=100, blank=False, null=False)

    def __unicode__(self):
        return '%s' % self.name
class Asunto(models.Model):
    """A citizen case ('asunto') tracked by the system."""

    # Random UUID; only its first 8 hex chars are shown as the public folio.
    folio = models.UUIDField(default=uuid.uuid4, editable=False)
    nombre = models.CharField(max_length=100, blank=False, null=False)
    curp = models.CharField(max_length=100, blank=False, null=False)
    domicilio = models.CharField(max_length=100, blank=False, null=False)
    colonia = models.CharField(max_length=100, blank=False, null=False)
    municipio = models.ForeignKey(Municipio)
    asunto = models.ForeignKey(Tipo)
    # Whether the case falls under the Procuraduria Social's jurisdiction.
    competencia_ps = models.BooleanField('Competencia de la Procuraduria Social', default=False)
    added = models.DateTimeField(auto_now_add=True)
    status = models.CharField(choices=STATUS_CHOICES, null=True, max_length=20, blank=True, default='en_proceso')
    # Social agent assigned to the case, if any.
    agente_social = models.ForeignKey(User, null=True, blank=True)

    def __unicode__(self):
        # Display the shortened, upper-cased folio.
        return '%s' % (str(self.folio)[:8]).upper()

    def folio_(self):
        # Same shortened folio, convenient for admin list displays.
        return (str(self.folio)[:8]).upper()

    def status_(self):
        # Map the stored status value to its human-readable label.
        # Returns None implicitly when status is not a valid choice.
        for choice in STATUS_CHOICES:
            if self.status == choice[0]:
                return choice[1]
class AsuntoEvento(models.Model):
    """An appointment/court-visit event attached to an Asunto."""

    asunto = models.ForeignKey(Asunto)
    # Appointment with the citizen.
    fecha_cita_usuario = models.DateTimeField(null=False)
    observaciones_cita_usuario = models.TextField(blank=True, null=False)
    # Visit to the court ('juzgado').
    fecha_visita_juzgado = models.DateTimeField(null=False)
    juzgado = models.CharField(max_length=100, blank=False, null=False)
    observaciones_visita_juzgado = models.TextField(blank=True, null=False)
|
lptorres/noah-inasafe | web_api/third_party/raven/utils/encoding.py | Python | gpl-3.0 | 3,604 | 0.00222 | """
raven.utils.encoding
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import warnings
def force_unicode(s, encoding='utf-8', errors='strict'):
    """
    Similar to smart_unicode, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.

    Adapted from Django. Python 2 only (uses basestring/unicode).
    """
    try:
        if not isinstance(s, basestring,):
            if hasattr(s, '__unicode__'):
                s = unicode(s)
            else:
                try:
                    s = unicode(str(s), encoding, errors)
                except UnicodeEncodeError:
                    if not isinstance(s, Exception):
                        raise
                    # If we get to here, the caller has passed in an Exception
                    # subclass populated with non-ASCII data without special
                    # handling to display as a string. We need to handle this
                    # without raising a further exception. We do an
                    # approximation to what the Exception's standard str()
                    # output should be.
                    s = ' '.join([force_unicode(arg, encoding,
                            errors) for arg in s])
        elif not isinstance(s, unicode):
            # Note: We use .decode() here, instead of unicode(s, encoding,
            # errors), so that if s is a SafeString, it ends up being a
            # SafeUnicode at the end.
            s = s.decode(encoding, errors)
    except UnicodeDecodeError, e:
        if not isinstance(s, Exception):
            raise UnicodeDecodeError(s, *e.args)
        else:
            # If we get to here, the caller has passed in an Exception
            # subclass populated with non-ASCII bytestring data without a
            # working unicode method. Try to handle this without raising a
            # further exception by individually forcing the exception args
            # to unicode.
            s = ' '.join([force_unicode(arg, encoding,
                errors) for arg in s])
    return s
def transform(value):
    """Deprecated shim; delegates to raven.utils.serializer.transform.

    Note: the local import deliberately shadows this function's name.
    """
    from raven.utils.serializer import transform
    warnings.warn('You should switch to raven.utils.serializer.transform', DeprecationWarning)
    return transform(value)
def to_unicode(value):
    """Best-effort conversion of *value* to unicode; never raises.

    Falls back to the value's type repr, then to a placeholder string,
    when decoding fails entirely. Python 2 only.
    """
    try:
        value = unicode(force_unicode(value))
    except (UnicodeEncodeError, UnicodeDecodeError):
        value = '(Error decoding value)'
    except Exception:  # in some cases we get a different exception
        try:
            value = str(repr(type(value)))
        except Exception:
            value = '(Error decoding value)'
    return value
def to_string(value):
    """Best-effort conversion of *value* to a UTF-8 byte string (Python 2)."""
    try:
        return str(value.decode('utf-8').encode('utf-8'))
    except:
        # Deliberate catch-all: anything that cannot round-trip as UTF-8
        # falls back to the lossy to_unicode() path.
        return to_unicode(value).encode('utf-8')
def shorten(var, list_length=50, string_length=200):
    """Truncate *var* for safe display.

    Strings longer than *string_length* are cut and suffixed with '...';
    sequences longer than *list_length* are truncated and annotated with
    the number of omitted elements. Other types pass through transform().
    """
    from raven.utils.serializer import transform
    var = transform(var)
    if isinstance(var, basestring) and len(var) > string_length:
        var = var[:string_length] + '...'
    elif isinstance(var, (list, tuple, set, frozenset)) and len(var) > list_length:
        # TODO: we should write a real API for storing some metadata with vars when
        # we get around to doing ref storage
        # TODO: when we finish the above, we should also implement this for dicts
        var = list(var)[:list_length] + ['...', '(%d more elements)' % (len(var) - list_length,)]
    return var
|
joyxu/kernelci-backend | app/utils/__init__.py | Python | agpl-3.0 | 5,533 | 0 | # Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Common functions, variables for all kernelci utils modules."""
import models
import utils.log
BASE_PATH = "/var/www/images/kernel-ci"
DEFAULT_MONGODB_URL = "localhost"
DEFAULT_MONGODB_PORT = 27017
DEFAULT_MONGODB_POOL = 250
LOG = utils.log.get_log()
# Pattern used for glob matching files on the filesystem.
BOOT_REPORT_PATTERN = "boot-*.json"
# Build log file names.
BUILD_LOG_FILE = "build.log"
BUILD_ERRORS_FILE = "build-errors.log"
BUILD_WARNINGS_FILE = "build-warnings.log"
BUILD_MISMATCHES_FILE = "build-mismatches.log"
def is_hidden(value):
    """Verify if a file name or dir name is hidden (starts with .).

    :param value: The value to verify.
    :return True or False.
    """
    # startswith already yields the boolean; no temp flag needed.
    return value.startswith(".")
def is_lab_dir(value):
    """Verify if a file name or dir name is a lab one.

    A lab dir name starts with lab-.

    :param value: The value to verify.
    :return True or False.
    """
    # startswith already yields the boolean; no temp flag needed.
    return value.startswith("lab-")
def get_defconfig_full(defconfig_dir,
                       defconfig, defconfig_full, kconfig_fragments):
    """Get the value for defconfig_full variable based on available ones.

    :param defconfig_dir: The defconfig directory we are parsing.
    :type defconfig_dir: string
    :param defconfig: The value for defconfig.
    :type defconfig: string
    :param defconfig_full: The possible value for defconfig_full as taken from
    the build json file.
    :type defconfig_full: string
    :param kconfig_fragments: The config fragments value where to start.
    :type kconfig_fragments: string
    :return The defconfig_full value.
    """
    if all([defconfig_full is None, kconfig_fragments is None]):
        defconfig_full = defconfig
    elif all([defconfig_full is None, kconfig_fragments is not None]):
        # Infer the real defconfig used from the values we have.
        # Try first from the kconfig_fragments and then from the
        # directory we are traversing.
        defconfig_full_k = \
            _extrapolate_defconfig_full_from_kconfig(
                kconfig_fragments, defconfig)
        defconfig_full_d = \
            _extrapolate_defconfig_full_from_dirname(defconfig_dir)
        # Default to use the one from kconfig_fragments.
        defconfig_full = defconfig_full_k
        # Use the one from the directory only if it is different from
        # the one obtained via the kconfig_fragments and if it is
        # different from the default defconfig value.
        if all([
                defconfig_full_d is not None,
                defconfig_full_d != defconfig_full_k,
                defconfig_full_d != defconfig]):
            # BUG FIX: this branch previously re-assigned defconfig_full_k,
            # making it a no-op; the comment above shows the directory-derived
            # value was intended here.
            defconfig_full = defconfig_full_d
    return defconfig_full
# pylint: disable=invalid-name
def _extrapolate_defconfig_full_from_kconfig(kconfig_fragments, defconfig):
"""Try to extrapolate a valid value for the defconfig_full argument.
When the kconfig_fragments filed is defined, it should have a default
structure.
:param kconfig_fragments: The config fragments value where to start.
:type kconfig_fragments: str
:param defconfig: The defconfig value to use. Will be returned if
`kconfig_fragments` does not match the known ones.
:type defconfig: str
:return A string with the `defconfig_full` value or the provided
`defconfig`.
"""
defconfig_full = defconfig
if all([kconfig_fragments.startswith("frag-"),
kconfig_fragments.endswith(".config")]):
defconfig_full = "%s+%s" % (
defconfig,
kconfig_fragments.replace("frag-", "").replace(".config", ""))
return defconfig_full
def _extrapolate_defconfig_full_from_dirname(dirname):
    """Try to extrapolate a valid defconfig_full value from the directory name.

    The directories we are traversing are built with the following pattern:
        ARCH-DEFCONFIG[+FRAGMENTS]
    We strip the ARCH part and keep only the rest.

    :param dirname: The name of the directory we are traversing.
    :type dirname: str
    :return None if the directory name does not match a valid pattern, or
    the value extrapolated from it.
    """
    def _replace_arch_value(arch, dirname):
        # NOTE: the parameter name had been corrupted ("di | rname") in the
        # source; restored to ``dirname``.
        """Local function to replace the found arch value.

        :param arch: The name of the architecture.
        :type arch: str
        :param dirname: The name of the directory.
        :param dirname: str
        :return The directory name without the architecture value.
        """
        return dirname.replace("%s-" % arch, "", 1)

    defconfig_full = None
    for arch in models.VALID_ARCHITECTURES:
        if arch in dirname:
            defconfig_full = _replace_arch_value(arch, dirname)
            break
    return defconfig_full
|
dstufft/ooni-backend | oonib/policy/api.py | Python | bsd-2-clause | 161 | 0 | fro | m oonib.policy import handlers
policyAPI = [
(r"/policy/nettest", handlers.NetTestPolicyHandler),
(r"/policy/input", handlers | .InputPolicyHandler),
]
|
datafiniti/Diamond | src/collectors/filestat/test/testfilestat.py | Python | mit | 1,903 | 0.002102 | #!/usr/bin/python
# coding=utf-8
################################################################################
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
try:
from cStringIO import StringIO
StringIO # workaround for pyflakes issue #13
except ImportError:
from StringIO import StringIO
from diamond.collector import Collector
from filestat import FilestatCollector
################################################################################
class TestFilestatCollector(CollectorTestCase):
    """Unit tests for FilestatCollector (Diamond metrics collector)."""

    def setUp(self):
        # Build a collector with a minimal config; publishing is mocked.
        config = get_collector_config('FilestatCollector', {
            'interval': 10
        })
        self.collector = FilestatCollector(config, None)

    def test_import(self):
        self.assertTrue(FilestatCollector)

    @patch('__builtin__.open')
    @patch('os.access', Mock(return_value=True))
    @patch.object(Collector, 'publish')
    def test_should_open_proc_sys_fs_file_nr(self, publish_mock, open_mock):
        # NOTE: the method name had been corrupted in the source
        # ("te | st_should_open_ | proc..."); restored.
        open_mock.return_value = StringIO('')
        self.collector.collect()
        open_mock.assert_called_once_with('/proc/sys/fs/file-nr')

    @patch.object(Collector, 'publish')
    def test_should_work_with_real_data(self, publish_mock):
        # Point the collector at a fixture copy of /proc/sys/fs/file-nr.
        FilestatCollector.PROC = self.getFixturePath('proc_sys_fs_file-nr')
        self.collector.collect()
        metrics = {
            'assigned': 576,
            'unused': 0,
            'max': 4835852
        }
        self.setDocExample(collector=self.collector.__class__.__name__,
                           metrics=metrics,
                           defaultpath=self.collector.config['path'])
        self.assertPublishedMany(publish_mock, metrics)
################################################################################
if __name__ == "__main__":
unittest.main()
|
AndroidSecurityTools/lobotomy | framework/enums/enums.py | Python | mit | 5,361 | 0.003544 | class ADBEnum(object):
commands = {
"am start": "adb shell am start"
}
class D2JEnum(object):
    """Command templates used when driving dex2jar."""

    # Maps a short action name to the dex2jar command-line prefix.
    commands = {
        "decompile": "dex2jar-2.0/d2j-dex2jar.sh --force --output",
    }
class APIMappings(object):
    """Static mapping of Android permissions to the API classes/methods
    whose use requires them.

    Two corrupted spans restored: the "access_coarse_location" opening
    brace and the "SetStateOutofService" method name.
    """

    mappings = {
        "install_shortcut":
        {
            "permission": "com.android.launcher.permission.INSTALL_SHORTCUT",
            "class":
            {
                "android.content.Intent.ShortcutIconResource":
                {
                    "method": "fromContext"
                }
            },
            "method": "fromContext"
        },
        "read_phone_state":
        {
            "permission": "android.permission.READ_PHONE_STATE",
            "classes":
            {
                "android.telephony.PhoneStateListener":
                {
                    "methods":
                    [
                        "onCallForwardingIndicatorChanged", "onCallStateChanged",
                        "onCellInfoChanged", "onCellLocationChanged", "onDataActivity",
                        "onDataConnectionStateChanged", "onMessageWaitingIndicatorChanged",
                        "onServiceStateChanged", "onSignalStrengthChanged",
                        "onSignalStrengthsChanged"
                    ]
                },
                "android.telephony.ServiceState":
                {
                    "methods":
                    [
                        "getlsManualSelection", "getOperatorAlphaLong",
                        "getOperatorAlphaShort", "getOperatorNumeric", "getRoaming", "getState",
                        "setlsManualSelection", "setOperatorName", "setRoaming", "setState",
                        "SetStateOutofService"
                    ]
                }
            },
        },
        "access_coarse_location":
        {
            "permission": "android.permission.ACCESS_COARSE_LOCATION",
            "classes":
            {
                "android.location.LocationManager":
                {
                    "methods":
                    [
                        "requestLocationUpdates", "getProviders", "requestSingleUpdate",
                        "getProvider", "getLastKnownLocation", "isProviderEnabled",
                        "addProximityAlert", "requestLocationUpdates", "getBestProvider",
                        "sendExtraCommand"
                    ]
                },
                "android.telephony.TelephonyManager":
                {
                    "methods":
                    [
                        "getNeighboingCellInfo", "getCellLocation", "listen"
                    ]
                }
            },
        },
        "access_fine_location":
        {
            "permission": "android.permission.ACCESS_FINE_LOCATION",
            "classes":
            {
                "android.location.LocationManager":
                {
                    "methods":
                    [
                        "requestLocationUpdates", "getProviders", "requestSingleUpdate",
                        "getProvider", "getLastKnownLocation", "isProviderEnabled",
                        "addProximityAlert", "requestLocationUpdates", "getBestProvider",
                        "sendExtraCommand", "addNmeaListner", "addGpsStatusListener"
                    ]
                },
                "android.telephony.TelephonyManager":
                {
                    "methods":
                    [
                        "getNeighboingCellInfo", "getCellLocation"
                    ]
                }
            },
        },
        "access_network_state":
        {
            "permission": "android.permission.ACCESS_NETWORK_STATE",
            "class":
            {
                "android.net.ConnectivityManager":
                {
                    "methods":
                    [
                        "getNetworkInfo", "isActiveNetworkMetered", "getNetworkPreferences",
                        "getActiveNetworkInfo", "getAllNetworkInfo", "stopUsingNetworkFeature",
                        "startUsingNetworkFeature"
                    ]
                }
            },
        },
    }
|
imaluengo/SMURFS-Superpixels | run_smurfs.py | Python | mit | 1,927 | 0.016087 | #!/usr/bin/env python
import argparse
import os
import numpy as np
from skimage import io, util
from pysmurfs import SMURFS, qSMURFS, rSMURFS, qrSMURFS, visualize

base_dir = os.path.dirname(os.path.realpath(__file__))
smurfs_desc = 'SMURFS: Superpixels from Multiscale Refinement of Super-regions'
parser = argparse.ArgumentParser(description=smurfs_desc)
parser.add_argument('input_file', type=str,
                    help='Input RGB image file path')
parser.add_argument('num_superpixels', type=int,
                    help='Desired number of superpixels.')
parser.add_argument('--quick', dest='quick', action='store_true',
                    help='Run only one iteration of the algorithm.')
parser.add_argument('--regular', dest='regular', action='store_true',
                    help='Obtain regular (more square) superpixels.')
parser.add_argument('--out', type=str, default=base_dir,
                    help='Output folder. If not given, result will be saved in '
                         'the current folder.')
parser.add_argument('--plot', dest='plot', action='store_true',
                    help='Show plot with results after finishing.')
args = parser.parse_args()

# Load and normalize the input image to float32 in [0, 1].
try:
    img = io.imread(args.input_file)
    img = util.img_as_float(img).astype(np.float32)
except Exception as e:  # ``as`` syntax works on both Python 2.6+ and 3
    raise Exception('Invalid image file: {}'.format(args.input_file))

if args.num_superpixels <= 0 or args.num_superpixels >= np.prod(img.shape[:2]):
    raise Exception('Invalid number of superpixels: {}'.format(args.num_superpixels))

# Pick the algorithm variant: q* = quick (single iteration),
# r* = regular (more square superpixels).
if args.regular:
    if args.quick:
        result = qrSMURFS(img, args.num_superpixels)
    else:
        # BUG FIX: previously called qSMURFS here, so --regular without
        # --quick never used the regular variant (rSMURFS was imported
        # but unused).
        result = rSMURFS(img, args.num_superpixels)
else:
    if args.quick:
        result = qSMURFS(img, args.num_superpixels)
    else:
        result = SMURFS(img, args.num_superpixels)

# Save the label image as result-<input-stem>.png in the output folder.
fileId = os.path.basename(args.input_file)
fileId = fileId[:fileId.rfind('.')]
out_file = 'result-{}.png'.format(fileId)
out_file = os.path.join(args.out, out_file)
io.imsave(out_file, result.astype(np.uint16))
if args.plot:
    visualize(img, result)
|
SylvainCecchetto/plugin.video.catchuptvandmore | plugin.video.catchuptvandmore/resources/lib/channels/ch/lemanbleu.py | Python | gpl-2.0 | 5,905 | 0.000339 | # -*- coding: utf-8 -*-
"""
Catch-up TV & More
Copyright (C) 2019 SylvainCecchetto
This file is part of Catch-up TV & More.
Catch-up TV & More is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
Catch-up TV & More is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with Catch-up TV & More; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
# The unicode_literals import only has
# an effect on Python 2.
# It makes string literals as unicode like in Python 3
from __future__ import unicode_literals
from builtins import str
from codequick import Route, Resolver, Listitem, utils, Script
from resources.lib import web_utils
from resources.lib import download
from resources.lib.menu_utils import item_post_treatment
import json
import re
import urlquick
from kodi_six import xbmcgui
# TO DO
# Add replay
URL_ROOT = 'http://www.lemanbleu.ch'
# Live
URL_LIVE = URL_ROOT + '/fr/Live.html'
URL_INFOMANIAK_LIVE = 'http://livevideo.infomaniak.com/iframe.php?stream=naxoo&name=test&player=%s'
# Player
URL_REPLAY = URL_ROOT + '/replay/video.html'
URL_VIDEOS = URL_ROOT + '/Scripts/Modules/CustomView/List.aspx?idn=9667&name=ReplaySearch&EmissionID=%s&pg=%s'
# program_id
QUALITIES_STREAM = ['sd', 'md', 'hq', 'hd']
@Route.register
def list_programs(plugin, item_id, **kwargs):
    """
    Build categories listing
    - Les Programmes
    - ...
    """
    # Scrape the replay page: each <li> of the filter list is one programme.
    resp = urlquick.get(URL_REPLAY)
    root = resp.parse("ul", attrs={"id": "itemFilters"})
    for program_datas in root.iterfind(".//li"):
        program_title = program_datas.find('.//a').text
        # The programme id is embedded in the inline onclick="...(this,ID)".
        program_id = re.compile(r'this\,(.*?)\)').findall(
            program_datas.find('.//a').get('onclick'))[0]
        item = Listitem()
        item.label = program_title
        item.set_callback(list_videos,
                          item_id=item_id,
                          program_id=program_id,
                          page='1')
        item_post_treatment(item)
        yield item
@Route.register
def list_videos(plugin, item_id, program_id, page, **kwargs):
    """List the replay videos of one programme (paginated)."""
    resp = urlquick.get(URL_VIDEOS % (program_id, page))
    root = resp.parse()
    for video_datas in root.iterfind(".//li[@class='item']"):
        video_title = video_datas.find('.//h3').text
        video_image = URL_ROOT + video_datas.find('.//img').get('src')
        video_plot = video_datas.find('.//p').text
        video_url = URL_ROOT + video_datas.find('.//a').get('href')
        # The date span appears to hold "<word> <dd.mm.yyyy>"; the second
        # token is parsed below with '%d.%m.%Y' — TODO confirm on the site.
        date_value = video_datas.find(".//span[@class='date']").text.split(
            ' ')[1]
        item = Listitem()
        item.label = video_title
        item.art['thumb'] = item.art['landscape'] = video_image
        item.info['plot'] = video_plot
        item.info.date(date_value, '%d.%m.%Y')
        item.set_callback(get_video_url,
                          item_id=item_id,
                          video_url=video_url)
        item_post_treatment(item, is_playable=True, is_downloadable=True)
        yield item
    # Infinite pagination: the site is probed one page at a time.
    yield Listitem.next_page(item_id=item_id,
                             program_id=program_id,
                             page=str(int(page) + 1))
@Resolver.register
def get_video_url(plugin,
                  item_id,
                  video_url,
                  download_mode=False,
                  **kwargs):
    """Resolve (or download) the stream URL for a replay video page.

    The base stream URL is scraped from the og:video meta tag; each
    quality variant is derived from it by swapping the 'mp4-231' token.
    The add-on quality setting selects DIALOG / BEST / default behaviour.
    """
    resp = urlquick.get(video_url, max_age=-1)
    stream_url = re.compile(r'og\:video\" content\=\"(.*?)\"').findall(
        resp.text)[0]
    desired_quality = Script.setting.get_string('quality')
    all_datas_videos_quality = []
    all_datas_videos_path = []
    # Build one URL per supported quality (the 'md' literal had been
    # corrupted in the source; restored).
    for quality in QUALITIES_STREAM:
        all_datas_videos_quality.append(quality)
        if quality == 'sd':
            all_datas_videos_path.append(
                stream_url.replace('mp4-231', 'mp4-322'))
        elif quality == 'md':
            all_datas_videos_path.append(
                stream_url.replace('mp4-231', 'mp4-323'))
        elif quality == 'hq':
            all_datas_videos_path.append(
                stream_url.replace('mp4-231', 'mp4-12'))
        else:
            # 'hd': the published URL is used as-is.
            all_datas_videos_path.append(stream_url)
    url = ''
    if desired_quality == "DIALOG":
        seleted_item = xbmcgui.Dialog().select(
            plugin.localize(30709),
            all_datas_videos_quality)
        if seleted_item == -1:
            # BUG FIX: previously fell through and indexed [-1], playing
            # the last stream even though the user cancelled the dialog.
            return False
        url = all_datas_videos_path[seleted_item]
    elif desired_quality == "BEST":
        # The list is ordered lowest-to-highest quality; keep the last one.
        url_best = ''
        for data_video in all_datas_videos_path:
            url_best = data_video
        url = url_best
    else:
        url = all_datas_videos_path[0]
    if download_mode:
        return download.download_video(url)
    return url
@Resolver.register
def get_live_url(plugin, item_id, **kwargs):
    """Resolve the live HLS stream URL via the Infomaniak player config."""
    resp = urlquick.get(URL_LIVE)
    # The player id is embedded in the live page markup (&player=ID").
    player_id = re.compile(r'\&player\=(.*?)\"').findall(resp.text)[0]
    # The iframe redirects; follow manually to capture the Location header.
    session_urlquick = urlquick.Session(allow_redirects=False)
    resp2 = session_urlquick.get(URL_INFOMANIAK_LIVE % player_id)
    location_url = resp2.headers['Location']
    # playerConfig.php returns a JSON document describing the integrations.
    resp3 = urlquick.get(location_url.replace(
        'infomaniak.com/', 'infomaniak.com/playerConfig.php'),
                         max_age=-1)
    json_parser = json.loads(resp3.text)
    stream_url = ''
    # Keep the last integration whose type mentions 'hls'.
    for stram_datas in json_parser['data']['integrations']:
        if 'hls' in stram_datas['type']:
            stream_url = stram_datas['url']
    return stream_url
|
ciandcd/ciandcd-web | htmlExtractor/HECommon.py | Python | mit | 5,015 | 0.02333 | import os
import re
from datetime import *
import feedparser
from newspaper import Article,Config
import ne | wspaper
import urllib.request
def writeFile(outPath, content):
    """Write *content* to *outPath* (text mode, truncating).

    Open failures are reported instead of raised, matching the intent of
    the original dead ``if file:`` branch (open() raises rather than
    returning a falsy handle). The ``with`` block also guarantees the
    handle is closed, which the original leaked on write errors.
    """
    try:
        with open(outPath, 'w') as out:
            out.write(content)
    except IOError:
        print("Error Opening File " + outPath)
def writeHtml(outPath,content,title,link,date,authors,tags):
    """Wrap *content* in a minimal HTML5 document — with title, source link
    and date/authors/tags meta tags — and save it to *outPath* via writeFile.

    Existing files are never overwritten because ``force`` is hard-coded to 0.
    """
    print('date:authors:tags' + date + authors + tags)
    html = '''<!DOCTYPE html>
<html lang="zh-cn">
<head>
<meta charset="utf-8"/>
<title>
'''
    html = html + title + '</title>'
    # A datetime date is normalised to "YYYY-MM-DD HH:MM" before embedding.
    if(isinstance(date,datetime)):
        date = date.strftime('%Y-%m-%d %H:%M')
    if date != '':
        html = html + '<meta name="date" content="' + date + '"/>'
    if authors != '':
        html = html + '<meta name="authors" content="' + authors + '" />'
    if tags != '':
        html = html + '<meta name="tags" content="' + tags + '" />'
    html = html + '</head><body>'
    html = html + 'From:<a href=' + link + '>' + link + '</a><br><br>'
    html = html + content + '</body></html>'
    # Overwrite switch: 0 = keep existing files, non-zero = always rewrite.
    force = 0
    if(force == 0):
        if os.path.exists(outPath):
            print("The file " + outPath + " is existed, will ignore.")
        else:
            writeFile(outPath,html)
            print("save to:" + outPath)
    else:
        writeFile(outPath,html)
        print("save to:" + outPath)
def getDomain(url):
    """Return the scheme+host prefix of *url*, trailing slash included,
    or '' when the URL does not look like http(s)://host/...
    """
    match = re.search(r'http[s]?://(.*?)/', url)
    return match.group() if match else ''
def fixLinks(html, link):
    """Make site-relative href values absolute by prefixing them with *link*.

    BUG FIX: the original replacement returned only ``link + path``, so
    re.sub replaced the whole ``href="/x"`` match and dropped the
    ``href="..."`` wrapper, corrupting the anchor tag.
    """
    def _absolutize(m):
        # Re-emit the full attribute, not just the rewritten URL.
        return 'href="' + link + m.group(1) + '"'
    reobj = re.compile('href="(/.*?)"')
    return reobj.sub(_absolutize, html)
def getLinks(url,regex):
    """Fetch *url* and return the de-duplicated links matching *regex*.

    The pattern is wrapped in double quotes so it matches attribute
    values in the fetched HTML. Order of the result is not preserved.
    """
    website = urllib.request.urlopen(url)
    html = website.read().decode('utf-8')
    regex_new = '"(' + regex + ')"'
    print('regex:' + regex_new)
    links = re.findall(regex_new, html)
    # set() removes duplicate link occurrences.
    return list(set(links))
def downloadFile(link,category,config,outputDir,date,tags):
    """Download one article from *link* with newspaper and save it as
    outputDir/category/<title>.html via writeHtml.

    Best-effort: every failure is printed (never raised).
    Returns 1 on success, 0 on any failure.
    """
    print('download article from:' + link)
    try:
        try:
            a = Article(link,config=config, keep_article_html=True)
            a.download()
            a.parse()
        except Exception as e:
            print("Error for download and parser:" + link)
            print(e)
            return 0
        if a.title == '':
            print("cannot find title for " + link)
            return 0
        print('title:' + a.title)
        # Sanitize the title into a filesystem-safe file name.
        title2 = re.sub(' ','_',a.title)
        title2 = re.sub('/','_',title2)
        outFileDir = outputDir + os.sep + category + os.sep
        if not os.path.exists(outFileDir):
            os.makedirs(outFileDir)
        outPath = outFileDir + title2 + '.html'
        content = a.text
        content_html = a.article_html
        date2 = ''
        try:
            date2 = a.publish_date
        except Exception as e:
            print("Warning:cannot find date")
        # Prefer the parsed publish date over the caller-supplied one.
        if(date2):
            date = date2
        authors = ','.join(a.authors)
        # Prefer the HTML body (with relative links made absolute);
        # fall back to the plain-text extraction.
        if(content_html):
            domain = getDomain(link)
            content_html = fixLinks(content_html,domain)
            writeHtml(outPath,content_html,a.title,link,date,authors,tags)
        elif(content):
            writeHtml(outPath,content,a.title,link,date,authors,tags)
        else:
            print('Error:cannot find content')
    except Exception as e:
        print('Exception:' + link)
        print(e)
        return 0
    return 1
def downloadArticles(url,category,config,outputDir,max_number,regex_for_links,tags):
    """Download up to *max_number* articles linked from the index page *url*.

    Links are collected with getLinks(url, regex_for_links); each one is
    fetched through downloadFile with an empty date.
    """
    print('download from articles:' + url)
    all = getLinks(url,regex_for_links)
    for article in all[:max_number]:
        downloadFile(article,category,config,outputDir,'',tags)
def downloadFeed(feed,category,config,outputDir,max_number,tags):
    """Download up to *max_number* entries of the RSS/Atom *feed*.

    Each entry's link is fetched through downloadFile, passing the
    entry's published date when the feed provides one.
    """
    print('download from feed:' + feed)
    d = feedparser.parse(feed)
    for entry in d.entries[:max_number]:
        print('entry:' + entry.title + ' ' + entry.link)
        #today = datetime.today()
        #days_ago = today - timedelta(days=max_days)
        #d = datetime(entry.published_parsed)
        #if(d < days_ago):
        #    continue
        date = ''
        try:
            date = entry.published
        except Exception as e:
            # Not every feed entry carries a published attribute.
            print(e)
        downloadFile(entry.link,category,config,outputDir,date,tags)
def downloadByConfig(urls, config, outputDir, max_number):
    """Dispatch downloads for every configured source.

    *urls* maps a category name to a list of "url,type,regex,tags"
    strings; type selects feed / articles / single-article handling.
    Colons inside the tags field are treated as tag separators.
    """
    print('download from config')
    for category in urls.keys():
        print('category:' + category)
        us = urls[category]
        for u in us:
            u2, type, regex_for_links, tags = u.split(',')
            tags = re.sub(':', ',', tags)
            if (type == 'feed'):
                downloadFeed(u2, category, config, outputDir, max_number, tags)
            elif (type == 'articles'):
                # BUG FIX: arguments were previously passed as
                # (u2, config, outputDir, category, ...), shuffled against
                # the downloadArticles(url, category, config, outputDir,
                # max_number, regex_for_links, tags) signature.
                downloadArticles(u2, category, config, outputDir,
                                 max_number, regex_for_links, tags)
            else:  # single article
                downloadFile(u2, category, config, outputDir, '', tags)
|
partofthething/home-assistant | homeassistant/components/template/binary_sensor.py | Python | apache-2.0 | 7,122 | 0.000983 | """Support for exposing a templated binary sensor."""
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASSES_SCHEMA,
ENTITY_ID_FORMAT,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
CONF_DEVICE_CLASS,
CONF_ENTITY_PICTURE_TEMPLATE,
CONF_ICON_TEMPLATE,
CONF_SENSORS,
CONF_UNIQUE_ID,
CONF_VALUE_TEMPLATE,
)
from homeassistant.core import callback
from homeassistant.exceptions import TemplateError
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import async_generate_entity_id
from homeassistant.helpers.event import async_call_later
from homeassistant.helpers.reload import async_setup_reload_service
from homeassistant.helpers.template import result_as_boolean
from .const import CONF_AVAILABILITY_TEMPLATE, DOMAIN, PLATFORMS
from .template_entity import TemplateEntity
CONF_DELAY_ON = "delay_on"
CONF_DELAY_OFF = "delay_off"
CONF_ATTRIBUTE_TEMPLATES = "attribute_templates"
# Schema for one templated binary sensor entry; ATTR_ENTITY_ID is still
# accepted but deprecated. (Restored two dataset-corrupted lines:
# "vol | .All" and the "|"-prefixed CONF_ICON_TEMPLATE option.)
SENSOR_SCHEMA = vol.All(
    cv.deprecated(ATTR_ENTITY_ID),
    vol.Schema(
        {
            vol.Required(CONF_VALUE_TEMPLATE): cv.template,
            vol.Optional(CONF_ICON_TEMPLATE): cv.template,
            vol.Optional(CONF_ENTITY_PICTURE_TEMPLATE): cv.template,
            vol.Optional(CONF_AVAILABILITY_TEMPLATE): cv.template,
            vol.Optional(CONF_ATTRIBUTE_TEMPLATES): vol.Schema(
                {cv.string: cv.template}
            ),
            vol.Optional(ATTR_FRIENDLY_NAME): cv.string,
            vol.Optional(ATTR_ENTITY_ID): cv.entity_ids,
            vol.Optional(CONF_DEVICE_CLASS): DEVICE_CLASSES_SCHEMA,
            vol.Optional(CONF_DELAY_ON): vol.Any(cv.positive_time_period, cv.template),
            vol.Optional(CONF_DELAY_OFF): vol.Any(cv.positive_time_period, cv.template),
            vol.Optional(CONF_UNIQUE_ID): cv.string,
        }
    ),
)

PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {vol.Required(CONF_SENSORS): cv.schema_with_slug_keys(SENSOR_SCHEMA)}
)
async def _async_create_entities(hass, config):
    """Create the template binary sensors.

    Builds one BinarySensorTemplate per entry under the validated
    CONF_SENSORS mapping; optional keys fall back to None/{}.
    """
    sensors = []
    for device, device_config in config[CONF_SENSORS].items():
        value_template = device_config[CONF_VALUE_TEMPLATE]
        icon_template = device_config.get(CONF_ICON_TEMPLATE)
        entity_picture_template = device_config.get(CONF_ENTITY_PICTURE_TEMPLATE)
        availability_template = device_config.get(CONF_AVAILABILITY_TEMPLATE)
        attribute_templates = device_config.get(CONF_ATTRIBUTE_TEMPLATES, {})
        # The slug key doubles as the default friendly name.
        friendly_name = device_config.get(ATTR_FRIENDLY_NAME, device)
        device_class = device_config.get(CONF_DEVICE_CLASS)
        # delay_on/off stay "raw" here: they may be a period or a template,
        # and are resolved in BinarySensorTemplate.async_added_to_hass.
        delay_on_raw = device_config.get(CONF_DELAY_ON)
        delay_off_raw = device_config.get(CONF_DELAY_OFF)
        unique_id = device_config.get(CONF_UNIQUE_ID)
        sensors.append(
            BinarySensorTemplate(
                hass,
                device,
                friendly_name,
                device_class,
                value_template,
                icon_template,
                entity_picture_template,
                availability_template,
                delay_on_raw,
                delay_off_raw,
                attribute_templates,
                unique_id,
            )
        )
    return sensors
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
    """Set up the template binary sensors."""
    # Register the template-domain reload service before adding entities.
    await async_setup_reload_service(hass, DOMAIN, PLATFORMS)
    async_add_entities(await _async_create_entities(hass, config))
class BinarySensorTemplate(TemplateEntity, BinarySensorEntity):
    """A virtual binary sensor that triggers from another sensor."""

    def __init__(
        self,
        hass,
        device,
        friendly_name,
        device_class,
        value_template,
        icon_template,
        entity_picture_template,
        availability_template,
        delay_on_raw,
        delay_off_raw,
        attribute_templates,
        unique_id,
    ):
        """Initialize the Template binary sensor."""
        super().__init__(
            attribute_templates=attribute_templates,
            availability_template=availability_template,
            icon_template=icon_template,
            entity_picture_template=entity_picture_template,
        )
        self.entity_id = async_generate_entity_id(ENTITY_ID_FORMAT, device, hass=hass)
        self._name = friendly_name
        self._device_class = device_class
        self._template = value_template
        self._state = None
        # Cancel handle for a pending delayed on/off transition.
        self._delay_cancel = None
        # *_raw may be a static period or a template; resolved in
        # async_added_to_hass.
        self._delay_on = None
        self._delay_on_raw = delay_on_raw
        self._delay_off = None
        self._delay_off_raw = delay_off_raw
        self._unique_id = unique_id

    async def async_added_to_hass(self):
        """Register callbacks."""
        self.add_template_attribute("_state", self._template, None, self._update_state)
        # Static delay values parse once here; template values are tracked
        # as template attributes instead (vol.Invalid means "not a period").
        if self._delay_on_raw is not None:
            try:
                self._delay_on = cv.positive_time_period(self._delay_on_raw)
            except vol.Invalid:
                self.add_template_attribute(
                    "_delay_on", self._delay_on_raw, cv.positive_time_period
                )
        if self._delay_off_raw is not None:
            try:
                self._delay_off = cv.positive_time_period(self._delay_off_raw)
            except vol.Invalid:
                self.add_template_attribute(
                    "_delay_off", self._delay_off_raw, cv.positive_time_period
                )
        await super().async_added_to_hass()

    @callback
    def _update_state(self, result):
        """Handle a new template result, applying delay_on/delay_off."""
        super()._update_state(result)
        # Any new result invalidates a pending delayed transition.
        if self._delay_cancel:
            self._delay_cancel()
            self._delay_cancel = None
        state = None if isinstance(result, TemplateError) else result_as_boolean(result)
        if state == self._state:
            return
        # state without delay
        if (
            state is None
            or (state and not self._delay_on)
            or (not state and not self._delay_off)
        ):
            self._state = state
            return

        @callback
        def _set_state(_):
            """Set state of template binary sensor."""
            self._state = state
            self.async_write_ha_state()

        delay = (self._delay_on if state else self._delay_off).seconds
        # state with delay. Cancelled if template result changes.
        self._delay_cancel = async_call_later(self.hass, delay, _set_state)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def unique_id(self):
        """Return the unique id of this binary sensor."""
        return self._unique_id

    @property
    def is_on(self):
        """Return true if sensor is on."""
        return self._state

    @property
    def device_class(self):
        """Return the sensor class of the binary sensor."""
        return self._device_class
|
nkhuyu/airflow | airflow/www/app.py | Python | apache-2.0 | 68,914 | 0.000653 | from __future__ import print_function
from __future__ import division
from builtins import str
from past.utils import old_div
import copy
from datetime import datetime, timedelta
import dateutil.parser
from functools import wraps
import inspect
import json
import logging
import os
import socket
import sys
import time
from flask._compat import PY2
from flask import (
Flask, url_for, Markup, Blueprint, redirect,
flash, Response, render_template)
from flask.ext.admin import Admin, BaseView, expose, AdminIndexView
from flask.ext.admin.form import DateTimePickerWidget
from flask.ext.admin import base
from flask.ext.admin.contrib.sqla import ModelView
from flask.ext.cache import Cache
from flask import request
import sqlalchemy as sqla
from wtforms import (
widgets,
Form, DateTimeField, SelectField, TextAreaField, PasswordField, StringField)
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
import chartkick
import jinja2
import markdown
from sqlalchemy import or_
import airflow
from airflow import jobs, login, models, settings, utils
from airflow.configuration import conf
from airflow.models import State
from airflow.settings import Session
from airflow.utils import AirflowException
from airflow.www import utils as wwwutils
login_required = login.login_required
current_user = login.current_user
logout_user = login.logout_user
from airflow import default_login as login
if conf.getboolean('webserver', 'AUTHENTICATE'):
try:
# Environment specific login
import airflow_login as login
except ImportError:
logging.error(
"authenticate is set to True in airflow.cfg, "
"but airflow_login failed to import")
login_required = login.login_required
current_user = login.current_user
logout_user = login.logout_user
AUTHENTICATE = conf.getboolean('webserver', 'AUTHENTICATE')
if AUTHENTICATE is False:
login_required = lambda x: x
FILTER_BY_OWNER = False
if conf.getboolean('webserver', 'FILTER_BY_OWNER'):
# filter_by_owner if authentication is enabled and filter_by_owner is true
FILTER_BY_OWNER = AUTHENTICATE
class VisiblePasswordInput(widgets.PasswordInput):
    """PasswordInput widget that renders its value visibly by default
    (hide_value defaults to False instead of the parent's behaviour)."""
    def __init__(self, hide_value=False):
        self.hide_value = hide_value
class VisiblePasswordField(PasswordField):
    """Password form field whose current value is shown in the input."""
    widget = VisiblePasswordInput()
def superuser_required(f):
    '''
    Decorator for views requiring superuser access
    '''
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Pass through when authentication is disabled, or when the
        # logged-in (non-anonymous) user is a superuser.
        if (
            not AUTHENTICATE or
            (not current_user.is_anonymous() and current_user.is_superuser())
        ):
            return f(*args, **kwargs)
        else:
            flash("This page requires superuser privileges", "error")
            return redirect(url_for('admin.index'))
    return decorated_function
def data_profiling_required(f):
    '''
    Decorator for views requiring data profiling access
    '''
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Pass through when authentication is disabled, or when the
        # logged-in (non-anonymous) user has the data-profiling flag.
        if (
            not AUTHENTICATE or
            (not current_user.is_anonymous() and current_user.data_profiling())
        ):
            return f(*args, **kwargs)
        else:
            flash("This page requires data profiling privileges", "error")
            return redirect(url_for('admin.index'))
    return decorated_function
QUERY_LIMIT = 100000
CHART_LIMIT = 200000
def pygment_html_render(s, lexer=lexers.TextLexer):
    """Highlight *s* as HTML with line numbers using the given Pygments lexer."""
    return highlight(
        s,
        lexer(),
        HtmlFormatter(linenos=True),
    )
def wrapped_markdown(s):
    """Render markdown *s* and wrap the HTML in a styled container div."""
    return '<div class="rich_doc">' + markdown.markdown(s) + "</div>"
attr_renderer = {
'bash_command': lambda x: pygment_html_render(x, lexers.BashLexer),
'hql': lambda x: pygment_html_render(x, lexers.SqlLexer),
'sql': lambda x: pygment_html_render(x, lexers.SqlLexer),
'doc': lambda x: pygment_html_render(x, lexers.TextLexer),
'doc_json': lambda x: pygment_html_render(x, lexers.JsonLexer),
'doc_rst': lambda x: pygment_html_render(x, lexers.RstLexer),
'doc_yaml': lambda x: pygment_html_render(x, lexers.YamlLexer),
'doc_md': wrapped_markdown,
'python_callable': lambda x: pygment_html_render(
inspect.getsource(x), lexers.PythonLexer),
}
dagbag = models.DagBag(os.path.expanduser(conf.get('core', 'DAGS_FOLDER')))
utils.pessimistic_connection_handling()
app = Flask(__name__)
app.config['SQLALCHEMY_POOL_RECYCLE'] = 3600
app.secret_key = conf.get('webserver', 'SECRET_KEY')
login.login_manager.init_app(app)
cache = Cache(
app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})
# Init for chartkick, the python wrapper for highcharts
ck = Blueprint(
'ck_page', __name__,
static_folder=chartkick.js(), static_url_path='/static')
app.register_blueprint(ck, url_prefix='/ck')
app.jinja_env.add_extension("chartkick.ext.charts")
@app.context_processor
def jinja_globals():
    """Expose the server hostname to every Jinja template."""
    return {
        'hostname': socket.gethostname(),
    }
class DateTimeForm(Form):
    """Execution-date filter form used by the gantt and graph views."""
    # Date filter form needed for gantt and graph view
    execution_date = DateTimeField(
        "Execution date", widget=DateTimePickerWidget())
class GraphForm(Form):
    """Graph-view filter form: execution date plus dagre layout direction."""
    execution_date = DateTimeField(
        "Execution date", widget=DateTimePickerWidget())
    arrange = SelectField("Layout", choices=(
        ('LR', "Left->Right"),
        ('RL', "Right->Left"),
        ('TB', "Top->Bottom"),
        ('BT', "Bottom->Top"),
    ))
class TreeForm(Form):
    """Filter form for the tree view: anchor date and number of runs shown."""
    # BUG FIX: pass the callable itself, not datetime.now().  Calling it here
    # evaluates once at class-definition (import) time, so a long-running
    # webserver would keep serving a stale anchor date.  WTForms evaluates a
    # callable default each time the form is instantiated.
    base_date = DateTimeField(
        "Anchor date", widget=DateTimePickerWidget(), default=datetime.now)
    num_runs = SelectField("Number of runs", default=25, choices=(
        (5, "5"),
        (25, "25"),
        (50, "50"),
        (100, "100"),
        (365, "365"),
    ))
@app.route('/')
def index():
    """Redirect the bare root URL to the admin index page."""
    target = url_for('admin.index')
    return redirect(target)
@app.route('/health')
def health():
    """ We can add an array of tests here to check the server's health """
    rendered = markdown.markdown("The server is healthy!")
    return Markup(rendered)
@app.teardown_appcontext
def shutdown_session(exception=None):
    """Remove the scoped SQLAlchemy session when the app context tears down."""
    settings.Session.remove()
def dag_link(v, c, m, p):
    """flask-admin column formatter: render a DAG id as a link to its graph view.

    NOTE(review): m.dag_id is interpolated into the markup without HTML
    escaping; dag ids are assumed to contain no HTML-significant
    characters -- confirm.
    """
    url = url_for(
        'airflow.graph',
        dag_id=m.dag_id)
    # Explicit format arguments instead of the fragile .format(**locals())
    # idiom, which silently depends on local variable names.
    return Markup(
        '<a href="{url}">{dag_id}</a>'.format(url=url, dag_id=m.dag_id))
class DagModelView(wwwutils.SuperUserMixin, ModelView):
    """flask-admin ModelView over DagModel, restricted to superusers.

    Lists DAGs that are active or paused and are not subdags.
    """
    column_list = ('dag_id', 'owners')
    column_editable_list = ('is_paused',)
    form_excluded_columns = ('is_subdag', 'is_active')
    column_searchable_list = ('dag_id',)
    column_filters = (
        'dag_id', 'owners', 'is_paused', 'is_active', 'is_subdag',
        'last_scheduler_run', 'last_expired')
    # Scheduler-maintained bookkeeping fields are shown read-only in the form.
    form_widget_args = {
        'last_scheduler_run': {'disabled': True},
        'fileloc': {'disabled': True},
        'is_paused': {'disabled': True},
        'last_pickled': {'disabled': True},
        'pickle_id': {'disabled': True},
        'last_loaded': {'disabled': True},
        'last_expired': {'disabled': True},
        'pickle_size': {'disabled': True},
        'scheduler_lock': {'disabled': True},
        'owners': {'disabled': True},
    }
    column_formatters = dict(
        dag_id=dag_link,
    )
    can_delete = False
    can_create = False
    page_size = 50
    list_template = 'airflow/list_dags.html'
    named_filter_urls = True

    def get_query(self):
        """
        Default filters for model: active or paused, and not a subdag.
        """
        return (
            super(DagModelView, self)
            .get_query()
            .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))
            .filter(~models.DagModel.is_subdag)
        )

    def get_count_query(self):
        """
        Count query with the SAME filters as :meth:`get_query`, so that the
        pagination count matches the rows actually listed.

        BUG FIX: previously this filtered on ``is_active`` only, which
        disagreed with ``get_query`` for DAGs that are paused but no longer
        active.
        """
        return (
            super(DagModelView, self)
            .get_count_query()
            .filter(or_(models.DagModel.is_active, models.DagModel.is_paused))
            .filter(~models.DagModel.is_subdag)
        )
class HomeView(AdminIndexView):
@expose("/")
@login_required
def index(self):
session = Session()
DM = models.DagModel
qry = None
# filter the dags if filter_by_owner and current user is not superuser
|
khchine5/lino | lino/management/commands/makescreenshots.py | Python | bsd-2-clause | 8,696 | 0.005175 | # -*- coding: UTF-8 -*-
# Copyright 2012-2013 Luc Saffre
# License: BSD (see file COPYING for details)
"""
Writes screenshots to <project_dir>/media/cache/screenshots
"""
# from future import standard_library
# standard_library.install_aliases()
from builtins import str
import logging
logger = logging.getLogger(__name__)
import subprocess
import os
import errno
#~ import codecs
import sys
from optparse import make_option
from os.path import join
from multiprocessing import Process
from django.db import models
from django.utils.translation import ugettext as _
from django.utils import translation
from django.utils.encoding import force_text
from django.core.management import call_command
from django.core.management.base import BaseCommand, CommandError
from django.core.servers.basehttp import get_internal_wsgi_application
from django.core.servers.basehttp import WSGIRequestHandler
from django.conf import settings
#~ from django.test import LiveServerTestCase
from django.test.testcases import StoppableWSGIServer
from lino.core.utils import obj2str, full_model_name, sorted_models_list
from lino.utils import screenshots
from atelier.utils import SubProcessParent
# Developer-local path to the phantomjs binary used to render the
# screenshots -- TODO make this configurable instead of hard-coded.
PHANTOMJS = '/home/luc/snapshots/phantomjs-1.9.0-linux-i686/bin/phantomjs'
JS_SRC = """
function waitfor(msg,until,limit,todo) {
if (until()) {
console.log("Done",msg);
todo(true);
return;
};
if (limit <= 0) {
console.log("Giving up",msg);
todo(false);
//~ task_done(msg,false);
return;
};
// console.log('Retry',msg,'('+String(limit),"attempts left)");
window.setTimeout(function() { waitfor(msg,until,limit-1,todo)},1000);
};
var output = '%(target)s';
var address = '%(url)s';
// phantom.addCookie({ username: '%(username)s', password: '%(password)s'});
var data = 'username=%(username)s&password=%(password)s';
var page = require('webpage').create();
page.open('http://127.0.0.1:8000/auth','post',data,function (status) {
// console.log('opened auth!');
if (status !== 'success') {
console.log('Unable to authenticate!');
phantom.exit();
}});
var page = require('webpage').create();
// page.settings = { userName: '%(username)s', password: '%(password)s'};
// page.customHeaders = { %(remote_user_header)s: '%(username)s'};
// page.customHeaders = { 'HTTP_%(remote_user_header)s': '%(username)s'};
page.viewportSize = { width: 1400, height: 800};
// page.viewportSize = { width: 1024, height: 768};
// page.viewportSize = { width: 1366, height: 744};
// page.viewportSize = { width: 800, height: 600};
page.onConsoleMessage = function (msg) { console.log(msg); };
page.onError = function (msg, trace) {
console.log(msg);
trace.forEach(function(item) {
console.log(' ', item.file, ':', item.line);
})
}
var is_loaded = function() {
return page.evaluate(function() {
// console.log('evaluate()');
// return !Ext.Ajax.isLoading();
// return (document.readyState == 'complete');
if (typeof Lino != "undefined") {
if (Lino.current_window) {
if (!Lino.current_window.main_item.is_loading())
return true;
// console.log("Lino.current_window still loading in ",document.documentElement.innerHTML);
// console.log("Lino.current_window", Lino.current_window.main_item,"still loading." );
// return true;
}
}
// console.log("No Lino in ",document.documentElement.innerHTML);
// console.log("No Lino in response");
return false;
}
);
};
var todo = function(ok) {
console.log("Rendering to",output,ok);
page.render(output);
if (ok)
phantom.exit();
else
phantom.exit(2);
};
var on_opened = function(status) {
if (status !== 'success') {
console.log('Unable to load ',address,'status is:',status);
phantom.exit(1);
} else {
waitfor(output,is_loaded,6,todo);
}
};
console.lo | g("Loading",address,'to',output);
page.open(address,on_o | pened);
"""
class Command(BaseCommand):
    """Django management command: render screenshots of Lino windows with
    phantomjs (see the module docstring)."""
    help = __doc__
    # --force overwrites screenshot files that already exist.
    option_list = BaseCommand.option_list + (
        make_option('--force', action='store_true',
                    dest='force', default=False,
                    help='Overwrite existing files.'),
    )

    def handle(self, *args, **options):
        # This command accepts no positional arguments.
        if len(args):
            raise CommandError("Unexpected arguments %r" % args)
        # Igor Katson writes an interesting answer in
        # `Django Broken pipe in Debug mode
        # <http://stackoverflow.com/questions/7912672/django-broken-pipe-in-debug-mode>`__::
        # Monkeypatch python not to print "Broken Pipe" errors to stdout.
        import socketserver
        from wsgiref import handlers
        socketserver.BaseServer.handle_error = lambda *args, **kwargs: None
        handlers.BaseHandler.log_exception = lambda *args, **kwargs: None
        # NOTE(review): `main` is called as a plain function here although it
        # is defined below with a `self` parameter -- confirm the intended
        # structure against the original file; this dump's indentation is
        # unreliable.
        main(force=options['force'])
        #~ main()
#~ ADDR = "http://127.0.0.1"
# Address and port the throwaway screenshot web server listens on.
ADDR = "127.0.0.1"
PORT = 8000
# Handle to the running StoppableWSGIServer; set by start_server().
# NOTE(review): the functions below access this as `self.HTTPD`, which
# suggests they were originally methods -- confirm against the source file.
HTTPD = None
def start_server(self):
    """Serve the project's internal WSGI application on PORT.

    Blocks in serve_forever(); intended to run in a child Process
    (see main() below).  Exits cleanly on KeyboardInterrupt.
    """
    try:
        handler = get_internal_wsgi_application()
        self.HTTPD = StoppableWSGIServer(('', PORT), WSGIRequestHandler)
        self.HTTPD.set_app(handler)
        self.HTTPD.serve_forever()
    except KeyboardInterrupt:
        sys.exit(0)
def stop_server(self):
    """Shut down and close the WSGI server started by start_server()."""
    #~ server.terminate()
    self.HTTPD.shutdown()
    self.HTTPD.server_close()
    logger.info("terminated server.")
def main(self, force=False, **kw):
settings.SITE.startup()
outputbase = os.path.join(settings.MEDIA_ROOT, 'cache', 'screenshots')
urlbase = "http://" + ADDR + ":" + str(PORT)
#~ urlbase="http://127.0.0.1:8000"
pp = SubProcessParent()
server = Process(target=self.start_server)
server.start()
#~ server.join()
logger.info("started the server")
count = 0
assert not settings.SITE.remote_user_header
try:
for lng in settings.SITE.languages:
if lng.django_code == 'de': # temporary
for ss in screenshots.get_screenshots(lng.django_code):
#~ print "20130515 got screenshot", ss
target = ss.get_filename(outputbase)
if not force and os.path.exists(target):
logger.info("%s exists", target)
continue
for fn in (target, target + '.log'):
if os.path.exists(fn):
os.remove(fn)
url = ss.get_url(urlbase)
if url is None:
logger.info("No url for %s", target)
continue
logger.info("Build %s...", target)
ctx = dict(
url=url,
target=target,
username=ss.ar.get_user().username)
ctx.update(password='1234')
ctx.update(
remote_user_header=settings.SITE.remote_user_header)
f = file('tmp.js', 'wt')
f.write(JS_SRC % ctx)
f.close()
args = [PHANTOMJS]
args += ['--cookies-file=phantomjs_cookies.txt']
args += ['--disk-cache=true']
args += ['tmp.js']
try:
output = pp.check_output(args, **kw)
except subprocess.CalledProcessError as e:
output = e.output
file(target + '.log', 'wt').write(output)
count += 1
#~ p = pp.open_subprocess(args,**kw)
#~ p.wait()
#~ rc = p.returncode
#~ if rc != 0:
|
yannrouillard/weboob | weboob/capabilities/bill.py | Python | agpl-3.0 | 5,143 | 0.004667 | # -*- coding: utf-8 -*-
# Copyright(C) 2012 Romain Bignon, Florent Fourcot
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more d | etails.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from .base import CapBaseObject, StringField, DateField, DecimalField, UserError
from .collection import ICapCollection
__all__ = ['SubscriptionNotFound', 'BillNotFound', 'Detail', 'Bill', 'Subscription', 'ICapBill']
|
class SubscriptionNotFound(UserError):
    """
    Raised when a subscription is not found.
    """
    def __init__(self, msg='Subscription not found'):
        super(SubscriptionNotFound, self).__init__(msg)
class BillNotFound(UserError):
    """
    Raised when a bill is not found.
    """
    def __init__(self, msg='Bill not found'):
        super(BillNotFound, self).__init__(msg)
class Detail(CapBaseObject):
    """
    Detail of a subscription.

    One consumption/billing line: a label, optional info text, a date,
    the price (taxes included) with its VAT and currency, and the
    consumed quantity with its unit.
    """
    label = StringField('label of the detail line')
    infos = StringField('information')
    datetime = DateField('date information')
    price = DecimalField('Total price, taxes included')
    vat = DecimalField('Value added Tax')
    currency = StringField('Currency', default=None)
    quantity = DecimalField('Number of units consumed')
    unit = StringField('Unit of the consumption')

    def __init__(self):
        # A Detail has no meaningful identifier of its own, so the
        # CapBaseObject id is initialized to 0.
        CapBaseObject.__init__(self, 0)
class Bill(CapBaseObject):
    """
    Bill.

    A billing document attached to a subscription (see ``idparent``),
    with its amount, currency, file format and the relevant dates.
    """
    date = DateField('The day the bill has been sent to the subscriber')
    format = StringField('file format of the bill')
    label = StringField('label of bill')
    idparent = StringField('id of the parent subscription')
    price = DecimalField('Price to pay')
    currency = StringField('Currency', default=None)
    deadline = DateField('The latest day to pay')
    startdate = DateField('The first day the bill applies to')
    finishdate = DateField('The last day the bill applies to')

    def __init__(self):
        # Id is initialized to 0; presumably filled in later by the
        # backend -- confirm against callers.
        CapBaseObject.__init__(self, 0)
class Subscription(CapBaseObject):
    """
    Subscription to a service.
    """
    label = StringField('label of subscription')
    # Typo fix in the human-readable field description: 'whe' -> 'who'.
    subscriber = StringField('who has subscribed')
    validity = DateField('End validity date of the subscription')
    renewdate = DateField('Reset date of consumption')
class ICapBill(ICapCollection):
    """
    Capability for websites that expose subscriptions and their bills:
    list subscriptions, browse bills/details/balance, download documents.
    """

    def iter_resources(self, objs, split_path):
        """
        Iter resources. Will return :func:`iter_subscription`.
        """
        # Only respond when Subscription objects were requested;
        # implicitly returns None otherwise.
        if Subscription in objs:
            self._restrict_level(split_path)
            return self.iter_subscription()

    def iter_subscription(self):
        """
        Iter subscriptions.

        :rtype: iter[:class:`Subscription`]
        """
        raise NotImplementedError()

    def get_subscription(self, _id):
        """
        Get a subscription.

        :param _id: ID of subscription
        :rtype: :class:`Subscription`
        :raises: :class:`SubscriptionNotFound`
        """
        raise NotImplementedError()

    def iter_bills_history(self, subscription):
        """
        Iter history of a subscription.

        :param subscription: subscription to get history
        :type subscription: :class:`Subscription`
        :rtype: iter[:class:`Detail`]
        """
        raise NotImplementedError()

    def get_bill(self, id):
        """
        Get a bill.

        :param id: ID of bill
        :rtype: :class:`Bill`
        :raises: :class:`BillNotFound`
        """
        raise NotImplementedError()

    def download_bill(self, id):
        """
        Download a bill.

        :param id: ID of bill
        :rtype: str
        :raises: :class:`BillNotFound`
        """
        raise NotImplementedError()

    def iter_bills(self, subscription):
        """
        Iter bills.

        :param subscription: subscription to get bills
        :type subscription: :class:`Subscription`
        :rtype: iter[:class:`Bill`]
        """
        raise NotImplementedError()

    def get_details(self, subscription):
        """
        Get details of a subscription.

        :param subscription: subscription to get details
        :type subscription: :class:`Subscription`
        :rtype: iter[:class:`Detail`]
        """
        raise NotImplementedError()

    def get_balance(self, subscription):
        """
        Get the balance of a subscription.

        :param subscription: subscription to get balance
        :type subscription: :class:`Subscription`
        :rtype: :class:`Detail`
        """
        raise NotImplementedError()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.