| repo_name | ref | path | copies | content |
|---|---|---|---|---|
atvcaptain/enigma2
|
refs/heads/6.5
|
lib/python/Components/Converter/VfdDisplay.py
|
1
|
from __future__ import absolute_import
from datetime import datetime
from Components.Converter.Poll import Poll
from Components.Converter.Converter import Converter
from Components.Element import cached
class VfdDisplay(Poll, Converter, object):
def __init__(self, type):
Converter.__init__(self, type)
Poll.__init__(self)
self.num = None
self.showclock = 0
self.delay = 5000
self.loop = -1
self.type = type.lower().split(';')
if 'number' in self.type and 'clock' not in self.type: # Only channel number
self.delay = 0
self.poll_enabled = False
else:
self.poll_enabled = True
if 'clock' in self.type and 'number' not in self.type: # Only clock
self.showclock = 1
self.delay = -1
else:
for x in self.type:
if x.isdigit():
self.delay = int(x) * 1000
break
if 'loop' in self.type and self.delay:
self.loop = self.delay
if '12h' in self.type and 'nozero' in self.type:
self.hour = '%l'
elif '12h' in self.type:
self.hour = '%I'
elif 'nozero' in self.type:
self.hour = '%k'
else:
self.hour = '%H'
@cached
def getText(self):
if hasattr(self.source, 'text'):
if 'nozero' in self.type:
return self.source.text.rjust(4)
else:
return self.source.text.zfill(4)
if self.showclock == 0:
if self.delay:
self.poll_interval = self.delay
self.showclock = 1
if self.num:
return self.num
else:
if self.showclock == 1:
if 'noblink' in self.type:
self.poll_interval = self.delay
else:
self.poll_interval = 1000
self.showclock = 3
clockformat = self.hour + '%02M'
elif self.showclock == 2:
self.showclock = 3
clockformat = self.hour + '%02M'
else:
self.showclock = 2
clockformat = self.hour + ':%02M'
if self.loop != -1:
self.loop -= 1000
if self.loop <= 0:
self.loop = self.delay
self.showclock = 0
return datetime.today().strftime(clockformat)
text = property(getText)
def changed(self, what):
if what[0] is self.CHANGED_SPECIFIC and self.delay >= 0:
self.showclock = 0
if self.loop != -1:
self.loop = self.delay
service = self.source.serviceref
self.num = service and ('%4d' if 'nozero' in self.type else '%04d') % service.getChannelNum() or None
Converter.changed(self, what)
elif what[0] is self.CHANGED_POLL:
Converter.changed(self, what)
elif what[0] is self.CHANGED_ALL:
Converter.changed(self, what)
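For reference, a standalone sketch of how the semicolon-separated converter argument above is interpreted; parse_options is a hypothetical helper written only to mirror the parsing in __init__, not part of Enigma2.
# Hypothetical helper mirroring VfdDisplay.__init__'s option parsing
# (illustration only, not Enigma2 code).
def parse_options(type_string):
    opts = type_string.lower().split(';')
    delay = 5000                       # default: show the number for 5 s, then the clock
    if 'number' in opts and 'clock' not in opts:
        delay = 0                      # channel number only, no polling
    if 'clock' in opts and 'number' not in opts:
        delay = -1                     # clock only
    else:
        for x in opts:
            if x.isdigit():
                delay = int(x) * 1000  # explicit delay, given in seconds
                break
    if '12h' in opts and 'nozero' in opts:
        hour = '%l'
    elif '12h' in opts:
        hour = '%I'
    elif 'nozero' in opts:
        hour = '%k'
    else:
        hour = '%H'
    return delay, hour

print(parse_options("Number;Clock;10;NoZero"))   # (10000, '%k')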
|
mm1ke/portage
|
refs/heads/master
|
pym/portage/util/_async/PipeReaderBlockingIO.py
|
12
|
# Copyright 2012 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
try:
import threading
except ImportError:
# dummy_threading will not suffice
threading = None
from portage import os
from _emerge.AbstractPollTask import AbstractPollTask
class PipeReaderBlockingIO(AbstractPollTask):
"""
Reads output from one or more files and saves it in memory, for
retrieval via the getvalue() method. This is driven by a thread
for each input file, in order to support blocking IO. This may
be useful for using threads to handle blocking IO with Jython,
since Jython lacks the fcntl module which is needed for
non-blocking IO (see http://bugs.jython.org/issue1074).
"""
__slots__ = ("input_files", "_read_data", "_terminate",
"_threads", "_thread_rlock")
def _start(self):
self._terminate = threading.Event()
self._threads = {}
self._read_data = []
self._registered = True
self._thread_rlock = threading.RLock()
with self._thread_rlock:
for f in self.input_files.values():
t = threading.Thread(target=self._reader_thread, args=(f,))
t.daemon = True
t.start()
self._threads[f] = t
def _reader_thread(self, f):
try:
terminated = self._terminate.is_set
except AttributeError:
# Jython 2.7.0a2
terminated = self._terminate.isSet
bufsize = self._bufsize
while not terminated():
buf = f.read(bufsize)
with self._thread_rlock:
if terminated():
break
elif buf:
self._read_data.append(buf)
else:
del self._threads[f]
if not self._threads:
# Thread-safe callback to EventLoop
self.scheduler.idle_add(self._eof)
break
f.close()
def _eof(self):
self._registered = False
if self.returncode is None:
self.returncode = os.EX_OK
self.wait()
return False
def _cancel(self):
self._terminate.set()
self._registered = False
if self.returncode is None:
self.returncode = self._cancelled_returncode
self.wait()
def _wait(self):
if self.returncode is not None:
return self.returncode
self._wait_loop()
self.returncode = os.EX_OK
return self.returncode
def getvalue(self):
"""Retrieve the entire contents"""
with self._thread_rlock:
return b''.join(self._read_data)
def close(self):
"""Free the memory buffer."""
with self._thread_rlock:
self._read_data = None
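Outside of portage's AbstractPollTask/EventLoop machinery, the pattern the docstring describes can be sketched on its own: one daemon thread per file object performs blocking reads and appends chunks to a shared buffer under a lock until EOF. The helper below is purely illustrative.
# Standalone illustration of the reader-thread pattern above
# (hypothetical helper, not portage API).
import os
import threading

def read_all_blocking(files, bufsize=4096):
    data, lock, threads = [], threading.RLock(), []

    def reader(f):
        while True:
            buf = f.read(bufsize)       # blocking read
            with lock:
                if buf:
                    data.append(buf)
                else:                   # EOF
                    break
        f.close()

    for f in files:
        t = threading.Thread(target=reader, args=(f,))
        t.daemon = True
        t.start()
        threads.append(t)
    for t in threads:
        t.join()
    with lock:
        return b''.join(data)

if __name__ == "__main__":
    r, w = os.pipe()
    os.write(w, b"hello from the pipe")
    os.close(w)
    print(read_all_blocking([os.fdopen(r, "rb")]))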
|
proxysh/Safejumper-for-Desktop
|
refs/heads/master
|
buildlinux/env64/lib/python2.7/site-packages/twisted/trial/test/detests.py
|
16
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for Deferred handling by L{twisted.trial.unittest.TestCase}.
"""
from __future__ import division, absolute_import
from twisted.trial import unittest
from twisted.internet import defer, threads, reactor
from twisted.trial.util import suppress as SUPPRESS
from twisted.python.util import runWithWarningsSuppressed
class DeferredSetUpOK(unittest.TestCase):
def setUp(self):
d = defer.succeed('value')
d.addCallback(self._cb_setUpCalled)
return d
def _cb_setUpCalled(self, ignored):
self._setUpCalled = True
def test_ok(self):
self.assertTrue(self._setUpCalled)
class DeferredSetUpFail(unittest.TestCase):
testCalled = False
def setUp(self):
return defer.fail(unittest.FailTest('i fail'))
def test_ok(self):
DeferredSetUpFail.testCalled = True
self.fail("I should not get called")
class DeferredSetUpCallbackFail(unittest.TestCase):
testCalled = False
def setUp(self):
d = defer.succeed('value')
d.addCallback(self._cb_setUpCalled)
return d
def _cb_setUpCalled(self, ignored):
self.fail('deliberate failure')
def test_ok(self):
DeferredSetUpCallbackFail.testCalled = True
class DeferredSetUpError(unittest.TestCase):
testCalled = False
def setUp(self):
return defer.fail(RuntimeError('deliberate error'))
def test_ok(self):
DeferredSetUpError.testCalled = True
class DeferredSetUpNeverFire(unittest.TestCase):
testCalled = False
def setUp(self):
return defer.Deferred()
def test_ok(self):
DeferredSetUpNeverFire.testCalled = True
class DeferredSetUpSkip(unittest.TestCase):
testCalled = False
def setUp(self):
d = defer.succeed('value')
d.addCallback(self._cb1)
return d
def _cb1(self, ignored):
raise unittest.SkipTest("skip me")
def test_ok(self):
DeferredSetUpSkip.testCalled = True
class DeferredTests(unittest.TestCase):
touched = False
def _cb_fail(self, reason):
self.fail(reason)
def _cb_error(self, reason):
raise RuntimeError(reason)
def _cb_skip(self, reason):
raise unittest.SkipTest(reason)
def _touchClass(self, ignored):
self.__class__.touched = True
def setUp(self):
self.__class__.touched = False
def test_pass(self):
return defer.succeed('success')
def test_passGenerated(self):
self._touchClass(None)
yield None
test_passGenerated = runWithWarningsSuppressed(
[ SUPPRESS(message="twisted.internet.defer.deferredGenerator was "
"deprecated") ],
defer.deferredGenerator, test_passGenerated)
@defer.inlineCallbacks
def test_passInlineCallbacks(self):
"""
Test case that is decorated with L{defer.inlineCallbacks}.
"""
self._touchClass(None)
yield None
def test_fail(self):
return defer.fail(self.failureException('I fail'))
def test_failureInCallback(self):
d = defer.succeed('fail')
d.addCallback(self._cb_fail)
return d
def test_errorInCallback(self):
d = defer.succeed('error')
d.addCallback(self._cb_error)
return d
def test_skip(self):
d = defer.succeed('skip')
d.addCallback(self._cb_skip)
d.addCallback(self._touchClass)
return d
def test_thread(self):
return threads.deferToThread(lambda : None)
def test_expectedFailure(self):
d = defer.succeed('todo')
d.addCallback(self._cb_error)
return d
test_expectedFailure.todo = "Expected failure"
class TimeoutTests(unittest.TestCase):
timedOut = None
def test_pass(self):
d = defer.Deferred()
reactor.callLater(0, d.callback, 'hoorj!')
return d
test_pass.timeout = 2
def test_passDefault(self):
# test default timeout
d = defer.Deferred()
reactor.callLater(0, d.callback, 'hoorj!')
return d
def test_timeout(self):
return defer.Deferred()
test_timeout.timeout = 0.1
def test_timeoutZero(self):
return defer.Deferred()
test_timeoutZero.timeout = 0
def test_expectedFailure(self):
return defer.Deferred()
test_expectedFailure.timeout = 0.1
test_expectedFailure.todo = "i will get it right, eventually"
def test_skip(self):
return defer.Deferred()
test_skip.timeout = 0.1
test_skip.skip = "i will get it right, eventually"
def test_errorPropagation(self):
def timedOut(err):
self.__class__.timedOut = err
return err
d = defer.Deferred()
d.addErrback(timedOut)
return d
test_errorPropagation.timeout = 0.1
def test_calledButNeverCallback(self):
d = defer.Deferred()
def neverFire(r):
return defer.Deferred()
d.addCallback(neverFire)
d.callback(1)
return d
test_calledButNeverCallback.timeout = 0.1
class TestClassTimeoutAttribute(unittest.TestCase):
timeout = 0.2
def setUp(self):
self.d = defer.Deferred()
def testMethod(self):
self.methodCalled = True
return self.d
|
mollstam/UnrealPy
|
refs/heads/master
|
UnrealPyEmbed/Source/Python/Lib/python27/test/test_shlex.py
|
180
|
# -*- coding: iso-8859-1 -*-
import unittest
import shlex
from test import test_support
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
# The original test data set was from shellwords, by Hartmut Goebel.
data = r"""x|x|
foo bar|foo|bar|
foo bar|foo|bar|
foo bar |foo|bar|
foo bar bla fasel|foo|bar|bla|fasel|
x y z xxxx|x|y|z|xxxx|
\x bar|\|x|bar|
\ x bar|\|x|bar|
\ bar|\|bar|
foo \x bar|foo|\|x|bar|
foo \ x bar|foo|\|x|bar|
foo \ bar|foo|\|bar|
foo "bar" bla|foo|"bar"|bla|
"foo" "bar" "bla"|"foo"|"bar"|"bla"|
"foo" bar "bla"|"foo"|bar|"bla"|
"foo" bar bla|"foo"|bar|bla|
foo 'bar' bla|foo|'bar'|bla|
'foo' 'bar' 'bla'|'foo'|'bar'|'bla'|
'foo' bar 'bla'|'foo'|bar|'bla'|
'foo' bar bla|'foo'|bar|bla|
blurb foo"bar"bar"fasel" baz|blurb|foo"bar"bar"fasel"|baz|
blurb foo'bar'bar'fasel' baz|blurb|foo'bar'bar'fasel'|baz|
""|""|
''|''|
foo "" bar|foo|""|bar|
foo '' bar|foo|''|bar|
foo "" "" "" bar|foo|""|""|""|bar|
foo '' '' '' bar|foo|''|''|''|bar|
\""|\|""|
"\"|"\"|
"foo\ bar"|"foo\ bar"|
"foo\\ bar"|"foo\\ bar"|
"foo\\ bar\"|"foo\\ bar\"|
"foo\\" bar\""|"foo\\"|bar|\|""|
"foo\\ bar\" dfadf"|"foo\\ bar\"|dfadf"|
"foo\\\ bar\" dfadf"|"foo\\\ bar\"|dfadf"|
"foo\\\x bar\" dfadf"|"foo\\\x bar\"|dfadf"|
"foo\x bar\" dfadf"|"foo\x bar\"|dfadf"|
\''|\|''|
'foo\ bar'|'foo\ bar'|
'foo\\ bar'|'foo\\ bar'|
"foo\\\x bar\" df'a\ 'df'|"foo\\\x bar\"|df'a|\|'df'|
\"foo"|\|"foo"|
\"foo"\x|\|"foo"|\|x|
"foo\x"|"foo\x"|
"foo\ "|"foo\ "|
foo\ xx|foo|\|xx|
foo\ x\x|foo|\|x|\|x|
foo\ x\x\""|foo|\|x|\|x|\|""|
"foo\ x\x"|"foo\ x\x"|
"foo\ x\x\\"|"foo\ x\x\\"|
"foo\ x\x\\""foobar"|"foo\ x\x\\"|"foobar"|
"foo\ x\x\\"\''"foobar"|"foo\ x\x\\"|\|''|"foobar"|
"foo\ x\x\\"\'"fo'obar"|"foo\ x\x\\"|\|'"fo'|obar"|
"foo\ x\x\\"\'"fo'obar" 'don'\''t'|"foo\ x\x\\"|\|'"fo'|obar"|'don'|\|''|t'|
'foo\ bar'|'foo\ bar'|
'foo\\ bar'|'foo\\ bar'|
foo\ bar|foo|\|bar|
foo#bar\nbaz|foobaz|
:-) ;-)|:|-|)|;|-|)|
áéíóú|á|é|í|ó|ú|
"""
posix_data = r"""x|x|
foo bar|foo|bar|
foo bar|foo|bar|
foo bar |foo|bar|
foo bar bla fasel|foo|bar|bla|fasel|
x y z xxxx|x|y|z|xxxx|
\x bar|x|bar|
\ x bar| x|bar|
\ bar| bar|
foo \x bar|foo|x|bar|
foo \ x bar|foo| x|bar|
foo \ bar|foo| bar|
foo "bar" bla|foo|bar|bla|
"foo" "bar" "bla"|foo|bar|bla|
"foo" bar "bla"|foo|bar|bla|
"foo" bar bla|foo|bar|bla|
foo 'bar' bla|foo|bar|bla|
'foo' 'bar' 'bla'|foo|bar|bla|
'foo' bar 'bla'|foo|bar|bla|
'foo' bar bla|foo|bar|bla|
blurb foo"bar"bar"fasel" baz|blurb|foobarbarfasel|baz|
blurb foo'bar'bar'fasel' baz|blurb|foobarbarfasel|baz|
""||
''||
foo "" bar|foo||bar|
foo '' bar|foo||bar|
foo "" "" "" bar|foo||||bar|
foo '' '' '' bar|foo||||bar|
\"|"|
"\""|"|
"foo\ bar"|foo\ bar|
"foo\\ bar"|foo\ bar|
"foo\\ bar\""|foo\ bar"|
"foo\\" bar\"|foo\|bar"|
"foo\\ bar\" dfadf"|foo\ bar" dfadf|
"foo\\\ bar\" dfadf"|foo\\ bar" dfadf|
"foo\\\x bar\" dfadf"|foo\\x bar" dfadf|
"foo\x bar\" dfadf"|foo\x bar" dfadf|
\'|'|
'foo\ bar'|foo\ bar|
'foo\\ bar'|foo\\ bar|
"foo\\\x bar\" df'a\ 'df"|foo\\x bar" df'a\ 'df|
\"foo|"foo|
\"foo\x|"foox|
"foo\x"|foo\x|
"foo\ "|foo\ |
foo\ xx|foo xx|
foo\ x\x|foo xx|
foo\ x\x\"|foo xx"|
"foo\ x\x"|foo\ x\x|
"foo\ x\x\\"|foo\ x\x\|
"foo\ x\x\\""foobar"|foo\ x\x\foobar|
"foo\ x\x\\"\'"foobar"|foo\ x\x\'foobar|
"foo\ x\x\\"\'"fo'obar"|foo\ x\x\'fo'obar|
"foo\ x\x\\"\'"fo'obar" 'don'\''t'|foo\ x\x\'fo'obar|don't|
"foo\ x\x\\"\'"fo'obar" 'don'\''t' \\|foo\ x\x\'fo'obar|don't|\|
'foo\ bar'|foo\ bar|
'foo\\ bar'|foo\\ bar|
foo\ bar|foo bar|
foo#bar\nbaz|foo|baz|
:-) ;-)|:-)|;-)|
áéíóú|áéíóú|
"""
class ShlexTest(unittest.TestCase):
def setUp(self):
self.data = [x.split("|")[:-1]
for x in data.splitlines()]
self.posix_data = [x.split("|")[:-1]
for x in posix_data.splitlines()]
for item in self.data:
item[0] = item[0].replace(r"\n", "\n")
for item in self.posix_data:
item[0] = item[0].replace(r"\n", "\n")
def splitTest(self, data, comments):
for i in range(len(data)):
l = shlex.split(data[i][0], comments=comments)
self.assertEqual(l, data[i][1:],
"%s: %s != %s" %
(data[i][0], l, data[i][1:]))
def oldSplit(self, s):
ret = []
lex = shlex.shlex(StringIO(s))
tok = lex.get_token()
while tok:
ret.append(tok)
tok = lex.get_token()
return ret
def testSplitPosix(self):
"""Test data splitting with posix parser"""
self.splitTest(self.posix_data, comments=True)
def testCompat(self):
"""Test compatibility interface"""
for i in range(len(self.data)):
l = self.oldSplit(self.data[i][0])
self.assertEqual(l, self.data[i][1:],
"%s: %s != %s" %
(self.data[i][0], l, self.data[i][1:]))
# Allow this test to be used with old shlex.py
if not getattr(shlex, "split", None):
for methname in dir(ShlexTest):
if methname.startswith("test") and methname != "testCompat":
delattr(ShlexTest, methname)
def test_main():
test_support.run_unittest(ShlexTest)
if __name__ == "__main__":
test_main()
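To make the two interfaces exercised above concrete: shlex.split() applies the POSIX rules checked against posix_data, while the older shlex.shlex token stream wrapped by oldSplit() keeps quotes, matching data. A small sketch, for illustration only:
# Quick illustration of the two tokenizers compared by the tests above.
import shlex
try:
    from cStringIO import StringIO      # Python 2, as in the test module
except ImportError:
    from io import StringIO             # Python 3 fallback

line = 'foo "bar baz" qux'

# POSIX-style splitting (quotes removed), matching the posix_data rows:
print(shlex.split(line))                # ['foo', 'bar baz', 'qux']

# Old non-POSIX shlex.shlex stream, matching the data rows / oldSplit():
lex = shlex.shlex(StringIO(line))
print([tok for tok in iter(lex.get_token, '')])   # ['foo', '"bar baz"', 'qux']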
|
andrewguy9/safeoutput
|
refs/heads/master
|
safeoutput/__init__.py
|
1
|
import argparse
import logging
import sys
from builtins import object
from os import rename
from os.path import abspath, dirname
from tempfile import NamedTemporaryFile
LOG = logging.getLogger(__name__)
def open(dst=None, mode="w"):
if dst:
fd = NamedTemporaryFile(dir=dirname(abspath(dst)), mode=mode)
else:
if mode == "w":
fd = sys.stdout
else:
try:
fd = sys.stdout.buffer
except AttributeError:
fd = sys.stdout
return _SafeOutputWrapper(fd, dst)
class _SafeOutputWrapper(object):
def __init__(self, fd, dst):
self.fd = fd
self.dst = dst
def __enter__(self):
if self.dst:
self.fd.__enter__()
return self
def __getattr__(self, name):
# Attribute lookups are delegated to the underlying tempfile
fd = self.__dict__['fd']
a = getattr(fd, name)
return a
def close(self, commit=True):
if self.dst:
if commit == True:
LOG.debug(u"renaming %s to %s", self.fd.name, self.dst)
self.fd.flush()
rename(self.fd.name, self.dst)
# self.fd.delete = False # doesn't work in python3...?
try:
LOG.debug(u"closed %s", self.fd.name)
self.fd.close()
except EnvironmentError: # aka FileNotFoundError in Python 3
pass
def __exit__(self, exc_type, exc_value, traceback):
self.close(exc_value is None)
if self.dst:
return self.fd.__exit__(exc_type, exc_value, traceback)
else:
return exc_type is None
def __del__(self):
# If we get to __del__ and have not already committed,
# we don't know that the output is safe. Allow
# tempfile to delete the file.
self.close(False)
def main(args=None):
"""Buffer stdin and flush, and avoid incomplete files."""
parser = argparse.ArgumentParser(description=main.__doc__)
parser.add_argument('--binary',
dest='mode',
action='store_const',
const="wb",
default="w",
help='write in binary mode')
parser.add_argument('output',
metavar='FILE',
type=unicode,
help='Output file')
logging.basicConfig(
level=logging.DEBUG,
stream=sys.stderr,
format='[%(levelname)s elapsed=%(relativeCreated)dms] %(message)s')
args = parser.parse_args(args or sys.argv[1:])
with open(args.output, args.mode) as fd:
for line in sys.stdin:
fd.write(line)
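A hedged usage sketch of the wrapper above (file names are hypothetical): the destination only appears once the with-block exits cleanly, because close(commit=True) flushes and renames the temporary file, while an exception path calls close(False) and lets NamedTemporaryFile discard it.
# Illustrative use of safeoutput.open; the real file only materialises
# when the block exits without an exception.
import os
import safeoutput

with safeoutput.open("report.txt") as fd:
    fd.write("all rows written\n")               # commit: temp file renamed
assert os.path.exists("report.txt")

try:
    with safeoutput.open("broken.txt") as fd:
        fd.write("partial data\n")
        raise RuntimeError("simulated failure")  # abort: temp file dropped
except RuntimeError:
    pass
assert not os.path.exists("broken.txt")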
|
qedi-r/home-assistant
|
refs/heads/dev
|
tests/components/smartthings/test_switch.py
|
5
|
"""
Test for the SmartThings switch platform.
The only mocking required is of the underlying SmartThings API object so
real HTTP calls are not initiated during testing.
"""
from pysmartthings import Attribute, Capability
from homeassistant.components.smartthings import switch
from homeassistant.components.smartthings.const import DOMAIN, SIGNAL_SMARTTHINGS_UPDATE
from homeassistant.components.switch import (
ATTR_CURRENT_POWER_W,
ATTR_TODAY_ENERGY_KWH,
DOMAIN as SWITCH_DOMAIN,
)
from homeassistant.helpers.dispatcher import async_dispatcher_send
from .conftest import setup_platform
async def test_async_setup_platform():
"""Test setup platform does nothing (it uses config entries)."""
await switch.async_setup_platform(None, None, None)
async def test_entity_and_device_attributes(hass, device_factory):
"""Test the attributes of the entity are correct."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "on"})
entity_registry = await hass.helpers.entity_registry.async_get_registry()
device_registry = await hass.helpers.device_registry.async_get_registry()
# Act
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Assert
entry = entity_registry.async_get("switch.switch_1")
assert entry
assert entry.unique_id == device.device_id
entry = device_registry.async_get_device({(DOMAIN, device.device_id)}, [])
assert entry
assert entry.name == device.label
assert entry.model == device.device_type_name
assert entry.manufacturer == "Unavailable"
async def test_turn_off(hass, device_factory):
"""Test the switch turns of successfully."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "on"})
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"switch", "turn_off", {"entity_id": "switch.switch_1"}, blocking=True
)
# Assert
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "off"
async def test_turn_on(hass, device_factory):
"""Test the switch turns of successfully."""
# Arrange
device = device_factory(
"Switch_1",
[Capability.switch, Capability.power_meter, Capability.energy_meter],
{Attribute.switch: "off", Attribute.power: 355, Attribute.energy: 11.422},
)
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.services.async_call(
"switch", "turn_on", {"entity_id": "switch.switch_1"}, blocking=True
)
# Assert
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "on"
assert state.attributes[ATTR_CURRENT_POWER_W] == 355
assert state.attributes[ATTR_TODAY_ENERGY_KWH] == 11.422
async def test_update_from_signal(hass, device_factory):
"""Test the switch updates when receiving a signal."""
# Arrange
device = device_factory("Switch_1", [Capability.switch], {Attribute.switch: "off"})
await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
await device.switch_on(True)
# Act
async_dispatcher_send(hass, SIGNAL_SMARTTHINGS_UPDATE, [device.device_id])
# Assert
await hass.async_block_till_done()
state = hass.states.get("switch.switch_1")
assert state is not None
assert state.state == "on"
async def test_unload_config_entry(hass, device_factory):
"""Test the switch is removed when the config entry is unloaded."""
# Arrange
device = device_factory("Switch 1", [Capability.switch], {Attribute.switch: "on"})
config_entry = await setup_platform(hass, SWITCH_DOMAIN, devices=[device])
# Act
await hass.config_entries.async_forward_entry_unload(config_entry, "switch")
# Assert
assert not hass.states.get("switch.switch_1")
|
EmadMokhtar/Django
|
refs/heads/master
|
tests/select_related/tests.py
|
67
|
from django.core.exceptions import FieldError
from django.test import SimpleTestCase, TestCase
from .models import (
Bookmark, Domain, Family, Genus, HybridSpecies, Kingdom, Klass, Order,
Phylum, Pizza, Species, TaggedItem,
)
class SelectRelatedTests(TestCase):
@classmethod
def create_tree(cls, stringtree):
"""
Helper to create a complete tree.
"""
names = stringtree.split()
models = [Domain, Kingdom, Phylum, Klass, Order, Family, Genus, Species]
assert len(names) == len(models), (names, models)
parent = None
for name, model in zip(names, models):
try:
obj = model.objects.get(name=name)
except model.DoesNotExist:
obj = model(name=name)
if parent:
setattr(obj, parent.__class__.__name__.lower(), parent)
obj.save()
parent = obj
@classmethod
def setUpTestData(cls):
cls.create_tree("Eukaryota Animalia Anthropoda Insecta Diptera Drosophilidae Drosophila melanogaster")
cls.create_tree("Eukaryota Animalia Chordata Mammalia Primates Hominidae Homo sapiens")
cls.create_tree("Eukaryota Plantae Magnoliophyta Magnoliopsida Fabales Fabaceae Pisum sativum")
cls.create_tree("Eukaryota Fungi Basidiomycota Homobasidiomycatae Agaricales Amanitacae Amanita muscaria")
def test_access_fks_without_select_related(self):
"""
Normally, accessing FKs doesn't fill in related objects
"""
with self.assertNumQueries(8):
fly = Species.objects.get(name="melanogaster")
domain = fly.genus.family.order.klass.phylum.kingdom.domain
self.assertEqual(domain.name, 'Eukaryota')
def test_access_fks_with_select_related(self):
"""
A select_related() call will fill in those related objects without any
extra queries
"""
with self.assertNumQueries(1):
person = (
Species.objects
.select_related('genus__family__order__klass__phylum__kingdom__domain')
.get(name="sapiens")
)
domain = person.genus.family.order.klass.phylum.kingdom.domain
self.assertEqual(domain.name, 'Eukaryota')
def test_list_without_select_related(self):
"""
select_related() also of course applies to entire lists, not just
items. This test verifies the expected behavior without select_related.
"""
with self.assertNumQueries(9):
world = Species.objects.all()
families = [o.genus.family.name for o in world]
self.assertEqual(sorted(families), [
'Amanitacae',
'Drosophilidae',
'Fabaceae',
'Hominidae',
])
def test_list_with_select_related(self):
"""
select_related() also of course applies to entire lists, not just
items. This test verifies the expected behavior with select_related.
"""
with self.assertNumQueries(1):
world = Species.objects.all().select_related()
families = [o.genus.family.name for o in world]
self.assertEqual(sorted(families), [
'Amanitacae',
'Drosophilidae',
'Fabaceae',
'Hominidae',
])
def test_list_with_depth(self):
"""
Passing a relationship field lookup specifier to select_related() will
stop the descent at a particular level. This can be used on lists as
well.
"""
with self.assertNumQueries(5):
world = Species.objects.all().select_related('genus__family')
orders = [o.genus.family.order.name for o in world]
self.assertEqual(sorted(orders), ['Agaricales', 'Diptera', 'Fabales', 'Primates'])
def test_select_related_with_extra(self):
s = (Species.objects.all()
.select_related()
.extra(select={'a': 'select_related_species.id + 10'})[0])
self.assertEqual(s.id + 10, s.a)
def test_certain_fields(self):
"""
The optional fields passed to select_related() control which related
models we pull in. This allows for smaller queries.
In this case, we explicitly say to select the 'genus' and
'genus.family' models, leading to the same number of queries as before.
"""
with self.assertNumQueries(1):
world = Species.objects.select_related('genus__family')
families = [o.genus.family.name for o in world]
self.assertEqual(sorted(families), ['Amanitacae', 'Drosophilidae', 'Fabaceae', 'Hominidae'])
def test_more_certain_fields(self):
"""
In this case, we explicitly say to select the 'genus' and
'genus.family' models, leading to the same number of queries as before.
"""
with self.assertNumQueries(2):
world = Species.objects.filter(genus__name='Amanita')\
.select_related('genus__family')
orders = [o.genus.family.order.name for o in world]
self.assertEqual(orders, ['Agaricales'])
def test_field_traversal(self):
with self.assertNumQueries(1):
s = (Species.objects.all()
.select_related('genus__family__order')
.order_by('id')[0:1].get().genus.family.order.name)
self.assertEqual(s, 'Diptera')
def test_none_clears_list(self):
queryset = Species.objects.select_related('genus').select_related(None)
self.assertIs(queryset.query.select_related, False)
def test_chaining(self):
parent_1, parent_2 = Species.objects.all()[:2]
HybridSpecies.objects.create(name='hybrid', parent_1=parent_1, parent_2=parent_2)
queryset = HybridSpecies.objects.select_related('parent_1').select_related('parent_2')
with self.assertNumQueries(1):
obj = queryset[0]
self.assertEqual(obj.parent_1, parent_1)
self.assertEqual(obj.parent_2, parent_2)
def test_reverse_relation_caching(self):
species = Species.objects.select_related('genus').filter(name='melanogaster').first()
with self.assertNumQueries(0):
self.assertEqual(species.genus.name, 'Drosophila')
# The species_set reverse relation isn't cached.
self.assertEqual(species.genus._state.fields_cache, {})
with self.assertNumQueries(1):
self.assertEqual(species.genus.species_set.first().name, 'melanogaster')
def test_select_related_after_values(self):
"""
Running select_related() after calling values() raises a TypeError
"""
message = "Cannot call select_related() after .values() or .values_list()"
with self.assertRaisesMessage(TypeError, message):
list(Species.objects.values('name').select_related('genus'))
def test_select_related_after_values_list(self):
"""
Running select_related() after calling values_list() raises a TypeError
"""
message = "Cannot call select_related() after .values() or .values_list()"
with self.assertRaisesMessage(TypeError, message):
list(Species.objects.values_list('name').select_related('genus'))
class SelectRelatedValidationTests(SimpleTestCase):
"""
select_related() should raise an error on fields that do not exist and on
non-relational fields.
"""
non_relational_error = "Non-relational field given in select_related: '%s'. Choices are: %s"
invalid_error = "Invalid field name(s) given in select_related: '%s'. Choices are: %s"
def test_non_relational_field(self):
with self.assertRaisesMessage(FieldError, self.non_relational_error % ('name', 'genus')):
list(Species.objects.select_related('name__some_field'))
with self.assertRaisesMessage(FieldError, self.non_relational_error % ('name', 'genus')):
list(Species.objects.select_related('name'))
with self.assertRaisesMessage(FieldError, self.non_relational_error % ('name', '(none)')):
list(Domain.objects.select_related('name'))
def test_non_relational_field_nested(self):
with self.assertRaisesMessage(FieldError, self.non_relational_error % ('name', 'family')):
list(Species.objects.select_related('genus__name'))
def test_many_to_many_field(self):
with self.assertRaisesMessage(FieldError, self.invalid_error % ('toppings', '(none)')):
list(Pizza.objects.select_related('toppings'))
def test_reverse_relational_field(self):
with self.assertRaisesMessage(FieldError, self.invalid_error % ('child_1', 'genus')):
list(Species.objects.select_related('child_1'))
def test_invalid_field(self):
with self.assertRaisesMessage(FieldError, self.invalid_error % ('invalid_field', 'genus')):
list(Species.objects.select_related('invalid_field'))
with self.assertRaisesMessage(FieldError, self.invalid_error % ('related_invalid_field', 'family')):
list(Species.objects.select_related('genus__related_invalid_field'))
with self.assertRaisesMessage(FieldError, self.invalid_error % ('invalid_field', '(none)')):
list(Domain.objects.select_related('invalid_field'))
def test_generic_relations(self):
with self.assertRaisesMessage(FieldError, self.invalid_error % ('tags', '')):
list(Bookmark.objects.select_related('tags'))
with self.assertRaisesMessage(FieldError, self.invalid_error % ('content_object', 'content_type')):
list(TaggedItem.objects.select_related('content_object'))
|
RMKD/networkx
|
refs/heads/master
|
networkx/algorithms/centrality/tests/test_degree_centrality.py
|
101
|
"""
Unit tests for degree centrality.
"""
from nose.tools import *
import networkx as nx
class TestDegreeCentrality:
def __init__(self):
self.K = nx.krackhardt_kite_graph()
self.P3 = nx.path_graph(3)
self.K5 = nx.complete_graph(5)
F = nx.Graph() # Florentine families
F.add_edge('Acciaiuoli','Medici')
F.add_edge('Castellani','Peruzzi')
F.add_edge('Castellani','Strozzi')
F.add_edge('Castellani','Barbadori')
F.add_edge('Medici','Barbadori')
F.add_edge('Medici','Ridolfi')
F.add_edge('Medici','Tornabuoni')
F.add_edge('Medici','Albizzi')
F.add_edge('Medici','Salviati')
F.add_edge('Salviati','Pazzi')
F.add_edge('Peruzzi','Strozzi')
F.add_edge('Peruzzi','Bischeri')
F.add_edge('Strozzi','Ridolfi')
F.add_edge('Strozzi','Bischeri')
F.add_edge('Ridolfi','Tornabuoni')
F.add_edge('Tornabuoni','Guadagni')
F.add_edge('Albizzi','Ginori')
F.add_edge('Albizzi','Guadagni')
F.add_edge('Bischeri','Guadagni')
F.add_edge('Guadagni','Lamberteschi')
self.F = F
G = nx.DiGraph()
G.add_edge(0,5)
G.add_edge(1,5)
G.add_edge(2,5)
G.add_edge(3,5)
G.add_edge(4,5)
G.add_edge(5,6)
G.add_edge(5,7)
G.add_edge(5,8)
self.G = G
def test_degree_centrality_1(self):
d = nx.degree_centrality(self.K5)
exact = dict(zip(range(5), [1]*5))
for n,dc in d.items():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_2(self):
d = nx.degree_centrality(self.P3)
exact = {0:0.5, 1:1, 2:0.5}
for n,dc in d.items():
assert_almost_equal(exact[n], dc)
def test_degree_centrality_3(self):
d = nx.degree_centrality(self.K)
exact = {0:.444, 1:.444, 2:.333, 3:.667, 4:.333,
5:.556, 6:.556, 7:.333, 8:.222, 9:.111}
for n,dc in d.items():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_degree_centrality_4(self):
d = nx.degree_centrality(self.F)
names = sorted(self.F.nodes())
dcs = [0.071, 0.214, 0.143, 0.214, 0.214, 0.071, 0.286,
0.071, 0.429, 0.071, 0.214, 0.214, 0.143, 0.286, 0.214]
exact = dict(zip(names, dcs))
for n,dc in d.items():
assert_almost_equal(exact[n], float("%5.3f" % dc))
def test_indegree_centrality(self):
d = nx.in_degree_centrality(self.G)
exact = {0: 0.0, 1: 0.0, 2: 0.0, 3: 0.0, 4: 0.0,
5: 0.625, 6: 0.125, 7: 0.125, 8: 0.125}
for n,dc in d.items():
assert_almost_equal(exact[n], dc)
def test_outdegree_centrality(self):
d = nx.out_degree_centrality(self.G)
exact = {0: 0.125, 1: 0.125, 2: 0.125, 3: 0.125,
4: 0.125, 5: 0.375, 6: 0.0, 7: 0.0, 8: 0.0}
for n,dc in d.items():
assert_almost_equal(exact[n], dc)
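The expected values in these tests follow from degree centrality being the node degree normalised by n - 1. A quick hand check of the P3 case (assumes networkx is importable; "manual" is just an illustrative name):
# Re-deriving the P3 expectations from test_degree_centrality_2:
# degree centrality = degree / (n - 1).
import networkx as nx

P3 = nx.path_graph(3)                        # 0 - 1 - 2
n = P3.number_of_nodes()
deg = dict(P3.degree())                      # {0: 1, 1: 2, 2: 1}
manual = {v: d / float(n - 1) for v, d in deg.items()}
print(manual)                                # {0: 0.5, 1: 1.0, 2: 0.5}
assert manual == nx.degree_centrality(P3)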
|
dgladkov/django
|
refs/heads/master
|
tests/logging_tests/logconfig.py
|
609
|
import logging
from django.conf import settings
from django.core.mail.backends.base import BaseEmailBackend
class MyHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
self.config = settings.LOGGING
class MyEmailBackend(BaseEmailBackend):
def send_messages(self, email_messages):
pass
|
evanma92/routeh
|
refs/heads/master
|
flask/lib/python2.7/site-packages/whoosh/util/text.py
|
96
|
# Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
import codecs, re
from whoosh.compat import string_type, u, byte
# Note: these functions return a tuple of (text, length), so when you call
# them, you have to add [0] on the end, e.g. str = utf8encode(unicode)[0]
utf8encode = codecs.getencoder("utf-8")
utf8decode = codecs.getdecoder("utf-8")
# Prefix encoding functions
def first_diff(a, b):
"""
Returns the position of the first differing character in the sequences a
and b. For example, first_diff('render', 'rending') == 4. This function
limits the return value to 255 so the difference can be encoded in a single
byte.
"""
i = 0
while i < 255 and i < len(a) and i < len(b) and a[i] == b[i]:
i += 1
return i
def prefix_encode(a, b):
"""
Compresses bytestring b as a byte representing the prefix it shares with a,
followed by the suffix bytes.
"""
i = first_diff(a, b)
return byte(i) + b[i:]
def prefix_encode_all(ls):
"""Compresses the given list of (unicode) strings by storing each string
(except the first one) as an integer (encoded in a byte) representing
the prefix it shares with its predecessor, followed by the suffix encoded
as UTF-8.
"""
last = u('')
for w in ls:
i = first_diff(last, w)
yield chr(i) + w[i:].encode("utf-8")
last = w
def prefix_decode_all(ls):
"""Decompresses a list of strings compressed by prefix_encode().
"""
last = u('')
for w in ls:
i = ord(w[0])
decoded = last[:i] + w[1:].decode("utf-8")
yield decoded
last = decoded
# Natural key sorting function
_nkre = re.compile(r"\D+|\d+", re.UNICODE)
def _nkconv(i):
try:
return int(i)
except ValueError:
return i.lower()
def natural_key(s):
"""Converts string ``s`` into a tuple that will sort "naturally" (i.e.,
``name5`` will come before ``name10`` and ``1`` will come before ``A``).
This function is designed to be used as the ``key`` argument to sorting
functions.
:param s: the str/unicode string to convert.
:rtype: tuple
"""
# Use _nkre to split the input string into a sequence of
# digit runs and non-digit runs. Then use _nkconv() to convert
# the digit runs into ints and the non-digit runs to lowercase.
return tuple(_nkconv(m) for m in _nkre.findall(s))
# Regular expression functions
def rcompile(pattern, flags=0, verbose=False):
"""A wrapper for re.compile that checks whether "pattern" is a regex object
or a string to be compiled, and automatically adds the re.UNICODE flag.
"""
if not isinstance(pattern, string_type):
# If it's not a string, assume it's already a compiled pattern
return pattern
if verbose:
flags |= re.VERBOSE
return re.compile(pattern, re.UNICODE | flags)
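A few worked values for the helpers above, assuming whoosh itself is importable; these only restate what the docstrings claim:
# Worked examples for first_diff, prefix_encode and natural_key.
from whoosh.util.text import first_diff, natural_key, prefix_encode

print(first_diff("render", "rending"))        # 4  ('e' vs 'i' at index 4)
print(prefix_encode(b"render", b"rending"))   # b'\x04ing'  (prefix length + suffix)
print(natural_key("name10"))                  # ('name', 10)
assert natural_key("name5") < natural_key("name10")   # "natural" ordering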
|
XueqingLin/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/dataframe/transforms/boolean_mask.py
|
10
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Masks one `Series` based on the content of another `Series`."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.learn.python.learn.dataframe import series
from tensorflow.contrib.learn.python.learn.dataframe import transform
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import sparse_ops
def sparse_boolean_mask(sparse_tensor, mask, name="sparse_boolean_mask"):
"""Boolean mask for `SparseTensor`s.
Args:
sparse_tensor: a `SparseTensor`.
mask: a 1D boolean dense `Tensor` whose length is equal to the 0th dimension
of `sparse_tensor`.
name: optional name for this operation.
Returns:
A `SparseTensor` that contains row `k` of `sparse_tensor` iff `mask[k]` is
`True`.
"""
# TODO(jamieas): consider mask dimension > 1 for symmetry with `boolean_mask`.
with ops.name_scope(name, values=[sparse_tensor, mask]):
mask = ops.convert_to_tensor(mask)
mask_rows = array_ops.where(mask)
first_indices = array_ops.squeeze(array_ops.slice(sparse_tensor.indices,
[0, 0], [-1, 1]))
# Identify indices corresponding to the rows identified by mask_rows.
sparse_entry_matches = functional_ops.map_fn(
lambda x: math_ops.equal(first_indices, x),
mask_rows,
dtype=dtypes.bool)
# Combine the rows of index_matches to form a mask for the sparse indices
# and values.
to_retain = array_ops.reshape(
functional_ops.foldl(math_ops.logical_or, sparse_entry_matches), [-1])
return sparse_ops.sparse_retain(sparse_tensor, to_retain)
@series.Series.register_binary_op("select_rows")
class BooleanMask(transform.TensorFlowTransform):
"""Apply a boolean mask to a `Series`."""
@property
def name(self):
return "BooleanMask"
@property
def input_valency(self):
return 2
@property
def _output_names(self):
return "output",
def _apply_transform(self, input_tensors, **kwargs):
"""Applies the transformation to the `transform_input`.
Args:
input_tensors: a list of Tensors representing the input to
the Transform.
**kwargs: Additional keyword arguments, unused here.
Returns:
A namedtuple of Tensors representing the transformed output.
"""
input_tensor = input_tensors[0]
mask = input_tensors[1]
if mask.get_shape().ndims > 1:
mask = array_ops.squeeze(mask)
if isinstance(input_tensor, ops.SparseTensor):
mask_fn = sparse_boolean_mask
else:
mask_fn = array_ops.boolean_mask
# pylint: disable=not-callable
return self.return_type(mask_fn(input_tensor, mask))
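To illustrate the row-mask semantics that sparse_boolean_mask implements, here is a plain-NumPy sketch over COO-style (indices, values) data; it shows only the intended result, not the TensorFlow implementation, and the arrays are made up for the example.
# NumPy sketch of the semantics: keep an entry iff its row index k has
# mask[k] == True (illustration only).
import numpy as np

indices = np.array([[0, 0], [0, 2], [1, 1], [2, 0], [2, 2]])  # (row, col)
values = np.array([1, 2, 3, 4, 5])
mask = np.array([True, False, True])

keep = mask[indices[:, 0]]     # per-entry flag: is this entry's row retained?
print(indices[keep])           # [[0 0] [0 2] [2 0] [2 2]]
print(values[keep])            # [1 2 4 5]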
|
nickgashkov/virtualspace
|
refs/heads/dev
|
virtualspace/utils/models/__init__.py
|
13
|
# Copyright (c) 2017 Nick Gashkov
#
# Distributed under MIT License. See LICENSE file for details.
|
googleapis/python-redis
|
refs/heads/master
|
tests/unit/gapic/redis_v1beta1/test_cloud_redis.py
|
1
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.redis_v1beta1.services.cloud_redis import CloudRedisAsyncClient
from google.cloud.redis_v1beta1.services.cloud_redis import CloudRedisClient
from google.cloud.redis_v1beta1.services.cloud_redis import pagers
from google.cloud.redis_v1beta1.services.cloud_redis import transports
from google.cloud.redis_v1beta1.services.cloud_redis.transports.base import (
_GOOGLE_AUTH_VERSION,
)
from google.cloud.redis_v1beta1.types import cloud_redis
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import any_pb2 # type: ignore
from google.protobuf import field_mask_pb2 # type: ignore
from google.protobuf import timestamp_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
reason="This test requires google-auth < 1.25.0",
)
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
return (
"foo.googleapis.com"
if ("localhost" in client.DEFAULT_ENDPOINT)
else client.DEFAULT_ENDPOINT
)
def test__get_default_mtls_endpoint():
api_endpoint = "example.googleapis.com"
api_mtls_endpoint = "example.mtls.googleapis.com"
sandbox_endpoint = "example.sandbox.googleapis.com"
sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
non_googleapi = "api.example.com"
assert CloudRedisClient._get_default_mtls_endpoint(None) is None
assert (
CloudRedisClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint
)
assert (
CloudRedisClient._get_default_mtls_endpoint(api_mtls_endpoint)
== api_mtls_endpoint
)
assert (
CloudRedisClient._get_default_mtls_endpoint(sandbox_endpoint)
== sandbox_mtls_endpoint
)
assert (
CloudRedisClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
== sandbox_mtls_endpoint
)
assert CloudRedisClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi
@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient,])
def test_cloud_redis_client_from_service_account_info(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_info"
) as factory:
factory.return_value = creds
info = {"valid": True}
client = client_class.from_service_account_info(info)
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "redis.googleapis.com:443"
@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient,])
def test_cloud_redis_client_service_account_always_use_jwt(client_class):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
client = client_class(credentials=creds)
use_jwt.assert_not_called()
@pytest.mark.parametrize(
"transport_class,transport_name",
[
(transports.CloudRedisGrpcTransport, "grpc"),
(transports.CloudRedisGrpcAsyncIOTransport, "grpc_asyncio"),
],
)
def test_cloud_redis_client_service_account_always_use_jwt_true(
transport_class, transport_name
):
with mock.patch.object(
service_account.Credentials, "with_always_use_jwt_access", create=True
) as use_jwt:
creds = service_account.Credentials(None, None, None)
transport = transport_class(credentials=creds, always_use_jwt_access=True)
use_jwt.assert_called_once_with(True)
@pytest.mark.parametrize("client_class", [CloudRedisClient, CloudRedisAsyncClient,])
def test_cloud_redis_client_from_service_account_file(client_class):
creds = ga_credentials.AnonymousCredentials()
with mock.patch.object(
service_account.Credentials, "from_service_account_file"
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
client = client_class.from_service_account_json("dummy/file/path.json")
assert client.transport._credentials == creds
assert isinstance(client, client_class)
assert client.transport._host == "redis.googleapis.com:443"
def test_cloud_redis_client_get_transport_class():
transport = CloudRedisClient.get_transport_class()
available_transports = [
transports.CloudRedisGrpcTransport,
]
assert transport in available_transports
transport = CloudRedisClient.get_transport_class("grpc")
assert transport == transports.CloudRedisGrpcTransport
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
(
CloudRedisAsyncClient,
transports.CloudRedisGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
@mock.patch.object(
CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
)
@mock.patch.object(
CloudRedisAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudRedisAsyncClient),
)
def test_cloud_redis_client_client_options(
client_class, transport_class, transport_name
):
# Check that if channel is provided we won't create a new one.
with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
client = client_class(transport=transport)
gtc.assert_not_called()
# Check that if channel is provided via str we will create a new one.
with mock.patch.object(CloudRedisClient, "get_transport_class") as gtc:
client = client_class(transport=transport_name)
gtc.assert_called()
# Check the case api_endpoint is provided.
options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
# unsupported value.
with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
with pytest.raises(MutualTLSChannelError):
client = client_class()
# Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
):
with pytest.raises(ValueError):
client = client_class()
# Check the case quota_project_id is provided
options = client_options.ClientOptions(quota_project_id="octopus")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id="octopus",
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name,use_client_cert_env",
[
(CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "true"),
(
CloudRedisAsyncClient,
transports.CloudRedisGrpcAsyncIOTransport,
"grpc_asyncio",
"true",
),
(CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc", "false"),
(
CloudRedisAsyncClient,
transports.CloudRedisGrpcAsyncIOTransport,
"grpc_asyncio",
"false",
),
],
)
@mock.patch.object(
CloudRedisClient, "DEFAULT_ENDPOINT", modify_default_endpoint(CloudRedisClient)
)
@mock.patch.object(
CloudRedisAsyncClient,
"DEFAULT_ENDPOINT",
modify_default_endpoint(CloudRedisAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_cloud_redis_client_mtls_env_auto(
client_class, transport_class, transport_name, use_client_cert_env
):
# This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
# mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
# Check the case client_cert_source is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
if use_client_cert_env == "false":
expected_client_cert_source = None
expected_host = client.DEFAULT_ENDPOINT
else:
expected_client_cert_source = client_cert_source_callback
expected_host = client.DEFAULT_MTLS_ENDPOINT
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case ADC client cert is provided. Whether client cert is used depends on
# GOOGLE_API_USE_CLIENT_CERTIFICATE value.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=True,
):
with mock.patch(
"google.auth.transport.mtls.default_client_cert_source",
return_value=client_cert_source_callback,
):
if use_client_cert_env == "false":
expected_host = client.DEFAULT_ENDPOINT
expected_client_cert_source = None
else:
expected_host = client.DEFAULT_MTLS_ENDPOINT
expected_client_cert_source = client_cert_source_callback
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=expected_host,
scopes=None,
client_cert_source_for_mtls=expected_client_cert_source,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
# Check the case client_cert_source and ADC client cert are not provided.
with mock.patch.dict(
os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
"google.auth.transport.mtls.has_default_client_cert_source",
return_value=False,
):
patched.return_value = None
client = client_class()
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
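# (Illustration only, not library code.) The autoswitch rule the test above
# exercises can be summarised as: the mTLS endpoint is used only when
# GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND some client certificate
# source (explicit or ADC) is available; otherwise the default endpoint wins.
def _expected_endpoint(use_client_cert_env, has_cert_source, default_ep, mtls_ep):
    if use_client_cert_env == "true" and has_cert_source:
        return mtls_ep
    return default_ep
# e.g. _expected_endpoint("true", True, "a.googleapis.com", "a.mtls.googleapis.com")
# -> "a.mtls.googleapis.com"; any other combination -> "a.googleapis.com"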
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
(
CloudRedisAsyncClient,
transports.CloudRedisGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_redis_client_client_options_scopes(
client_class, transport_class, transport_name
):
# Check the case scopes are provided.
options = client_options.ClientOptions(scopes=["1", "2"],)
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@pytest.mark.parametrize(
"client_class,transport_class,transport_name",
[
(CloudRedisClient, transports.CloudRedisGrpcTransport, "grpc"),
(
CloudRedisAsyncClient,
transports.CloudRedisGrpcAsyncIOTransport,
"grpc_asyncio",
),
],
)
def test_cloud_redis_client_client_options_credentials_file(
client_class, transport_class, transport_name
):
# Check the case credentials file is provided.
options = client_options.ClientOptions(credentials_file="credentials.json")
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class(client_options=options)
patched.assert_called_once_with(
credentials=None,
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_cloud_redis_client_client_options_from_dict():
with mock.patch(
"google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisGrpcTransport.__init__"
) as grpc_transport:
grpc_transport.return_value = None
client = CloudRedisClient(client_options={"api_endpoint": "squid.clam.whelk"})
grpc_transport.assert_called_once_with(
credentials=None,
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
client_cert_source_for_mtls=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
def test_list_instances(
transport: str = "grpc", request_type=cloud_redis.ListInstancesRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_redis.ListInstancesResponse(
next_page_token="next_page_token_value", unreachable=["unreachable_value"],
)
response = client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ListInstancesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListInstancesPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
def test_list_instances_from_dict():
test_list_instances(request_type=dict)
def test_list_instances_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
client.list_instances()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ListInstancesRequest()
@pytest.mark.asyncio
async def test_list_instances_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.ListInstancesRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_redis.ListInstancesResponse(
next_page_token="next_page_token_value",
unreachable=["unreachable_value"],
)
)
response = await client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ListInstancesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListInstancesAsyncPager)
assert response.next_page_token == "next_page_token_value"
assert response.unreachable == ["unreachable_value"]
@pytest.mark.asyncio
async def test_list_instances_async_from_dict():
await test_list_instances_async(request_type=dict)
def test_list_instances_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ListInstancesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
call.return_value = cloud_redis.ListInstancesResponse()
client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_list_instances_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ListInstancesRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_redis.ListInstancesResponse()
)
await client.list_instances(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_list_instances_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_redis.ListInstancesResponse()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.list_instances(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
def test_list_instances_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.list_instances(
cloud_redis.ListInstancesRequest(), parent="parent_value",
)
@pytest.mark.asyncio
async def test_list_instances_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.ListInstancesResponse()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.list_instances(parent="parent_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
@pytest.mark.asyncio
async def test_list_instances_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.list_instances(
cloud_redis.ListInstancesRequest(), parent="parent_value",
)
def test_list_instances_pager():
    client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_redis.ListInstancesResponse(
instances=[
cloud_redis.Instance(),
cloud_redis.Instance(),
cloud_redis.Instance(),
],
next_page_token="abc",
),
cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(),], next_page_token="ghi",
),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
),
RuntimeError,
)
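        # Expected metadata attached to the pager: a routing header built from
        # the (empty) "parent" field of the request.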
metadata = ()
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
)
pager = client.list_instances(request={})
assert pager._metadata == metadata
        results = list(pager)
assert len(results) == 6
assert all(isinstance(i, cloud_redis.Instance) for i in results)
def test_list_instances_pages():
    client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.list_instances), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_redis.ListInstancesResponse(
instances=[
cloud_redis.Instance(),
cloud_redis.Instance(),
cloud_redis.Instance(),
],
next_page_token="abc",
),
cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(),], next_page_token="ghi",
),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
),
RuntimeError,
)
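        # Walk the synchronous pager page by page; the tokens should match the
        # canned responses above, with the final page reporting an empty token.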
pages = list(client.list_instances(request={}).pages)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_instances_async_pager():
    client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_redis.ListInstancesResponse(
instances=[
cloud_redis.Instance(),
cloud_redis.Instance(),
cloud_redis.Instance(),
],
next_page_token="abc",
),
cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(),], next_page_token="ghi",
),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
),
RuntimeError,
)
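        # Awaiting the call returns an async pager; items are then consumed
        # with ``async for`` across all configured pages.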
async_pager = await client.list_instances(request={},)
assert async_pager.next_page_token == "abc"
responses = []
async for response in async_pager:
responses.append(response)
assert len(responses) == 6
assert all(isinstance(i, cloud_redis.Instance) for i in responses)
@pytest.mark.asyncio
async def test_list_instances_async_pages():
    client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_instances), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
cloud_redis.ListInstancesResponse(
instances=[
cloud_redis.Instance(),
cloud_redis.Instance(),
cloud_redis.Instance(),
],
next_page_token="abc",
),
cloud_redis.ListInstancesResponse(instances=[], next_page_token="def",),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(),], next_page_token="ghi",
),
cloud_redis.ListInstancesResponse(
instances=[cloud_redis.Instance(), cloud_redis.Instance(),],
),
RuntimeError,
)
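        # Awaiting the call returns an async pager whose ``pages`` attribute is
        # an async iterator over the raw responses configured above.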
pages = []
async for page_ in (await client.list_instances(request={})).pages:
pages.append(page_)
for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
assert page_.raw_page.next_page_token == token
def test_get_instance(
transport: str = "grpc", request_type=cloud_redis.GetInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_redis.Instance(
name="name_value",
display_name="display_name_value",
location_id="location_id_value",
alternative_location_id="alternative_location_id_value",
redis_version="redis_version_value",
reserved_ip_range="reserved_ip_range_value",
host="host_value",
port=453,
current_location_id="current_location_id_value",
state=cloud_redis.Instance.State.CREATING,
status_message="status_message_value",
tier=cloud_redis.Instance.Tier.BASIC,
memory_size_gb=1499,
authorized_network="authorized_network_value",
persistence_iam_identity="persistence_iam_identity_value",
connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
)
response = client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.GetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_redis.Instance)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.location_id == "location_id_value"
assert response.alternative_location_id == "alternative_location_id_value"
assert response.redis_version == "redis_version_value"
assert response.reserved_ip_range == "reserved_ip_range_value"
assert response.host == "host_value"
assert response.port == 453
assert response.current_location_id == "current_location_id_value"
assert response.state == cloud_redis.Instance.State.CREATING
assert response.status_message == "status_message_value"
assert response.tier == cloud_redis.Instance.Tier.BASIC
assert response.memory_size_gb == 1499
assert response.authorized_network == "authorized_network_value"
assert response.persistence_iam_identity == "persistence_iam_identity_value"
assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
def test_get_instance_from_dict():
test_get_instance(request_type=dict)
def test_get_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
client.get_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.GetInstanceRequest()
@pytest.mark.asyncio
async def test_get_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.GetInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_redis.Instance(
name="name_value",
display_name="display_name_value",
location_id="location_id_value",
alternative_location_id="alternative_location_id_value",
redis_version="redis_version_value",
reserved_ip_range="reserved_ip_range_value",
host="host_value",
port=453,
current_location_id="current_location_id_value",
state=cloud_redis.Instance.State.CREATING,
status_message="status_message_value",
tier=cloud_redis.Instance.Tier.BASIC,
memory_size_gb=1499,
authorized_network="authorized_network_value",
persistence_iam_identity="persistence_iam_identity_value",
connect_mode=cloud_redis.Instance.ConnectMode.DIRECT_PEERING,
)
)
response = await client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.GetInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, cloud_redis.Instance)
assert response.name == "name_value"
assert response.display_name == "display_name_value"
assert response.location_id == "location_id_value"
assert response.alternative_location_id == "alternative_location_id_value"
assert response.redis_version == "redis_version_value"
assert response.reserved_ip_range == "reserved_ip_range_value"
assert response.host == "host_value"
assert response.port == 453
assert response.current_location_id == "current_location_id_value"
assert response.state == cloud_redis.Instance.State.CREATING
assert response.status_message == "status_message_value"
assert response.tier == cloud_redis.Instance.Tier.BASIC
assert response.memory_size_gb == 1499
assert response.authorized_network == "authorized_network_value"
assert response.persistence_iam_identity == "persistence_iam_identity_value"
assert response.connect_mode == cloud_redis.Instance.ConnectMode.DIRECT_PEERING
@pytest.mark.asyncio
async def test_get_instance_async_from_dict():
await test_get_instance_async(request_type=dict)
def test_get_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.GetInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = cloud_redis.Instance()
client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_get_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.GetInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
cloud_redis.Instance()
)
await client.get_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_get_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = cloud_redis.Instance()
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.get_instance(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_get_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.get_instance(
cloud_redis.GetInstanceRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_get_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.get_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            cloud_redis.Instance()
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.get_instance(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_get_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.get_instance(
cloud_redis.GetInstanceRequest(), name="name_value",
)
def test_create_instance(
transport: str = "grpc", request_type=cloud_redis.CreateInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
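        # create_instance is a long-running operation, so the stub returns an
        # Operation proto and the client surfaces it as a future.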
response = client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.CreateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_create_instance_from_dict():
test_create_instance(request_type=dict)
def test_create_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
client.create_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.CreateInstanceRequest()
@pytest.mark.asyncio
async def test_create_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.CreateInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.CreateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_instance_async_from_dict():
await test_create_instance_async(request_type=dict)
def test_create_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.CreateInstanceRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_create_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.CreateInstanceRequest()
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.create_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
def test_create_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.create_instance(
parent="parent_value",
instance_id="instance_id_value",
instance=cloud_redis.Instance(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].instance_id == "instance_id_value"
assert args[0].instance == cloud_redis.Instance(name="name_value")
def test_create_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.create_instance(
cloud_redis.CreateInstanceRequest(),
parent="parent_value",
instance_id="instance_id_value",
instance=cloud_redis.Instance(name="name_value"),
)
@pytest.mark.asyncio
async def test_create_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.create_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.create_instance(
parent="parent_value",
instance_id="instance_id_value",
instance=cloud_redis.Instance(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].parent == "parent_value"
assert args[0].instance_id == "instance_id_value"
assert args[0].instance == cloud_redis.Instance(name="name_value")
@pytest.mark.asyncio
async def test_create_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.create_instance(
cloud_redis.CreateInstanceRequest(),
parent="parent_value",
instance_id="instance_id_value",
instance=cloud_redis.Instance(name="name_value"),
)
def test_update_instance(
transport: str = "grpc", request_type=cloud_redis.UpdateInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.update_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpdateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_update_instance_from_dict():
test_update_instance(request_type=dict)
def test_update_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
client.update_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpdateInstanceRequest()
@pytest.mark.asyncio
async def test_update_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.UpdateInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.update_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpdateInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_instance_async_from_dict():
await test_update_instance_async(request_type=dict)
def test_update_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.UpdateInstanceRequest()
request.instance.name = "instance.name/value"
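    # Note that nested fields are addressed with a dotted path ("instance.name")
    # in the resulting routing header.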
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.update_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
"metadata"
]
@pytest.mark.asyncio
async def test_update_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.UpdateInstanceRequest()
request.instance.name = "instance.name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.update_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "instance.name=instance.name/value",) in kw[
"metadata"
]
def test_update_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.update_instance(
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
instance=cloud_redis.Instance(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
assert args[0].instance == cloud_redis.Instance(name="name_value")
def test_update_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.update_instance(
cloud_redis.UpdateInstanceRequest(),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
instance=cloud_redis.Instance(name="name_value"),
)
@pytest.mark.asyncio
async def test_update_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.update_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.update_instance(
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
instance=cloud_redis.Instance(name="name_value"),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"])
assert args[0].instance == cloud_redis.Instance(name="name_value")
@pytest.mark.asyncio
async def test_update_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.update_instance(
cloud_redis.UpdateInstanceRequest(),
update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]),
instance=cloud_redis.Instance(name="name_value"),
)
def test_upgrade_instance(
transport: str = "grpc", request_type=cloud_redis.UpgradeInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_upgrade_instance_from_dict():
test_upgrade_instance(request_type=dict)
def test_upgrade_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
client.upgrade_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpgradeInstanceRequest()
@pytest.mark.asyncio
async def test_upgrade_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.UpgradeInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.UpgradeInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_upgrade_instance_async_from_dict():
await test_upgrade_instance_async(request_type=dict)
def test_upgrade_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.UpgradeInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_upgrade_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.UpgradeInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.upgrade_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_upgrade_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.upgrade_instance(
name="name_value", redis_version="redis_version_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].redis_version == "redis_version_value"
def test_upgrade_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.upgrade_instance(
cloud_redis.UpgradeInstanceRequest(),
name="name_value",
redis_version="redis_version_value",
)
@pytest.mark.asyncio
async def test_upgrade_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.upgrade_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.upgrade_instance(
name="name_value", redis_version="redis_version_value",
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].redis_version == "redis_version_value"
@pytest.mark.asyncio
async def test_upgrade_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.upgrade_instance(
cloud_redis.UpgradeInstanceRequest(),
name="name_value",
redis_version="redis_version_value",
)
def test_import_instance(
transport: str = "grpc", request_type=cloud_redis.ImportInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.import_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ImportInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_import_instance_from_dict():
test_import_instance(request_type=dict)
def test_import_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
client.import_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ImportInstanceRequest()
@pytest.mark.asyncio
async def test_import_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.ImportInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.import_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ImportInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_import_instance_async_from_dict():
await test_import_instance_async(request_type=dict)
def test_import_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ImportInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.import_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_import_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ImportInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.import_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_import_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
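        # The import source is specified as a Cloud Storage URI via
        # InputConfig.gcs_source.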
client.import_instance(
name="name_value",
input_config=cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].input_config == cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
)
def test_import_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.import_instance(
cloud_redis.ImportInstanceRequest(),
name="name_value",
input_config=cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
),
)
@pytest.mark.asyncio
async def test_import_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.import_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.import_instance(
name="name_value",
input_config=cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].input_config == cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
)
@pytest.mark.asyncio
async def test_import_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.import_instance(
cloud_redis.ImportInstanceRequest(),
name="name_value",
input_config=cloud_redis.InputConfig(
gcs_source=cloud_redis.GcsSource(uri="uri_value")
),
)
def test_export_instance(
transport: str = "grpc", request_type=cloud_redis.ExportInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.export_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ExportInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_export_instance_from_dict():
test_export_instance(request_type=dict)
def test_export_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
client.export_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ExportInstanceRequest()
@pytest.mark.asyncio
async def test_export_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.ExportInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.export_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.ExportInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_export_instance_async_from_dict():
await test_export_instance_async(request_type=dict)
def test_export_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ExportInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.export_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_export_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.ExportInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.export_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_export_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
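        # The export destination is specified as a Cloud Storage URI via
        # OutputConfig.gcs_destination.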
client.export_instance(
name="name_value",
output_config=cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].output_config == cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
)
def test_export_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.export_instance(
cloud_redis.ExportInstanceRequest(),
name="name_value",
output_config=cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
),
)
@pytest.mark.asyncio
async def test_export_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.export_instance), "__call__") as call:
# Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.export_instance(
name="name_value",
output_config=cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
),
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert args[0].output_config == cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
)
@pytest.mark.asyncio
async def test_export_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.export_instance(
cloud_redis.ExportInstanceRequest(),
name="name_value",
output_config=cloud_redis.OutputConfig(
gcs_destination=cloud_redis.GcsDestination(uri="uri_value")
),
)
def test_failover_instance(
transport: str = "grpc", request_type=cloud_redis.FailoverInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.failover_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.FailoverInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_failover_instance_from_dict():
test_failover_instance(request_type=dict)
def test_failover_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
client.failover_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.FailoverInstanceRequest()
@pytest.mark.asyncio
async def test_failover_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.FailoverInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.failover_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.FailoverInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_failover_instance_async_from_dict():
await test_failover_instance_async(request_type=dict)
def test_failover_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.FailoverInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.failover_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_failover_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.FailoverInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.failover_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_failover_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.failover_instance(
name="name_value",
data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert (
args[0].data_protection_mode
== cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
)
def test_failover_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.failover_instance(
cloud_redis.FailoverInstanceRequest(),
name="name_value",
data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
)
@pytest.mark.asyncio
async def test_failover_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.failover_instance), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.failover_instance(
name="name_value",
data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
assert (
args[0].data_protection_mode
== cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS
)
@pytest.mark.asyncio
async def test_failover_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.failover_instance(
cloud_redis.FailoverInstanceRequest(),
name="name_value",
data_protection_mode=cloud_redis.FailoverInstanceRequest.DataProtectionMode.LIMITED_DATA_LOSS,
)
def test_delete_instance(
transport: str = "grpc", request_type=cloud_redis.DeleteInstanceRequest
):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/spam")
response = client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
def test_delete_instance_from_dict():
test_delete_instance(request_type=dict)
def test_delete_instance_empty_call():
# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
client.delete_instance()
call.assert_called()
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.DeleteInstanceRequest()
@pytest.mark.asyncio
async def test_delete_instance_async(
transport: str = "grpc_asyncio", request_type=cloud_redis.DeleteInstanceRequest
):
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
response = await client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == cloud_redis.DeleteInstanceRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_instance_async_from_dict():
await test_delete_instance_async(request_type=dict)
def test_delete_instance_field_headers():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.DeleteInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
call.return_value = operations_pb2.Operation(name="operations/op")
client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
@pytest.mark.asyncio
async def test_delete_instance_field_headers_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = cloud_redis.DeleteInstanceRequest()
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/op")
)
await client.delete_instance(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
def test_delete_instance_flattened():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
client.delete_instance(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
def test_delete_instance_flattened_error():
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
client.delete_instance(
cloud_redis.DeleteInstanceRequest(), name="name_value",
)
@pytest.mark.asyncio
async def test_delete_instance_flattened_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(type(client.transport.delete_instance), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = operations_pb2.Operation(name="operations/op")
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
operations_pb2.Operation(name="operations/spam")
)
# Call the method with a truthy value for each flattened field,
# using the keyword arguments to the method.
response = await client.delete_instance(name="name_value",)
# Establish that the underlying call was made with the expected
# request object values.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0].name == "name_value"
@pytest.mark.asyncio
async def test_delete_instance_flattened_error_async():
client = CloudRedisAsyncClient(credentials=ga_credentials.AnonymousCredentials(),)
# Attempting to call a method with both a request object and flattened
# fields is an error.
with pytest.raises(ValueError):
await client.delete_instance(
cloud_redis.DeleteInstanceRequest(), name="name_value",
)
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.CloudRedisGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport=transport,
)
# It is an error to provide a credentials file and a transport instance.
transport = transports.CloudRedisGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudRedisClient(
client_options={"credentials_file": "credentials.json"},
transport=transport,
)
# It is an error to provide scopes and a transport instance.
transport = transports.CloudRedisGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
with pytest.raises(ValueError):
client = CloudRedisClient(
client_options={"scopes": ["1", "2"]}, transport=transport,
)
def test_transport_instance():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudRedisGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
client = CloudRedisClient(transport=transport)
assert client.transport is transport
def test_transport_get_channel():
# A client may be instantiated with a custom transport instance.
transport = transports.CloudRedisGrpcTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
transport = transports.CloudRedisGrpcAsyncIOTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
channel = transport.grpc_channel
assert channel
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport,],
)
def test_transport_adc(transport_class):
# Test default credentials are used if not provided.
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class()
adc.assert_called_once()
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = CloudRedisClient(credentials=ga_credentials.AnonymousCredentials(),)
assert isinstance(client.transport, transports.CloudRedisGrpcTransport,)
def test_cloud_redis_base_transport_error():
# Passing both a credentials object and credentials_file should raise an error
with pytest.raises(core_exceptions.DuplicateCredentialArgs):
transport = transports.CloudRedisTransport(
credentials=ga_credentials.AnonymousCredentials(),
credentials_file="credentials.json",
)
def test_cloud_redis_base_transport():
# Instantiate the base transport.
with mock.patch(
"google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport.__init__"
) as Transport:
Transport.return_value = None
transport = transports.CloudRedisTransport(
credentials=ga_credentials.AnonymousCredentials(),
)
# Every method on the transport should just blindly
# raise NotImplementedError.
methods = (
"list_instances",
"get_instance",
"create_instance",
"update_instance",
"upgrade_instance",
"import_instance",
"export_instance",
"failover_instance",
"delete_instance",
)
for method in methods:
with pytest.raises(NotImplementedError):
getattr(transport, method)(request=object())
# Additionally, the LRO client (a property) should
# also raise NotImplementedError
with pytest.raises(NotImplementedError):
transport.operations_client
@requires_google_auth_gte_1_25_0
def test_cloud_redis_base_transport_with_credentials_file():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudRedisTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
@requires_google_auth_lt_1_25_0
def test_cloud_redis_base_transport_with_credentials_file_old_google_auth():
# Instantiate the base transport with a credentials file
with mock.patch.object(
google.auth, "load_credentials_from_file", autospec=True
) as load_creds, mock.patch(
"google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudRedisTransport(
credentials_file="credentials.json", quota_project_id="octopus",
)
load_creds.assert_called_once_with(
"credentials.json",
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
def test_cloud_redis_base_transport_with_adc():
# Test the default credentials are used if credentials and credentials_file are None.
with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
"google.cloud.redis_v1beta1.services.cloud_redis.transports.CloudRedisTransport._prep_wrapped_messages"
) as Transport:
Transport.return_value = None
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport = transports.CloudRedisTransport()
adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_cloud_redis_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
CloudRedisClient()
adc.assert_called_once_with(
scopes=None,
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
@requires_google_auth_lt_1_25_0
def test_cloud_redis_auth_adc_old_google_auth():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
CloudRedisClient()
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id=None,
)
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport,],
)
@requires_google_auth_gte_1_25_0
def test_cloud_redis_transport_auth_adc(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
adc.assert_called_once_with(
scopes=["1", "2"],
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport,],
)
@requires_google_auth_lt_1_25_0
def test_cloud_redis_transport_auth_adc_old_google_auth(transport_class):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(google.auth, "default", autospec=True) as adc:
adc.return_value = (ga_credentials.AnonymousCredentials(), None)
transport_class(quota_project_id="octopus")
adc.assert_called_once_with(
scopes=("https://www.googleapis.com/auth/cloud-platform",),
quota_project_id="octopus",
)
@pytest.mark.parametrize(
"transport_class,grpc_helpers",
[
(transports.CloudRedisGrpcTransport, grpc_helpers),
(transports.CloudRedisGrpcAsyncIOTransport, grpc_helpers_async),
],
)
def test_cloud_redis_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
with mock.patch.object(
google.auth, "default", autospec=True
) as adc, mock.patch.object(
grpc_helpers, "create_channel", autospec=True
) as create_channel:
creds = ga_credentials.AnonymousCredentials()
adc.return_value = (creds, None)
transport_class(quota_project_id="octopus", scopes=["1", "2"])
create_channel.assert_called_with(
"redis.googleapis.com:443",
credentials=creds,
credentials_file=None,
quota_project_id="octopus",
default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
scopes=["1", "2"],
default_host="redis.googleapis.com",
ssl_credentials=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
)
def test_cloud_redis_grpc_transport_client_cert_source_for_mtls(transport_class):
cred = ga_credentials.AnonymousCredentials()
# Check ssl_channel_credentials is used if provided.
with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
mock_ssl_channel_creds = mock.Mock()
transport_class(
host="squid.clam.whelk",
credentials=cred,
ssl_channel_credentials=mock_ssl_channel_creds,
)
mock_create_channel.assert_called_once_with(
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
    # Check that client_cert_source_for_mtls is used when ssl_channel_credentials
    # is not provided.
with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
transport_class(
credentials=cred,
client_cert_source_for_mtls=client_cert_source_callback,
)
expected_cert, expected_key = client_cert_source_callback()
mock_ssl_cred.assert_called_once_with(
certificate_chain=expected_cert, private_key=expected_key
)
def test_cloud_redis_host_no_port():
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="redis.googleapis.com"
),
)
assert client.transport._host == "redis.googleapis.com:443"
def test_cloud_redis_host_with_port():
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(),
client_options=client_options.ClientOptions(
api_endpoint="redis.googleapis.com:8000"
),
)
assert client.transport._host == "redis.googleapis.com:8000"
def test_cloud_redis_grpc_transport_channel():
channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudRedisGrpcTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
def test_cloud_redis_grpc_asyncio_transport_channel():
channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
# Check that channel is used if provided.
transport = transports.CloudRedisGrpcAsyncIOTransport(
host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
)
def test_cloud_redis_transport_channel_mtls_with_client_cert_source(transport_class):
with mock.patch(
"grpc.ssl_channel_credentials", autospec=True
) as grpc_ssl_channel_cred:
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_ssl_cred = mock.Mock()
grpc_ssl_channel_cred.return_value = mock_ssl_cred
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
cred = ga_credentials.AnonymousCredentials()
with pytest.warns(DeprecationWarning):
with mock.patch.object(google.auth, "default") as adc:
adc.return_value = (cred, None)
transport = transport_class(
host="squid.clam.whelk",
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=client_cert_source_callback,
)
adc.assert_called_once()
grpc_ssl_channel_cred.assert_called_once_with(
certificate_chain=b"cert bytes", private_key=b"key bytes"
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
"transport_class",
[transports.CloudRedisGrpcTransport, transports.CloudRedisGrpcAsyncIOTransport],
)
def test_cloud_redis_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
with mock.patch.object(
transport_class, "create_channel"
) as grpc_create_channel:
mock_grpc_channel = mock.Mock()
grpc_create_channel.return_value = mock_grpc_channel
mock_cred = mock.Mock()
with pytest.warns(DeprecationWarning):
transport = transport_class(
host="squid.clam.whelk",
credentials=mock_cred,
api_mtls_endpoint="mtls.squid.clam.whelk",
client_cert_source=None,
)
grpc_create_channel.assert_called_once_with(
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
assert transport.grpc_channel == mock_grpc_channel
def test_cloud_redis_grpc_lro_client():
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_cloud_redis_grpc_lro_async_client():
client = CloudRedisAsyncClient(
credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
)
transport = client.transport
    # Ensure that we have an api-core operations client.
assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_instance_path():
project = "squid"
location = "clam"
instance = "whelk"
expected = "projects/{project}/locations/{location}/instances/{instance}".format(
project=project, location=location, instance=instance,
)
actual = CloudRedisClient.instance_path(project, location, instance)
assert expected == actual
def test_parse_instance_path():
expected = {
"project": "octopus",
"location": "oyster",
"instance": "nudibranch",
}
path = CloudRedisClient.instance_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_instance_path(path)
assert expected == actual
def test_common_billing_account_path():
billing_account = "cuttlefish"
expected = "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
actual = CloudRedisClient.common_billing_account_path(billing_account)
assert expected == actual
def test_parse_common_billing_account_path():
expected = {
"billing_account": "mussel",
}
path = CloudRedisClient.common_billing_account_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_common_billing_account_path(path)
assert expected == actual
def test_common_folder_path():
folder = "winkle"
expected = "folders/{folder}".format(folder=folder,)
actual = CloudRedisClient.common_folder_path(folder)
assert expected == actual
def test_parse_common_folder_path():
expected = {
"folder": "nautilus",
}
path = CloudRedisClient.common_folder_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_common_folder_path(path)
assert expected == actual
def test_common_organization_path():
organization = "scallop"
expected = "organizations/{organization}".format(organization=organization,)
actual = CloudRedisClient.common_organization_path(organization)
assert expected == actual
def test_parse_common_organization_path():
expected = {
"organization": "abalone",
}
path = CloudRedisClient.common_organization_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_common_organization_path(path)
assert expected == actual
def test_common_project_path():
project = "squid"
expected = "projects/{project}".format(project=project,)
actual = CloudRedisClient.common_project_path(project)
assert expected == actual
def test_parse_common_project_path():
expected = {
"project": "clam",
}
path = CloudRedisClient.common_project_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_common_project_path(path)
assert expected == actual
def test_common_location_path():
project = "whelk"
location = "octopus"
expected = "projects/{project}/locations/{location}".format(
project=project, location=location,
)
actual = CloudRedisClient.common_location_path(project, location)
assert expected == actual
def test_parse_common_location_path():
expected = {
"project": "oyster",
"location": "nudibranch",
}
path = CloudRedisClient.common_location_path(**expected)
# Check that the path construction is reversible.
actual = CloudRedisClient.parse_common_location_path(path)
assert expected == actual
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()
with mock.patch.object(
transports.CloudRedisTransport, "_prep_wrapped_messages"
) as prep:
client = CloudRedisClient(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
with mock.patch.object(
transports.CloudRedisTransport, "_prep_wrapped_messages"
) as prep:
transport_class = CloudRedisClient.get_transport_class()
transport = transport_class(
credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
)
prep.assert_called_once_with(client_info)
|
sangwook236/SWDT
|
refs/heads/master
|
sw_dev/python/ext/test/database/leveldb_main.py
|
1
|
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import json
import numpy as np
#import caffe
import leveldb
def write_to_db_example(use_caffe_datum=False):
N = 1000
X = np.zeros((N, 3, 32, 32), dtype=np.uint8)
y = np.zeros(N, dtype=np.int64)
leveldb_dir_path = './myleveldb'
db = leveldb.LevelDB(leveldb_dir_path, create_if_missing=True)
if use_caffe_datum:
#import caffe
import caffe_pb2
for i in range(N):
# REF [site] >> https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto
#datum = caffe.proto.caffe_pb2.Datum()
datum = caffe_pb2.Datum()
datum.channels = X.shape[1]
datum.height = X.shape[2]
datum.width = X.shape[3]
datum.data = X[i].tobytes() # or .tostring() if numpy < 1.9.
datum.label = int(y[i])
str_id = '{:08}'.format(i)
			# Encoding the key is only needed in Python 3.
db.Put(str_id.encode('ascii'), datum.SerializeToString())
else:
for i in range(N):
datum = {
'channels': X.shape[1],
'height': X.shape[2],
'width': X.shape[3],
'data': X[i].tolist(),
'label': int(y[i]),
}
str_id = '{:08}'.format(i)
			# Encoding the key is only needed in Python 3.
db.Put(str_id.encode('ascii'), json.dumps(datum).encode('ascii'))
#db.Delete(b'00000000')
#--------------------
print(db.GetStats())
def read_from_db_example(use_caffe_datum=False):
leveldb_dir_path = './myleveldb'
db = leveldb.LevelDB(leveldb_dir_path, create_if_missing=True)
key = b'00000000'
try:
raw_datum = db.Get(key)
except KeyError as ex:
print('Invalid key, {}.'.format(key))
return
if use_caffe_datum:
#import caffe
import caffe_pb2
# REF [site] >> https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto
#datum = caffe.proto.caffe_pb2.Datum()
datum = caffe_pb2.Datum()
datum.ParseFromString(raw_datum)
x = np.fromstring(datum.data, dtype=np.uint8)
x = x.reshape(datum.channels, datum.height, datum.width)
y = datum.label
else:
datum = json.loads(raw_datum.decode('ascii'))
x = np.array(datum['data'], dtype=np.uint8)
x = x.reshape(datum['channels'], datum['height'], datum['width'])
y = datum['label']
print(x.shape, y)
def key_value_example(use_caffe_datum=False):
leveldb_dir_path = './myleveldb'
db = leveldb.LevelDB(leveldb_dir_path, create_if_missing=True)
if use_caffe_datum:
#import caffe
import caffe_pb2
for k, v in db.RangeIter():
# REF [site] >> https://github.com/BVLC/caffe/blob/master/src/caffe/proto/caffe.proto
#datum = caffe.proto.caffe_pb2.Datum()
datum = caffe_pb2.Datum()
datum.ParseFromString(v)
x = np.fromstring(datum.data, dtype=np.uint8)
x = x.reshape(datum.channels, datum.height, datum.width)
y = datum.label
print(k.decode(), x.shape, y)
else:
for k, v in db.RangeIter():
datum = json.loads(v.decode('ascii'))
x = np.array(datum['data'], dtype=np.uint8)
x = x.reshape(datum['channels'], datum['height'], datum['width'])
y = datum['label']
print(k.decode(), x.shape, y)
def main():
# Usage:
# For using Caffe Datum:
# protoc --python_out=. caffe.proto
use_caffe_datum = False
write_to_db_example(use_caffe_datum)
#read_from_db_example(use_caffe_datum)
#key_value_example(use_caffe_datum)
#--------------------------------------------------------------------
if '__main__' == __name__:
main()
|
viswimmer1/PythonGenerator
|
refs/heads/master
|
data/python_files/29981338/image.py
|
1
|
import subprocess as sp
import os
import logging
import time
import glob
from lowleveltools import *
from inject import inject as injectIntoRootDisk
_logger = logging.getLogger('image')
def createDiskImages(vg, id, rootSize, swapSize, dataSize, source):
try:
t0 = time.time()
rootDisk = '{0}_root'.format(id)
swapDisk = '{0}_swap'.format(id)
dataDisk = '{0}_data'.format(id)
lvcreateSnapshot(vg, source, rootSize, rootDisk)
lvcreate(vg, swapSize, swapDisk)
lvcreate(vg, dataSize, dataDisk)
waitForUdev()
dt = time.time() - t0
_logger.info('Created disk images for "vm{0}" ({1:.1f}s).'.format(id, dt))
except:
_logger.error('Failed to create disk images for "vm{0}". Cleaning up...'.format(id))
removeDiskImages(vg, id)
raise
def getMapperPath(vg, id, disk=None, partition=None):
p = '/dev/mapper/{0}-{1}'.format(vg, id)
if disk is not None:
p += '_' + disk
if partition:
assert disk is not None
p += str(partition)
return p
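# Illustrative example (not part of the original module): getMapperPath('vg0', 7, 'root', 1)
# would return '/dev/mapper/vg0-7_root1' -- volume group, VM id, disk name and partition
# number are concatenated into a single device-mapper path.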
def removeDiskImages(vg, id):
rootDisk = '{0}_root'.format(id)
swapDisk = '{0}_swap'.format(id)
dataDisk = '{0}_data'.format(id)
waitForUdev() # some udev trigger sometimes prevents us from removing the logical volumes. This should avoid the internal retries of lvremove.
for disk in [rootDisk, swapDisk, dataDisk]:
_removeDiskImage(vg, id, disk)
def _removeDiskImage(vg, id, disk):
try:
_removeDiskImage_recoverFromFailure_kpartx(vg, id, disk)
lvremove(vg, disk)
_logger.info('Removed disk "{0}" of "vm{1}".'.format(disk, id))
except:
msg = 'Could not remove disk "{0}" of "vm{1}". This may lead to other errors later.'.format(disk, id)
_logger.error(msg)
def _removeDiskImage_recoverFromFailure_kpartx(vg, id, disk):
diskPath = getMapperPath(vg, disk)
if not glob.glob(diskPath + '?'):
return
msg = 'Trying to remove disk "{0}" of "vm{1}" even though there is still at least one partition entry there. Something is broken... Trying to recover.'.format(disk, id)
_logger.error(msg)
retries = 5
for i in range(retries):
try:
kpartxRemove(diskPath)
return
except:
pass
time.sleep(0.5)
msg = 'Failed to recover from kpartx error. Giving up on disk "{0}" of "vm{1}".'.format(disk, id)
_logger.error(msg)
def prepareRootDisk(vg, id, injectorName, authorizedKeys, networkConfig):
expectedPathsOnRootDisk = ['bin', 'etc', 'lib', 'root']
try:
t0 = time.time()
disk = 'root'
blockDev = getMapperPath(vg, id, disk)
with kpartx(blockDev):
for x in glob.glob(getMapperPath(vg, id, disk, '?')):
with mount(x, mountPoint=None) as mountPoint:
checks = [os.path.exists(os.path.join(mountPoint, y)) for y in expectedPathsOnRootDisk]
if not all(checks):
continue
                    # TODO: return fingerprints of public keys
injectIntoRootDisk(injectorName, mountPoint, id, authorizedKeys, networkConfig)
break
dt = time.time() - t0
_logger.info('Prepared root disk of "vm{0}" ({1:.1f}s).'.format(id, dt))
except:
_logger.error('Failed to prepare root disk of "vm{0}".'.format(id))
raise
def prepareSwapDisk(vg, id):
try:
t0 = time.time()
disk = 'swap'
blockDev = getMapperPath(vg, id, disk)
partedCreateLabel(blockDev)
partedCreatePartition(blockDev)
with kpartx(blockDev):
formatPartition(getMapperPath(vg, id, disk, 1), 'swap')
dt = time.time() - t0
_logger.info('Prepared swap disk of "vm{0}" ({1:.1f}s).'.format(id, dt))
except:
_logger.error('Failed to prepare swap disk of "vm{0}".'.format(id))
raise
def prepareDataDisk(vg, id):
try:
t0 = time.time()
disk = 'data'
blockDev = getMapperPath(vg, id, disk)
partedCreateLabel(blockDev)
partedCreatePartition(blockDev)
with kpartx(blockDev):
formatPartition(getMapperPath(vg, id, disk, 1), 'ext4')
dt = time.time() - t0
        _logger.info('Prepared data disk of "vm{0}" ({1:.1f}s).'.format(id, dt))
except:
_logger.error('Failed to prepare data disk of "vm{0}".'.format(id))
raise
|
mattvonrocketstein/smash
|
refs/heads/master
|
smashlib/ipy3x/utils/tokenutil.py
|
1
|
"""Token-related utilities"""
# Copyright (c) IPython Development Team.
# Distributed under the terms of the Modified BSD License.
from __future__ import absolute_import, print_function
from collections import namedtuple
from io import StringIO
from keyword import iskeyword
from . import tokenize2
from .py3compat import cast_unicode_py2
Token = namedtuple('Token', ['token', 'text', 'start', 'end', 'line'])
def generate_tokens(readline):
"""wrap generate_tokens to catch EOF errors"""
try:
for token in tokenize2.generate_tokens(readline):
yield token
except tokenize2.TokenError:
# catch EOF error
return
def line_at_cursor(cell, cursor_pos=0):
"""Return the line in a cell at a given cursor position
Used for calling line-based APIs that don't support multi-line input, yet.
Parameters
----------
cell: text
multiline block of text
cursor_pos: integer
the cursor position
Returns
-------
(line, offset): (text, integer)
The line with the current cursor, and the character offset of the start of the line.
"""
offset = 0
lines = cell.splitlines(True)
for line in lines:
next_offset = offset + len(line)
if next_offset >= cursor_pos:
break
offset = next_offset
else:
line = ""
return (line, offset)
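# Illustrative example (not part of the original module):
# line_at_cursor("ab\ncd", cursor_pos=4) returns ('cd', 3), because the cursor
# falls on the second line, which starts at character offset 3.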
def token_at_cursor(cell, cursor_pos=0):
"""Get the token at a given cursor
Used for introspection.
Parameters
----------
cell : unicode
A block of Python code
cursor_pos : int
The location of the cursor in the block where the token should be found
"""
cell = cast_unicode_py2(cell)
names = []
tokens = []
offset = 0
for tup in generate_tokens(StringIO(cell).readline):
tok = Token(*tup)
# token, text, start, end, line = tup
start_col = tok.start[1]
end_col = tok.end[1]
# allow '|foo' to find 'foo' at the beginning of a line
boundary = cursor_pos + 1 if start_col == 0 else cursor_pos
if offset + start_col >= boundary:
# current token starts after the cursor,
# don't consume it
break
if tok.token == tokenize2.NAME and not iskeyword(tok.text):
if names and tokens and tokens[-1].token == tokenize2.OP and tokens[-1].text == '.':
names[-1] = "%s.%s" % (names[-1], tok.text)
else:
names.append(tok.text)
elif tok.token == tokenize2.OP:
if tok.text == '=' and names:
# don't inspect the lhs of an assignment
names.pop(-1)
if offset + end_col > cursor_pos:
# we found the cursor, stop reading
break
tokens.append(tok)
if tok.token == tokenize2.NEWLINE:
offset += len(tok.line)
if names:
return names[-1]
else:
return ''
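# Illustrative example (not part of the original module):
# token_at_cursor("a = foo.bar", cursor_pos=10) returns 'foo.bar' -- the dotted
# name under the cursor is reassembled, while the left-hand side of the
# assignment ('a') is dropped and not inspected.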
|
apophys/freeipa
|
refs/heads/master
|
ipaserver/dnssec/keysyncer.py
|
1
|
#
# Copyright (C) 2014 FreeIPA Contributors see COPYING for license
#
import logging
import ldap.dn
import os
import dns.name
from ipaplatform.paths import paths
from ipapython import ipautil
from ipaserver.dnssec.syncrepl import SyncReplConsumer
from ipaserver.dnssec.odsmgr import ODSMgr
from ipaserver.dnssec.bindmgr import BINDMgr
logger = logging.getLogger(__name__)
SIGNING_ATTR = 'idnsSecInlineSigning'
OBJCLASS_ATTR = 'objectClass'
class KeySyncer(SyncReplConsumer):
def __init__(self, *args, **kwargs):
# hack
self.api = kwargs['ipa_api']
del kwargs['ipa_api']
# DNSSEC master should have OpenDNSSEC installed
# TODO: Is this the best way?
if os.environ.get('ISMASTER', '0') == '1':
self.ismaster = True
self.odsmgr = ODSMgr()
else:
self.ismaster = False
self.bindmgr = BINDMgr(self.api)
self.init_done = False
self.dnssec_zones = set()
SyncReplConsumer.__init__(self, *args, **kwargs)
def _get_objclass(self, attrs):
"""Get object class.
        The given set of attributes must have exactly one supported object class.
"""
supported_objclasses = set(['idnszone', 'idnsseckey', 'ipk11publickey'])
present_objclasses = set([o.lower() for o in attrs[OBJCLASS_ATTR]]).intersection(supported_objclasses)
assert len(present_objclasses) == 1, attrs[OBJCLASS_ATTR]
return present_objclasses.pop()
def __get_signing_attr(self, attrs):
"""Get SIGNING_ATTR from dictionary with LDAP zone attributes.
Returned value is normalized to TRUE or FALSE, defaults to FALSE."""
values = attrs.get(SIGNING_ATTR, ['FALSE'])
assert len(values) == 1, '%s is expected to be single-valued' \
% SIGNING_ATTR
return values[0].upper()
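    # Illustrative example (not part of the original class): an entry with
    # {'idnsSecInlineSigning': ['true']} yields 'TRUE', while a zone entry
    # without the attribute falls back to the ['FALSE'] default and yields 'FALSE'.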
def __is_dnssec_enabled(self, attrs):
"""Test if LDAP DNS zone with given attributes is DNSSEC enabled."""
return self.__get_signing_attr(attrs) == 'TRUE'
def __is_replica_pubkey(self, attrs):
vals = attrs.get('ipk11label', [])
if len(vals) != 1:
return False
return vals[0].startswith('dnssec-replica:')
def application_add(self, uuid, dn, newattrs):
objclass = self._get_objclass(newattrs)
if objclass == 'idnszone':
self.zone_add(uuid, dn, newattrs)
elif objclass == 'idnsseckey':
self.key_meta_add(uuid, dn, newattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(newattrs):
self.hsm_master_sync()
def application_del(self, uuid, dn, oldattrs):
objclass = self._get_objclass(oldattrs)
if objclass == 'idnszone':
self.zone_del(uuid, dn, oldattrs)
elif objclass == 'idnsseckey':
self.key_meta_del(uuid, dn, oldattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(oldattrs):
self.hsm_master_sync()
def application_sync(self, uuid, dn, newattrs, oldattrs):
objclass = self._get_objclass(oldattrs)
if objclass == 'idnszone':
olddn = ldap.dn.str2dn(oldattrs['dn'])
newdn = ldap.dn.str2dn(newattrs['dn'])
assert olddn == newdn, 'modrdn operation is not supported'
oldval = self.__get_signing_attr(oldattrs)
newval = self.__get_signing_attr(newattrs)
if oldval != newval:
if self.__is_dnssec_enabled(newattrs):
self.zone_add(uuid, olddn, newattrs)
else:
self.zone_del(uuid, olddn, oldattrs)
elif objclass == 'idnsseckey':
self.key_metadata_sync(uuid, dn, oldattrs, newattrs)
elif objclass == 'ipk11publickey' and \
self.__is_replica_pubkey(newattrs):
self.hsm_master_sync()
def syncrepl_refreshdone(self):
        logger.info('Initial LDAP dump is done, synchronizing with ODS and '
'BIND')
self.init_done = True
self.ods_sync()
self.hsm_replica_sync()
self.hsm_master_sync()
self.bindmgr.sync(self.dnssec_zones)
# idnsSecKey wrapper
# Assumption: metadata points to the same key blob all the time,
# i.e. it is not necessary to re-download blobs because of change in DNSSEC
# metadata - DNSSEC flags or timestamps.
def key_meta_add(self, uuid, dn, newattrs):
self.hsm_replica_sync()
self.bindmgr.ldap_event('add', uuid, newattrs)
self.bindmgr_sync(self.dnssec_zones)
def key_meta_del(self, uuid, dn, oldattrs):
self.bindmgr.ldap_event('del', uuid, oldattrs)
self.bindmgr_sync(self.dnssec_zones)
self.hsm_replica_sync()
def key_metadata_sync(self, uuid, dn, oldattrs, newattrs):
self.bindmgr.ldap_event('mod', uuid, newattrs)
self.bindmgr_sync(self.dnssec_zones)
def bindmgr_sync(self, dnssec_zones):
if self.init_done:
self.bindmgr.sync(dnssec_zones)
# idnsZone wrapper
def zone_add(self, uuid, dn, newattrs):
zone = dns.name.from_text(newattrs['idnsname'][0])
if self.__is_dnssec_enabled(newattrs):
self.dnssec_zones.add(zone)
else:
self.dnssec_zones.discard(zone)
if not self.ismaster:
return
if self.__is_dnssec_enabled(newattrs):
self.odsmgr.ldap_event('add', uuid, newattrs)
self.ods_sync()
def zone_del(self, uuid, dn, oldattrs):
zone = dns.name.from_text(oldattrs['idnsname'][0])
self.dnssec_zones.discard(zone)
if not self.ismaster:
return
if self.__is_dnssec_enabled(oldattrs):
self.odsmgr.ldap_event('del', uuid, oldattrs)
self.ods_sync()
def ods_sync(self):
if not self.ismaster:
return
if self.init_done:
self.odsmgr.sync()
# triggered by modification to idnsSecKey objects
def hsm_replica_sync(self):
"""Download keys from LDAP to local HSM."""
if self.ismaster:
return
if not self.init_done:
return
ipautil.run([paths.IPA_DNSKEYSYNCD_REPLICA])
# triggered by modification to ipk11PublicKey objects
def hsm_master_sync(self):
"""Download replica keys from LDAP to local HSM
& upload master and zone keys to LDAP."""
if not self.ismaster:
return
if not self.init_done:
return
ipautil.run([paths.ODS_SIGNER, 'ipa-hsm-update'])
|
palmerjh/iEBE
|
refs/heads/master
|
PlayGround/job-2/iSS/for_paraview/lib152/StructuredPoints.py
|
9
|
#!/usr/bin/env python
"""
Copyright 2001 Pearu Peterson all rights reserved,
Pearu Peterson <pearu@ioc.ee>
Permission to use, modify, and distribute this software is given under the
terms of the LGPL. See http://www.fsf.org
NO WARRANTY IS EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK.
$Revision: 1.1 $
$Date: 2001-05-20 12:51:29 $
Pearu Peterson
"""
import DataSet
import string
class StructuredPoints(DataSet.DataSet):
"""The topology of a dataset is described by
dimensions - int|(1-3)-int sequence (>=1)
origin - number|(1-3)-number sequence
spacing - number|(1-3)-number sequence (>0)
"""
def __init__(self,dimensions,origin=(0,0,0),spacing=(1,1,1)):
self.dimensions = self.get_3_tuple(dimensions,(1,1,1))
if self._check_dimensions():
raise ValueError,'dimensions must be 3-tuple of ints >=1'
self.origin = self.get_3_tuple(origin,(1,1,1))
if self._check_origin():
raise ValueError,'origin must be 3-tuple of numbers'
self.spacing = self.get_3_tuple(spacing,(1,1,1))
if self._check_spacing():
raise ValueError,'spacing must be 3-tuple of positive numbers'
def to_string(self,format = 'ascii'):
ret = ['DATASET STRUCTURED_POINTS',
'DIMENSIONS %s %s %s'%self.dimensions,
'ORIGIN %s %s %s'%self.origin,
'SPACING %s %s %s'%self.spacing]
return string.join(ret,'\n')
if __name__ == "__main__":
print StructuredPoints((2,3,4))
print StructuredPoints((2,3))
print StructuredPoints(5)
print StructuredPoints([2,3,5,6]).get_size()
|
liukaijv/XlsxWriter
|
refs/heads/master
|
xlsxwriter/test/comparison/test_format11.py
|
8
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'format11.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test a vertical and horizontal centered format."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
centered = workbook.add_format({
'align': 'center',
'valign': 'vcenter'
})
worksheet.write('B2', 'Foo', centered)
workbook.close()
self.assertExcelEqual()
|
jwren/intellij-community
|
refs/heads/master
|
python/testData/formatter/wrapTuple.py
|
87
|
abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra, abracadabra = []
|
wikimedia/integration-jenkins-job-builder
|
refs/heads/master
|
jenkins_jobs/modules/hipchat_notif.py
|
2
|
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Enable HipChat notifications of build execution.
Supports hipchat plugin versions < 1.9. Newer versions are automatically
redirected to the publishers module, but it is still recommended that you
convert to the newer module.
:Parameters:
* **enabled** *(bool)*: general cut off switch. If not explicitly set to
``true``, no hipchat parameters are written to XML. For Jenkins HipChat
plugin of version prior to 0.1.5, also enables all build results to be
reported in HipChat room. For later plugin versions, explicit notify-*
setting is required (see below).
* **room** *(str)*: name of HipChat room to post messages to (default '')
.. deprecated:: 1.2.0 Please use 'rooms'.
* **rooms** *(list)*: list of HipChat rooms to post messages to
(default empty)
* **start-notify** *(bool)*: post messages about build start event
.. deprecated:: 1.2.0 use notify-start parameter instead
* **notify-start** *(bool)*: post messages about build start event
(default false)
* **notify-success** *(bool)*: post messages about successful build event
(Jenkins HipChat plugin >= 0.1.5) (default false)
* **notify-aborted** *(bool)*: post messages about aborted build event
(Jenkins HipChat plugin >= 0.1.5) (default false)
* **notify-not-built** *(bool)*: post messages about build set to NOT_BUILT
status (Jenkins HipChat plugin >= 0.1.5). This status code is used in a
multi-stage build (like maven2) where a problem in earlier stage prevented
later stages from building. (default false)
* **notify-unstable** *(bool)*: post messages about unstable build event
(Jenkins HipChat plugin >= 0.1.5) (default false)
* **notify-failure** *(bool)*: post messages about build failure event
(Jenkins HipChat plugin >= 0.1.5) (default false)
* **notify-back-to-normal** *(bool)*: post messages about build being back to
normal after being unstable or failed (Jenkins HipChat plugin >= 0.1.5)
(default false)
Example:
.. literalinclude:: /../../tests/hipchat/fixtures/hipchat001.yaml
:language: yaml
"""
# Enabling hipchat notifications on a job requires specifying the hipchat
# config in job properties, and adding the hipchat notifier to the job's
# publishers list.
# The publisher configuration contains extra details not specified per job:
# - the hipchat authorisation token.
# - the jenkins server url.
# - a default room name/id.
# This complicates matters somewhat since the sensible place to store these
# details is in the global config file.
# The global config object is therefore passed down to the registry object,
# and this object is passed to the HipChat() class initialiser.
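# An illustrative sketch of the global configuration (key names follow the
# get_plugin_config() calls in _load_global_data() below; values are placeholders):
#
#   [hipchat]
#   authtoken = <your-hipchat-auth-token>
#   url = https://api.hipchat.com
#   send-as = Jenkins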
import logging
import pkg_resources
import sys
import xml.etree.ElementTree as XML
from six.moves import configparser
import jenkins_jobs.errors
import jenkins_jobs.modules.base
from jenkins_jobs.modules.helpers import convert_mapping_to_xml
logger = logging.getLogger(__name__)
class HipChat(jenkins_jobs.modules.base.Base):
sequence = 80
def __init__(self, registry):
self.authToken = None
self.jenkinsUrl = None
self.registry = registry
def _load_global_data(self):
"""Load data from the global config object.
This is done lazily to avoid looking up the '[hipchat]' section
unless actually required.
"""
jjb_config = self.registry.jjb_config
if not self.authToken:
try:
self.authToken = jjb_config.get_plugin_config('hipchat',
'authtoken')
# Require that the authtoken is non-null
if self.authToken == '':
raise jenkins_jobs.errors.JenkinsJobsException(
"Hipchat authtoken must not be a blank string")
except (configparser.NoSectionError,
jenkins_jobs.errors.JenkinsJobsException) as e:
logger.fatal("The configuration file needs a hipchat section" +
" containing authtoken:\n{0}".format(e))
sys.exit(1)
self.jenkinsUrl = jjb_config.get_plugin_config('hipchat', 'url')
self.sendAs = jjb_config.get_plugin_config('hipchat', 'send-as')
def gen_xml(self, xml_parent, data):
hipchat = data.get('hipchat')
if not hipchat or not hipchat.get('enabled', True):
return
self._load_global_data()
# convert for compatibility before dispatch
if 'room' in hipchat:
if 'rooms' in hipchat:
logger.warning("Ignoring deprecated 'room' as 'rooms' also "
"defined.")
else:
logger.warning("'room' is deprecated, please use 'rooms'")
hipchat['rooms'] = [hipchat['room']]
plugin_info = self.registry.get_plugin_info("Jenkins HipChat Plugin")
version = pkg_resources.parse_version(plugin_info.get('version', '0'))
if version >= pkg_resources.parse_version("0.1.9"):
publishers = xml_parent.find('publishers')
if publishers is None:
publishers = XML.SubElement(xml_parent, 'publishers')
logger.warning(
"'hipchat' module supports the old plugin versions <1.9, "
"newer versions are supported via the 'publishers' module. "
"Please upgrade you job definition")
component = {'hipchat': hipchat}
return self.registry.dispatch('publisher', publishers, component)
else:
properties = xml_parent.find('properties')
if properties is None:
properties = XML.SubElement(xml_parent, 'properties')
pdefhip = XML.SubElement(properties,
'jenkins.plugins.hipchat.'
'HipChatNotifier_-HipChatJobProperty')
room = XML.SubElement(pdefhip, 'room')
if 'rooms' in hipchat:
room.text = ",".join(hipchat['rooms'])
            # Handle the backwards-compatible 'start-notify' option, but also
            # add an element of standardization with notify-*
if hipchat.get('start-notify'):
logger.warning("'start-notify' is deprecated, please use "
"'notify-start'")
XML.SubElement(pdefhip, 'startNotification').text = str(
hipchat.get('notify-start', hipchat.get('start-notify',
False))).lower()
if version >= pkg_resources.parse_version("0.1.5"):
mapping = [
('notify-success', 'notifySuccess', False),
('notify-aborted', 'notifyAborted', False),
('notify-not-built', 'notifyNotBuilt', False),
('notify-unstable', 'notifyUnstable', False),
('notify-failure', 'notifyFailure', False),
('notify-back-to-normal', 'notifyBackToNormal', False)]
convert_mapping_to_xml(pdefhip,
hipchat, mapping, fail_required=True)
publishers = xml_parent.find('publishers')
if publishers is None:
publishers = XML.SubElement(xml_parent, 'publishers')
hippub = XML.SubElement(publishers,
'jenkins.plugins.hipchat.HipChatNotifier')
if version >= pkg_resources.parse_version("0.1.8"):
XML.SubElement(hippub, 'buildServerUrl').text = self.jenkinsUrl
XML.SubElement(hippub, 'sendAs').text = self.sendAs
else:
XML.SubElement(hippub, 'jenkinsUrl').text = self.jenkinsUrl
XML.SubElement(hippub, 'authToken').text = self.authToken
# The room specified here is the default room. The default is
# redundant in this case since a room must be specified. Leave empty.
XML.SubElement(hippub, 'room').text = ''
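# --- Illustrative sketch, not part of the original module ---
# A standalone approximation of the job-property XML that the old-plugin
# branch of gen_xml() builds, using only the standard library; the room
# names below are example data, not values from any real job definition.
def _example_hipchat_property_xml():
    import xml.etree.ElementTree as ET
    project = ET.Element('project')
    properties = ET.SubElement(project, 'properties')
    pdefhip = ET.SubElement(
        properties,
        'jenkins.plugins.hipchat.HipChatNotifier_-HipChatJobProperty')
    ET.SubElement(pdefhip, 'room').text = ','.join(['Build Status', 'Ops'])
    ET.SubElement(pdefhip, 'startNotification').text = 'true'
    return ET.tostring(project)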
|
rapicastillo/event-etl-bsd
|
refs/heads/master
|
events_etl/migrations/0002_auto_20181008_1754.py
|
1
|
# Generated by Django 2.0 on 2018-10-08 17:54
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('events_etl', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Campaign',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('description', models.TextField()),
('url', models.CharField(max_length=200)),
('access_key', models.CharField(max_length=100)),
('secret_key', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Event',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('bsd_id', models.IntegerField(null=True)),
('url', models.CharField(blank=True, max_length=500, null=True)),
('venue', models.CharField(blank=True, max_length=500, null=True)),
('address', models.CharField(blank=True, max_length=500, null=True)),
('city', models.CharField(blank=True, max_length=500, null=True)),
('state', models.CharField(blank=True, max_length=500, null=True)),
('zipcode', models.CharField(blank=True, max_length=100, null=True)),
('latitude', models.FloatField(null=True)),
('longitude', models.FloatField(null=True)),
('start_day', models.DateField(null=True)),
('start_time', models.TimeField(null=True)),
('timezone', models.CharField(blank=True, max_length=50, null=True)),
],
),
migrations.CreateModel(
name='EventSource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('description', models.TextField()),
],
),
migrations.DeleteModel(
name='Greeting',
),
migrations.AddField(
model_name='campaign',
name='source_type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='events_etl.EventSource'),
),
]
|
BurtBiel/autorest
|
refs/heads/master
|
AutoRest/Generators/Python/Python.Tests/Expected/AcceptanceTests/BodyComplex/autorestcomplextestservice/models/salmon.py
|
4
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .fish import Fish
class Salmon(Fish):
"""Salmon
:param species:
:type species: str
:param length:
:type length: float
:param siblings:
:type siblings: list of :class:`Fish
<fixtures.acceptancetestsbodycomplex.models.Fish>`
:param fishtype: Polymorphic Discriminator
:type fishtype: str
:param location:
:type location: str
:param iswild:
:type iswild: bool
"""
_validation = {
'length': {'required': True},
'fishtype': {'required': True},
}
_attribute_map = {
'species': {'key': 'species', 'type': 'str'},
'length': {'key': 'length', 'type': 'float'},
'siblings': {'key': 'siblings', 'type': '[Fish]'},
'fishtype': {'key': 'fishtype', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'iswild': {'key': 'iswild', 'type': 'bool'},
}
def __init__(self, length, species=None, siblings=None, location=None, iswild=None):
super(Salmon, self).__init__(species=species, length=length, siblings=siblings)
self.location = location
self.iswild = iswild
self.fishtype = 'salmon'
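# --- Illustrative sketch, not part of the generated model ---
# Constructing a Salmon directly: per _validation above only 'length' is
# required, and 'fishtype' is fixed by the constructor. Values are examples.
def _example_salmon():
    king = Salmon(length=96.5, species='king', location='alaska', iswild=True)
    assert king.fishtype == 'salmon'
    assert king.length == 96.5
    return king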
|
KaranToor/MA450
|
refs/heads/master
|
google-cloud-sdk/.install/.backup/lib/googlecloudsdk/command_lib/app/output_helpers.py
|
3
|
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module holds exceptions raised by commands."""
from googlecloudsdk.api_lib.app import deploy_command_util
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_attr
from googlecloudsdk.core.resource import resource_printer
DEPLOY_MESSAGE_TEMPLATE = u"""\
{project}/{service}/{version} (from [{file}])
"""
DEPLOYED_URL_TEMPLATE = u"""\
Deploying to URL: [{url}]
"""
PROMOTE_MESSAGE_TEMPLATE = u"""\
(add --promote if you also want to make this service available from
[{default_url}])
"""
RUNTIME_MISMATCH_MSG = (u"You've generated a Dockerfile that may be customized "
u'for your application. To use this Dockerfile, '
u'the runtime field in [{0}] must be set to custom.')
def DisplayProposedDeployment(app, project, app_config, version, promote):
"""Prints the details of the proposed deployment.
Args:
app: Application resource for the current application (required if any
services are deployed, otherwise ignored).
project: The name of the current project.
app_config: yaml_parsing.AppConfigSet, The configurations being deployed.
version: The version identifier of the application to be deployed.
promote: Whether the newly deployed version will receive all traffic
(this affects deployed URLs).
Returns:
dict (str->str), a mapping of service names to deployed service URLs
This includes information on to-be-deployed services (including service name,
version number, and deployed URLs) as well as configurations.
"""
deployed_urls = {}
if app_config.Services():
if app is None:
raise TypeError('If services are deployed, must provide `app` parameter.')
deploy_messages = []
for service, info in app_config.Services().iteritems():
use_ssl = deploy_command_util.UseSsl(info.parsed.handlers)
deploy_message = DEPLOY_MESSAGE_TEMPLATE.format(
project=project, service=service, version=version,
file=console_attr.DecodeFromInput(info.file))
url = deploy_command_util.GetAppHostname(
app=app, service=info.module, version=None if promote else version,
use_ssl=use_ssl)
deployed_urls[service] = url
deploy_message += DEPLOYED_URL_TEMPLATE.format(url=url)
if not promote:
default_url = deploy_command_util.GetAppHostname(
app=app, service=info.module, use_ssl=use_ssl)
deploy_message += PROMOTE_MESSAGE_TEMPLATE.format(
default_url=default_url)
deploy_messages.append(deploy_message)
fmt = 'list[title="You are about to deploy the following services:"]'
resource_printer.Print(deploy_messages, fmt, out=log.status)
if app_config.Configs():
DisplayProposedConfigDeployments(project, app_config.Configs().values())
return deployed_urls
def DisplayProposedConfigDeployments(project, configs):
"""Prints the details of the proposed config deployments.
Args:
project: The name of the current project.
configs: [yaml_parsing.ConfigYamlInfo], The configurations being
deployed.
"""
fmt = 'list[title="You are about to update the following configurations:"]'
resource_printer.Print(
[u'{0}/{1} (from [{2}])'.format(project, c.config, c.file)
for c in configs], fmt, out=log.status)
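# --- Illustrative sketch, not part of the original module ---
# DisplayProposedConfigDeployments only reads the .config and .file
# attributes of each item, so a stand-in namedtuple is enough to preview the
# printed listing; the project id and file names below are made up.
def _example_config_listing():
    import collections
    FakeConfig = collections.namedtuple('FakeConfig', ['config', 'file'])
    DisplayProposedConfigDeployments(
        'my-example-project',
        [FakeConfig(config='cron', file='cron.yaml'),
         FakeConfig(config='index', file='index.yaml')])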
|
joerg-krause/upmpdcli
|
refs/heads/master
|
rdpl2stream/RamPlaylistDecoder.py
|
2
|
##########################################################################
# Copyright 2009 Carlos Ribeiro
#
# This file is part of Radio Tray
#
# Radio Tray is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 1 of the License, or
# (at your option) any later version.
#
# Radio Tray is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radio Tray. If not, see <http://www.gnu.org/licenses/>.
#
##########################################################################
import urllib2
from lib.common import USER_AGENT
import logging
class RamPlaylistDecoder:
def __init__(self):
self.log = logging.getLogger('radiotray')
self.log.debug('RAM playlist decoder')
def isStreamValid(self, contentType, firstBytes):
if('audio/x-pn-realaudio' in contentType or 'audio/vnd.rn-realaudio' in contentType):
self.log.info('Stream is readable by RAM Playlist Decoder')
return True
else:
return False
def extractPlaylist(self, url):
self.log.info('Downloading playlist...')
req = urllib2.Request(url)
req.add_header('User-Agent', USER_AGENT)
f = urllib2.urlopen(req)
        data = f.read()
        f.close()
        self.log.info('Playlist downloaded')
        self.log.info('Decoding playlist...')
        playlist = []
        for line in data.splitlines():
            # skip comment lines; blank lines are dropped by the strip check
            if not line.startswith("#"):
                stripped = line.strip()
                if stripped:
                    playlist.append(stripped)
return playlist
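# --- Illustrative sketch, not part of the original module ---
# Typical use of the decoder: check the advertised content type first, then
# fetch and decode the playlist. The URL and content type are placeholders.
def _example_ram_decoding():
    decoder = RamPlaylistDecoder()
    if decoder.isStreamValid('audio/x-pn-realaudio', firstBytes=None):
        return decoder.extractPlaylist('http://example.com/stream.ram')
    return []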
|
JNRowe/toolz
|
refs/heads/master
|
setup.py
|
3
|
#!/usr/bin/env python
from os.path import exists
from setuptools import setup
import toolz
setup(name='toolz',
version=toolz.__version__,
description='List processing tools and functional utilities',
url='http://github.com/pytoolz/toolz/',
author='https://raw.github.com/pytoolz/toolz/master/AUTHORS.md',
maintainer='Matthew Rocklin',
maintainer_email='mrocklin@gmail.com',
license='BSD',
keywords='functional utility itertools functools',
packages=['toolz',
'toolz.itertoolz',
'toolz.functoolz',
'toolz.dicttoolz'],
long_description=open('README.md').read() if exists("README.md") else "",
zip_safe=False)
|
nthiep/global-ssh-server
|
refs/heads/master
|
lib/python2.7/site-packages/django/forms/formsets.py
|
79
|
from __future__ import absolute_import, unicode_literals
from django.core.exceptions import ValidationError
from django.forms import Form
from django.forms.fields import IntegerField, BooleanField
from django.forms.util import ErrorList
from django.forms.widgets import Media, HiddenInput
from django.utils.encoding import python_2_unicode_compatible
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.six.moves import xrange
from django.utils.translation import ugettext as _
__all__ = ('BaseFormSet', 'all_valid')
# special field names
TOTAL_FORM_COUNT = 'TOTAL_FORMS'
INITIAL_FORM_COUNT = 'INITIAL_FORMS'
MAX_NUM_FORM_COUNT = 'MAX_NUM_FORMS'
ORDERING_FIELD_NAME = 'ORDER'
DELETION_FIELD_NAME = 'DELETE'
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
"""
``ManagementForm`` is used to keep track of how many form instances
are displayed on the page. If adding new forms via javascript, you should
increment the count field of this form as well.
"""
def __init__(self, *args, **kwargs):
self.base_fields[TOTAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
self.base_fields[INITIAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
self.base_fields[MAX_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput)
super(ManagementForm, self).__init__(*args, **kwargs)
@python_2_unicode_compatible
class BaseFormSet(object):
"""
A collection of instances of the same Form class.
"""
def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
initial=None, error_class=ErrorList):
self.is_bound = data is not None or files is not None
self.prefix = prefix or self.get_default_prefix()
self.auto_id = auto_id
self.data = data or {}
self.files = files or {}
self.initial = initial
self.error_class = error_class
self._errors = None
self._non_form_errors = None
# construct the forms in the formset
self._construct_forms()
def __str__(self):
return self.as_table()
def __iter__(self):
"""Yields the forms in the order they should be rendered"""
return iter(self.forms)
def __getitem__(self, index):
"""Returns the form at the given index, based on the rendering order"""
return self.forms[index]
def __len__(self):
return len(self.forms)
def __bool__(self):
"""All formsets have a management form which is not included in the length"""
return True
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
@property
def management_form(self):
"""Returns the ManagementForm instance for this FormSet."""
if self.is_bound:
form = ManagementForm(self.data, auto_id=self.auto_id, prefix=self.prefix)
if not form.is_valid():
raise ValidationError('ManagementForm data is missing or has been tampered with')
else:
form = ManagementForm(auto_id=self.auto_id, prefix=self.prefix, initial={
TOTAL_FORM_COUNT: self.total_form_count(),
INITIAL_FORM_COUNT: self.initial_form_count(),
MAX_NUM_FORM_COUNT: self.max_num
})
return form
def total_form_count(self):
"""Returns the total number of forms in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[TOTAL_FORM_COUNT]
else:
initial_forms = self.initial_form_count()
total_forms = initial_forms + self.extra
# Allow all existing related objects/inlines to be displayed,
# but don't allow extra beyond max_num.
if initial_forms > self.max_num >= 0:
total_forms = initial_forms
elif total_forms > self.max_num >= 0:
total_forms = self.max_num
return total_forms
def initial_form_count(self):
"""Returns the number of forms that are required in this FormSet."""
if self.is_bound:
return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
else:
# Use the length of the initial data if it's there, 0 otherwise.
initial_forms = self.initial and len(self.initial) or 0
if initial_forms > self.max_num >= 0:
initial_forms = self.max_num
return initial_forms
def _construct_forms(self):
# instantiate all the forms and put them in self.forms
self.forms = []
for i in xrange(min(self.total_form_count(), self.absolute_max)):
self.forms.append(self._construct_form(i))
def _construct_form(self, i, **kwargs):
"""
Instantiates and returns the i-th form instance in a formset.
"""
defaults = {
'auto_id': self.auto_id,
'prefix': self.add_prefix(i),
'error_class': self.error_class,
}
if self.is_bound:
defaults['data'] = self.data
defaults['files'] = self.files
if self.initial and not 'initial' in kwargs:
try:
defaults['initial'] = self.initial[i]
except IndexError:
pass
# Allow extra forms to be empty.
if i >= self.initial_form_count():
defaults['empty_permitted'] = True
defaults.update(kwargs)
form = self.form(**defaults)
self.add_fields(form, i)
return form
@property
def initial_forms(self):
"""Return a list of all the initial forms in this formset."""
return self.forms[:self.initial_form_count()]
@property
def extra_forms(self):
"""Return a list of all the extra forms in this formset."""
return self.forms[self.initial_form_count():]
@property
def empty_form(self):
form = self.form(
auto_id=self.auto_id,
prefix=self.add_prefix('__prefix__'),
empty_permitted=True,
)
self.add_fields(form, None)
return form
# Maybe this should just go away?
@property
def cleaned_data(self):
"""
Returns a list of form.cleaned_data dicts for every form in self.forms.
"""
if not self.is_valid():
raise AttributeError("'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__)
return [form.cleaned_data for form in self.forms]
@property
def deleted_forms(self):
"""
Returns a list of forms that have been marked for deletion. Raises an
AttributeError if deletion is not allowed.
"""
if not self.is_valid() or not self.can_delete:
raise AttributeError("'%s' object has no attribute 'deleted_forms'" % self.__class__.__name__)
# construct _deleted_form_indexes which is just a list of form indexes
# that have had their deletion widget set to True
if not hasattr(self, '_deleted_form_indexes'):
self._deleted_form_indexes = []
for i in range(0, self.total_form_count()):
form = self.forms[i]
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
if self._should_delete_form(form):
self._deleted_form_indexes.append(i)
return [self.forms[i] for i in self._deleted_form_indexes]
@property
def ordered_forms(self):
"""
Returns a list of form in the order specified by the incoming data.
Raises an AttributeError if ordering is not allowed.
"""
if not self.is_valid() or not self.can_order:
raise AttributeError("'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__)
# Construct _ordering, which is a list of (form_index, order_field_value)
# tuples. After constructing this list, we'll sort it by order_field_value
# so we have a way to get to the form indexes in the order specified
# by the form data.
if not hasattr(self, '_ordering'):
self._ordering = []
for i in range(0, self.total_form_count()):
form = self.forms[i]
# if this is an extra form and hasn't changed, don't consider it
if i >= self.initial_form_count() and not form.has_changed():
continue
# don't add data marked for deletion to self.ordered_data
if self.can_delete and self._should_delete_form(form):
continue
self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
# After we're done populating self._ordering, sort it.
# A sort function to order things numerically ascending, but
# None should be sorted below anything else. Allowing None as
# a comparison value makes it so we can leave ordering fields
# blank.
def compare_ordering_key(k):
if k[1] is None:
return (1, 0) # +infinity, larger than any number
return (0, k[1])
self._ordering.sort(key=compare_ordering_key)
# Return a list of form.cleaned_data dicts in the order specified by
# the form data.
return [self.forms[i[0]] for i in self._ordering]
@classmethod
def get_default_prefix(cls):
return 'form'
def non_form_errors(self):
"""
Returns an ErrorList of errors that aren't associated with a particular
form -- i.e., from formset.clean(). Returns an empty ErrorList if there
are none.
"""
if self._non_form_errors is not None:
return self._non_form_errors
return self.error_class()
@property
def errors(self):
"""
Returns a list of form.errors for every form in self.forms.
"""
if self._errors is None:
self.full_clean()
return self._errors
def _should_delete_form(self, form):
"""
Returns whether or not the form was marked for deletion.
"""
return form.cleaned_data.get(DELETION_FIELD_NAME, False)
def is_valid(self):
"""
Returns True if every form in self.forms is valid.
"""
if not self.is_bound:
return False
# We loop over every form.errors here rather than short circuiting on the
# first failure to make sure validation gets triggered for every form.
forms_valid = True
err = self.errors
for i in range(0, self.total_form_count()):
form = self.forms[i]
if self.can_delete:
if self._should_delete_form(form):
# This form is going to be deleted so any of its errors
# should not cause the entire formset to be invalid.
continue
forms_valid &= form.is_valid()
return forms_valid and not bool(self.non_form_errors())
def full_clean(self):
"""
Cleans all of self.data and populates self._errors.
"""
self._errors = []
if not self.is_bound: # Stop further processing.
return
for i in range(0, self.total_form_count()):
form = self.forms[i]
self._errors.append(form.errors)
# Give self.clean() a chance to do cross-form validation.
try:
self.clean()
except ValidationError as e:
self._non_form_errors = self.error_class(e.messages)
def clean(self):
"""
Hook for doing any extra formset-wide cleaning after Form.clean() has
been called on every form. Any ValidationError raised by this method
        will not be associated with a particular form; it will be accessible
via formset.non_form_errors()
"""
pass
def has_changed(self):
"""
Returns true if data in any form differs from initial.
"""
return any(form.has_changed() for form in self)
def add_fields(self, form, index):
"""A hook for adding extra fields on to each form instance."""
if self.can_order:
# Only pre-fill the ordering field for initial forms.
if index is not None and index < self.initial_form_count():
form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), initial=index+1, required=False)
else:
form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), required=False)
if self.can_delete:
form.fields[DELETION_FIELD_NAME] = BooleanField(label=_('Delete'), required=False)
def add_prefix(self, index):
return '%s-%s' % (self.prefix, index)
def is_multipart(self):
"""
Returns True if the formset needs to be multipart, i.e. it
has FileInput. Otherwise, False.
"""
if self.forms:
return self.forms[0].is_multipart()
else:
return self.empty_form.is_multipart()
@property
def media(self):
# All the forms on a FormSet are the same, so you only need to
# interrogate the first form for media.
if self.forms:
return self.forms[0].media
else:
return self.empty_form.media
def as_table(self):
"Returns this formset rendered as HTML <tr>s -- excluding the <table></table>."
# XXX: there is no semantic division between forms here, there
# probably should be. It might make sense to render each form as a
# table row with each field as a td.
forms = ' '.join([form.as_table() for form in self])
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def as_p(self):
"Returns this formset rendered as HTML <p>s."
forms = ' '.join([form.as_p() for form in self])
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def as_ul(self):
"Returns this formset rendered as HTML <li>s."
forms = ' '.join([form.as_ul() for form in self])
return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False,
can_delete=False, max_num=None):
"""Return a FormSet for the given form class."""
if max_num is None:
max_num = DEFAULT_MAX_NUM
# hard limit on forms instantiated, to prevent memory-exhaustion attacks
# limit defaults to DEFAULT_MAX_NUM, but developer can increase it via max_num
absolute_max = max(DEFAULT_MAX_NUM, max_num)
attrs = {'form': form, 'extra': extra,
'can_order': can_order, 'can_delete': can_delete,
'max_num': max_num, 'absolute_max': absolute_max}
return type(form.__name__ + str('FormSet'), (formset,), attrs)
def all_valid(formsets):
"""Returns true if every formset in formsets is valid."""
valid = True
for formset in formsets:
if not formset.is_valid():
valid = False
return valid
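# --- Illustrative sketch, not part of Django itself ---
# Minimal standalone use of formset_factory: bound data must include the
# management-form keys that ManagementForm defines above. Field and prefix
# names are examples; exact behaviour depends on the Django version in use.
def _example_formset_usage():
    from django.conf import settings
    if not settings.configured:
        settings.configure(USE_I18N=False, USE_L10N=False)
    from django.forms import CharField
    class ArticleForm(Form):
        title = CharField()
    ArticleFormSet = formset_factory(ArticleForm, extra=0)
    data = {
        'form-TOTAL_FORMS': '2',
        'form-INITIAL_FORMS': '0',
        'form-MAX_NUM_FORMS': '1000',
        'form-0-title': 'first',
        'form-1-title': 'second',
    }
    formset = ArticleFormSet(data)
    return formset.is_valid(), [form.cleaned_data for form in formset]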
|
10clouds/edx-platform
|
refs/heads/dev
|
lms/djangoapps/teams/urls.py
|
129
|
"""Defines the URL routes for this app."""
from django.conf.urls import patterns, url
from django.contrib.auth.decorators import login_required
from .views import TeamsDashboardView
urlpatterns = patterns(
'teams.views',
url(r"^/$", login_required(TeamsDashboardView.as_view()), name="teams_dashboard")
)
|
gangadhar-kadam/adb-erp
|
refs/heads/master
|
stock/doctype/batch/batch.py
|
483
|
# ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import webnotes
class DocType:
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
|
WillisXChen/django-oscar
|
refs/heads/master
|
oscar/lib/python2.7/site-packages/requests/packages/chardet/jpcntx.py
|
1776
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .compat import wrap_ord
NUM_OF_CATEGORY = 6
DONT_KNOW = -1
ENOUGH_REL_THRESHOLD = 100
MAX_REL_THRESHOLD = 1000
MINIMUM_DATA_THRESHOLD = 4
# This is hiragana 2-char sequence table, the number in each cell represents its frequency category
jp2CharContext = (
(0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1),
(2,4,0,4,0,3,0,4,0,3,4,4,4,2,4,3,3,4,3,2,3,3,4,2,3,3,3,2,4,1,4,3,3,1,5,4,3,4,3,4,3,5,3,0,3,5,4,2,0,3,1,0,3,3,0,3,3,0,1,1,0,4,3,0,3,3,0,4,0,2,0,3,5,5,5,5,4,0,4,1,0,3,4),
(0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2),
(0,4,0,5,0,5,0,4,0,4,5,4,4,3,5,3,5,1,5,3,4,3,4,4,3,4,3,3,4,3,5,4,4,3,5,5,3,5,5,5,3,5,5,3,4,5,5,3,1,3,2,0,3,4,0,4,2,0,4,2,1,5,3,2,3,5,0,4,0,2,0,5,4,4,5,4,5,0,4,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,4,0,3,0,3,0,4,5,4,3,3,3,3,4,3,5,4,4,3,5,4,4,3,4,3,4,4,4,4,5,3,4,4,3,4,5,5,4,5,5,1,4,5,4,3,0,3,3,1,3,3,0,4,4,0,3,3,1,5,3,3,3,5,0,4,0,3,0,4,4,3,4,3,3,0,4,1,1,3,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,4,0,3,0,3,0,4,0,3,4,4,3,2,2,1,2,1,3,1,3,3,3,3,3,4,3,1,3,3,5,3,3,0,4,3,0,5,4,3,3,5,4,4,3,4,4,5,0,1,2,0,1,2,0,2,2,0,1,0,0,5,2,2,1,4,0,3,0,1,0,4,4,3,5,4,3,0,2,1,0,4,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,5,0,4,0,2,1,4,4,2,4,1,4,2,4,2,4,3,3,3,4,3,3,3,3,1,4,2,3,3,3,1,4,4,1,1,1,4,3,3,2,0,2,4,3,2,0,3,3,0,3,1,1,0,0,0,3,3,0,4,2,2,3,4,0,4,0,3,0,4,4,5,3,4,4,0,3,0,0,1,4),
(1,4,0,4,0,4,0,4,0,3,5,4,4,3,4,3,5,4,3,3,4,3,5,4,4,4,4,3,4,2,4,3,3,1,5,4,3,2,4,5,4,5,5,4,4,5,4,4,0,3,2,2,3,3,0,4,3,1,3,2,1,4,3,3,4,5,0,3,0,2,0,4,5,5,4,5,4,0,4,0,0,5,4),
(0,5,0,5,0,4,0,3,0,4,4,3,4,3,3,3,4,0,4,4,4,3,4,3,4,3,3,1,4,2,4,3,4,0,5,4,1,4,5,4,4,5,3,2,4,3,4,3,2,4,1,3,3,3,2,3,2,0,4,3,3,4,3,3,3,4,0,4,0,3,0,4,5,4,4,4,3,0,4,1,0,1,3),
(0,3,1,4,0,3,0,2,0,3,4,4,3,1,4,2,3,3,4,3,4,3,4,3,4,4,3,2,3,1,5,4,4,1,4,4,3,5,4,4,3,5,5,4,3,4,4,3,1,2,3,1,2,2,0,3,2,0,3,1,0,5,3,3,3,4,3,3,3,3,4,4,4,4,5,4,2,0,3,3,2,4,3),
(0,2,0,3,0,1,0,1,0,0,3,2,0,0,2,0,1,0,2,1,3,3,3,1,2,3,1,0,1,0,4,2,1,1,3,3,0,4,3,3,1,4,3,3,0,3,3,2,0,0,0,0,1,0,0,2,0,0,0,0,0,4,1,0,2,3,2,2,2,1,3,3,3,4,4,3,2,0,3,1,0,3,3),
(0,4,0,4,0,3,0,3,0,4,4,4,3,3,3,3,3,3,4,3,4,2,4,3,4,3,3,2,4,3,4,5,4,1,4,5,3,5,4,5,3,5,4,0,3,5,5,3,1,3,3,2,2,3,0,3,4,1,3,3,2,4,3,3,3,4,0,4,0,3,0,4,5,4,4,5,3,0,4,1,0,3,4),
(0,2,0,3,0,3,0,0,0,2,2,2,1,0,1,0,0,0,3,0,3,0,3,0,1,3,1,0,3,1,3,3,3,1,3,3,3,0,1,3,1,3,4,0,0,3,1,1,0,3,2,0,0,0,0,1,3,0,1,0,0,3,3,2,0,3,0,0,0,0,0,3,4,3,4,3,3,0,3,0,0,2,3),
(2,3,0,3,0,2,0,1,0,3,3,4,3,1,3,1,1,1,3,1,4,3,4,3,3,3,0,0,3,1,5,4,3,1,4,3,2,5,5,4,4,4,4,3,3,4,4,4,0,2,1,1,3,2,0,1,2,0,0,1,0,4,1,3,3,3,0,3,0,1,0,4,4,4,5,5,3,0,2,0,0,4,4),
(0,2,0,1,0,3,1,3,0,2,3,3,3,0,3,1,0,0,3,0,3,2,3,1,3,2,1,1,0,0,4,2,1,0,2,3,1,4,3,2,0,4,4,3,1,3,1,3,0,1,0,0,1,0,0,0,1,0,0,0,0,4,1,1,1,2,0,3,0,0,0,3,4,2,4,3,2,0,1,0,0,3,3),
(0,1,0,4,0,5,0,4,0,2,4,4,2,3,3,2,3,3,5,3,3,3,4,3,4,2,3,0,4,3,3,3,4,1,4,3,2,1,5,5,3,4,5,1,3,5,4,2,0,3,3,0,1,3,0,4,2,0,1,3,1,4,3,3,3,3,0,3,0,1,0,3,4,4,4,5,5,0,3,0,1,4,5),
(0,2,0,3,0,3,0,0,0,2,3,1,3,0,4,0,1,1,3,0,3,4,3,2,3,1,0,3,3,2,3,1,3,0,2,3,0,2,1,4,1,2,2,0,0,3,3,0,0,2,0,0,0,1,0,0,0,0,2,2,0,3,2,1,3,3,0,2,0,2,0,0,3,3,1,2,4,0,3,0,2,2,3),
(2,4,0,5,0,4,0,4,0,2,4,4,4,3,4,3,3,3,1,2,4,3,4,3,4,4,5,0,3,3,3,3,2,0,4,3,1,4,3,4,1,4,4,3,3,4,4,3,1,2,3,0,4,2,0,4,1,0,3,3,0,4,3,3,3,4,0,4,0,2,0,3,5,3,4,5,2,0,3,0,0,4,5),
(0,3,0,4,0,1,0,1,0,1,3,2,2,1,3,0,3,0,2,0,2,0,3,0,2,0,0,0,1,0,1,1,0,0,3,1,0,0,0,4,0,3,1,0,2,1,3,0,0,0,0,0,0,3,0,0,0,0,0,0,0,4,2,2,3,1,0,3,0,0,0,1,4,4,4,3,0,0,4,0,0,1,4),
(1,4,1,5,0,3,0,3,0,4,5,4,4,3,5,3,3,4,4,3,4,1,3,3,3,3,2,1,4,1,5,4,3,1,4,4,3,5,4,4,3,5,4,3,3,4,4,4,0,3,3,1,2,3,0,3,1,0,3,3,0,5,4,4,4,4,4,4,3,3,5,4,4,3,3,5,4,0,3,2,0,4,4),
(0,2,0,3,0,1,0,0,0,1,3,3,3,2,4,1,3,0,3,1,3,0,2,2,1,1,0,0,2,0,4,3,1,0,4,3,0,4,4,4,1,4,3,1,1,3,3,1,0,2,0,0,1,3,0,0,0,0,2,0,0,4,3,2,4,3,5,4,3,3,3,4,3,3,4,3,3,0,2,1,0,3,3),
(0,2,0,4,0,3,0,2,0,2,5,5,3,4,4,4,4,1,4,3,3,0,4,3,4,3,1,3,3,2,4,3,0,3,4,3,0,3,4,4,2,4,4,0,4,5,3,3,2,2,1,1,1,2,0,1,5,0,3,3,2,4,3,3,3,4,0,3,0,2,0,4,4,3,5,5,0,0,3,0,2,3,3),
(0,3,0,4,0,3,0,1,0,3,4,3,3,1,3,3,3,0,3,1,3,0,4,3,3,1,1,0,3,0,3,3,0,0,4,4,0,1,5,4,3,3,5,0,3,3,4,3,0,2,0,1,1,1,0,1,3,0,1,2,1,3,3,2,3,3,0,3,0,1,0,1,3,3,4,4,1,0,1,2,2,1,3),
(0,1,0,4,0,4,0,3,0,1,3,3,3,2,3,1,1,0,3,0,3,3,4,3,2,4,2,0,1,0,4,3,2,0,4,3,0,5,3,3,2,4,4,4,3,3,3,4,0,1,3,0,0,1,0,0,1,0,0,0,0,4,2,3,3,3,0,3,0,0,0,4,4,4,5,3,2,0,3,3,0,3,5),
(0,2,0,3,0,0,0,3,0,1,3,0,2,0,0,0,1,0,3,1,1,3,3,0,0,3,0,0,3,0,2,3,1,0,3,1,0,3,3,2,0,4,2,2,0,2,0,0,0,4,0,0,0,0,0,0,0,0,0,0,0,2,1,2,0,1,0,1,0,0,0,1,3,1,2,0,0,0,1,0,0,1,4),
(0,3,0,3,0,5,0,1,0,2,4,3,1,3,3,2,1,1,5,2,1,0,5,1,2,0,0,0,3,3,2,2,3,2,4,3,0,0,3,3,1,3,3,0,2,5,3,4,0,3,3,0,1,2,0,2,2,0,3,2,0,2,2,3,3,3,0,2,0,1,0,3,4,4,2,5,4,0,3,0,0,3,5),
(0,3,0,3,0,3,0,1,0,3,3,3,3,0,3,0,2,0,2,1,1,0,2,0,1,0,0,0,2,1,0,0,1,0,3,2,0,0,3,3,1,2,3,1,0,3,3,0,0,1,0,0,0,0,0,2,0,0,0,0,0,2,3,1,2,3,0,3,0,1,0,3,2,1,0,4,3,0,1,1,0,3,3),
(0,4,0,5,0,3,0,3,0,4,5,5,4,3,5,3,4,3,5,3,3,2,5,3,4,4,4,3,4,3,4,5,5,3,4,4,3,4,4,5,4,4,4,3,4,5,5,4,2,3,4,2,3,4,0,3,3,1,4,3,2,4,3,3,5,5,0,3,0,3,0,5,5,5,5,4,4,0,4,0,1,4,4),
(0,4,0,4,0,3,0,3,0,3,5,4,4,2,3,2,5,1,3,2,5,1,4,2,3,2,3,3,4,3,3,3,3,2,5,4,1,3,3,5,3,4,4,0,4,4,3,1,1,3,1,0,2,3,0,2,3,0,3,0,0,4,3,1,3,4,0,3,0,2,0,4,4,4,3,4,5,0,4,0,0,3,4),
(0,3,0,3,0,3,1,2,0,3,4,4,3,3,3,0,2,2,4,3,3,1,3,3,3,1,1,0,3,1,4,3,2,3,4,4,2,4,4,4,3,4,4,3,2,4,4,3,1,3,3,1,3,3,0,4,1,0,2,2,1,4,3,2,3,3,5,4,3,3,5,4,4,3,3,0,4,0,3,2,2,4,4),
(0,2,0,1,0,0,0,0,0,1,2,1,3,0,0,0,0,0,2,0,1,2,1,0,0,1,0,0,0,0,3,0,0,1,0,1,1,3,1,0,0,0,1,1,0,1,1,0,0,0,0,0,2,0,0,0,0,0,0,0,0,1,1,2,2,0,3,4,0,0,0,1,1,0,0,1,0,0,0,0,0,1,1),
(0,1,0,0,0,1,0,0,0,0,4,0,4,1,4,0,3,0,4,0,3,0,4,0,3,0,3,0,4,1,5,1,4,0,0,3,0,5,0,5,2,0,1,0,0,0,2,1,4,0,1,3,0,0,3,0,0,3,1,1,4,1,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0),
(1,4,0,5,0,3,0,2,0,3,5,4,4,3,4,3,5,3,4,3,3,0,4,3,3,3,3,3,3,2,4,4,3,1,3,4,4,5,4,4,3,4,4,1,3,5,4,3,3,3,1,2,2,3,3,1,3,1,3,3,3,5,3,3,4,5,0,3,0,3,0,3,4,3,4,4,3,0,3,0,2,4,3),
(0,1,0,4,0,0,0,0,0,1,4,0,4,1,4,2,4,0,3,0,1,0,1,0,0,0,0,0,2,0,3,1,1,1,0,3,0,0,0,1,2,1,0,0,1,1,1,1,0,1,0,0,0,1,0,0,3,0,0,0,0,3,2,0,2,2,0,1,0,0,0,2,3,2,3,3,0,0,0,0,2,1,0),
(0,5,1,5,0,3,0,3,0,5,4,4,5,1,5,3,3,0,4,3,4,3,5,3,4,3,3,2,4,3,4,3,3,0,3,3,1,4,4,3,4,4,4,3,4,5,5,3,2,3,1,1,3,3,1,3,1,1,3,3,2,4,5,3,3,5,0,4,0,3,0,4,4,3,5,3,3,0,3,4,0,4,3),
(0,5,0,5,0,3,0,2,0,4,4,3,5,2,4,3,3,3,4,4,4,3,5,3,5,3,3,1,4,0,4,3,3,0,3,3,0,4,4,4,4,5,4,3,3,5,5,3,2,3,1,2,3,2,0,1,0,0,3,2,2,4,4,3,1,5,0,4,0,3,0,4,3,1,3,2,1,0,3,3,0,3,3),
(0,4,0,5,0,5,0,4,0,4,5,5,5,3,4,3,3,2,5,4,4,3,5,3,5,3,4,0,4,3,4,4,3,2,4,4,3,4,5,4,4,5,5,0,3,5,5,4,1,3,3,2,3,3,1,3,1,0,4,3,1,4,4,3,4,5,0,4,0,2,0,4,3,4,4,3,3,0,4,0,0,5,5),
(0,4,0,4,0,5,0,1,1,3,3,4,4,3,4,1,3,0,5,1,3,0,3,1,3,1,1,0,3,0,3,3,4,0,4,3,0,4,4,4,3,4,4,0,3,5,4,1,0,3,0,0,2,3,0,3,1,0,3,1,0,3,2,1,3,5,0,3,0,1,0,3,2,3,3,4,4,0,2,2,0,4,4),
(2,4,0,5,0,4,0,3,0,4,5,5,4,3,5,3,5,3,5,3,5,2,5,3,4,3,3,4,3,4,5,3,2,1,5,4,3,2,3,4,5,3,4,1,2,5,4,3,0,3,3,0,3,2,0,2,3,0,4,1,0,3,4,3,3,5,0,3,0,1,0,4,5,5,5,4,3,0,4,2,0,3,5),
(0,5,0,4,0,4,0,2,0,5,4,3,4,3,4,3,3,3,4,3,4,2,5,3,5,3,4,1,4,3,4,4,4,0,3,5,0,4,4,4,4,5,3,1,3,4,5,3,3,3,3,3,3,3,0,2,2,0,3,3,2,4,3,3,3,5,3,4,1,3,3,5,3,2,0,0,0,0,4,3,1,3,3),
(0,1,0,3,0,3,0,1,0,1,3,3,3,2,3,3,3,0,3,0,0,0,3,1,3,0,0,0,2,2,2,3,0,0,3,2,0,1,2,4,1,3,3,0,0,3,3,3,0,1,0,0,2,1,0,0,3,0,3,1,0,3,0,0,1,3,0,2,0,1,0,3,3,1,3,3,0,0,1,1,0,3,3),
(0,2,0,3,0,2,1,4,0,2,2,3,1,1,3,1,1,0,2,0,3,1,2,3,1,3,0,0,1,0,4,3,2,3,3,3,1,4,2,3,3,3,3,1,0,3,1,4,0,1,1,0,1,2,0,1,1,0,1,1,0,3,1,3,2,2,0,1,0,0,0,2,3,3,3,1,0,0,0,0,0,2,3),
(0,5,0,4,0,5,0,2,0,4,5,5,3,3,4,3,3,1,5,4,4,2,4,4,4,3,4,2,4,3,5,5,4,3,3,4,3,3,5,5,4,5,5,1,3,4,5,3,1,4,3,1,3,3,0,3,3,1,4,3,1,4,5,3,3,5,0,4,0,3,0,5,3,3,1,4,3,0,4,0,1,5,3),
(0,5,0,5,0,4,0,2,0,4,4,3,4,3,3,3,3,3,5,4,4,4,4,4,4,5,3,3,5,2,4,4,4,3,4,4,3,3,4,4,5,5,3,3,4,3,4,3,3,4,3,3,3,3,1,2,2,1,4,3,3,5,4,4,3,4,0,4,0,3,0,4,4,4,4,4,1,0,4,2,0,2,4),
(0,4,0,4,0,3,0,1,0,3,5,2,3,0,3,0,2,1,4,2,3,3,4,1,4,3,3,2,4,1,3,3,3,0,3,3,0,0,3,3,3,5,3,3,3,3,3,2,0,2,0,0,2,0,0,2,0,0,1,0,0,3,1,2,2,3,0,3,0,2,0,4,4,3,3,4,1,0,3,0,0,2,4),
(0,0,0,4,0,0,0,0,0,0,1,0,1,0,2,0,0,0,0,0,1,0,2,0,1,0,0,0,0,0,3,1,3,0,3,2,0,0,0,1,0,3,2,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,4,0,2,0,0,0,0,0,0,2),
(0,2,1,3,0,2,0,2,0,3,3,3,3,1,3,1,3,3,3,3,3,3,4,2,2,1,2,1,4,0,4,3,1,3,3,3,2,4,3,5,4,3,3,3,3,3,3,3,0,1,3,0,2,0,0,1,0,0,1,0,0,4,2,0,2,3,0,3,3,0,3,3,4,2,3,1,4,0,1,2,0,2,3),
(0,3,0,3,0,1,0,3,0,2,3,3,3,0,3,1,2,0,3,3,2,3,3,2,3,2,3,1,3,0,4,3,2,0,3,3,1,4,3,3,2,3,4,3,1,3,3,1,1,0,1,1,0,1,0,1,0,1,0,0,0,4,1,1,0,3,0,3,1,0,2,3,3,3,3,3,1,0,0,2,0,3,3),
(0,0,0,0,0,0,0,0,0,0,3,0,2,0,3,0,0,0,0,0,0,0,3,0,0,0,0,0,0,0,3,0,3,0,3,1,0,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,2,0,2,3,0,0,0,0,0,0,0,0,3),
(0,2,0,3,1,3,0,3,0,2,3,3,3,1,3,1,3,1,3,1,3,3,3,1,3,0,2,3,1,1,4,3,3,2,3,3,1,2,2,4,1,3,3,0,1,4,2,3,0,1,3,0,3,0,0,1,3,0,2,0,0,3,3,2,1,3,0,3,0,2,0,3,4,4,4,3,1,0,3,0,0,3,3),
(0,2,0,1,0,2,0,0,0,1,3,2,2,1,3,0,1,1,3,0,3,2,3,1,2,0,2,0,1,1,3,3,3,0,3,3,1,1,2,3,2,3,3,1,2,3,2,0,0,1,0,0,0,0,0,0,3,0,1,0,0,2,1,2,1,3,0,3,0,0,0,3,4,4,4,3,2,0,2,0,0,2,4),
(0,0,0,1,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,2,2,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,1,3,1,0,0,0,0,0,0,0,3),
(0,3,0,3,0,2,0,3,0,3,3,3,2,3,2,2,2,0,3,1,3,3,3,2,3,3,0,0,3,0,3,2,2,0,2,3,1,4,3,4,3,3,2,3,1,5,4,4,0,3,1,2,1,3,0,3,1,1,2,0,2,3,1,3,1,3,0,3,0,1,0,3,3,4,4,2,1,0,2,1,0,2,4),
(0,1,0,3,0,1,0,2,0,1,4,2,5,1,4,0,2,0,2,1,3,1,4,0,2,1,0,0,2,1,4,1,1,0,3,3,0,5,1,3,2,3,3,1,0,3,2,3,0,1,0,0,0,0,0,0,1,0,0,0,0,4,0,1,0,3,0,2,0,1,0,3,3,3,4,3,3,0,0,0,0,2,3),
(0,0,0,1,0,0,0,0,0,0,2,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,1,0,0,1,0,0,0,0,0,3),
(0,1,0,3,0,4,0,3,0,2,4,3,1,0,3,2,2,1,3,1,2,2,3,1,1,1,2,1,3,0,1,2,0,1,3,2,1,3,0,5,5,1,0,0,1,3,2,1,0,3,0,0,1,0,0,0,0,0,3,4,0,1,1,1,3,2,0,2,0,1,0,2,3,3,1,2,3,0,1,0,1,0,4),
(0,0,0,1,0,3,0,3,0,2,2,1,0,0,4,0,3,0,3,1,3,0,3,0,3,0,1,0,3,0,3,1,3,0,3,3,0,0,1,2,1,1,1,0,1,2,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,2,2,1,2,0,0,2,0,0,0,0,2,3,3,3,3,0,0,0,0,1,4),
(0,0,0,3,0,3,0,0,0,0,3,1,1,0,3,0,1,0,2,0,1,0,0,0,0,0,0,0,1,0,3,0,2,0,2,3,0,0,2,2,3,1,2,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,2,0,0,0,0,2,3),
(2,4,0,5,0,5,0,4,0,3,4,3,3,3,4,3,3,3,4,3,4,4,5,4,5,5,5,2,3,0,5,5,4,1,5,4,3,1,5,4,3,4,4,3,3,4,3,3,0,3,2,0,2,3,0,3,0,0,3,3,0,5,3,2,3,3,0,3,0,3,0,3,4,5,4,5,3,0,4,3,0,3,4),
(0,3,0,3,0,3,0,3,0,3,3,4,3,2,3,2,3,0,4,3,3,3,3,3,3,3,3,0,3,2,4,3,3,1,3,4,3,4,4,4,3,4,4,3,2,4,4,1,0,2,0,0,1,1,0,2,0,0,3,1,0,5,3,2,1,3,0,3,0,1,2,4,3,2,4,3,3,0,3,2,0,4,4),
(0,3,0,3,0,1,0,0,0,1,4,3,3,2,3,1,3,1,4,2,3,2,4,2,3,4,3,0,2,2,3,3,3,0,3,3,3,0,3,4,1,3,3,0,3,4,3,3,0,1,1,0,1,0,0,0,4,0,3,0,0,3,1,2,1,3,0,4,0,1,0,4,3,3,4,3,3,0,2,0,0,3,3),
(0,3,0,4,0,1,0,3,0,3,4,3,3,0,3,3,3,1,3,1,3,3,4,3,3,3,0,0,3,1,5,3,3,1,3,3,2,5,4,3,3,4,5,3,2,5,3,4,0,1,0,0,0,0,0,2,0,0,1,1,0,4,2,2,1,3,0,3,0,2,0,4,4,3,5,3,2,0,1,1,0,3,4),
(0,5,0,4,0,5,0,2,0,4,4,3,3,2,3,3,3,1,4,3,4,1,5,3,4,3,4,0,4,2,4,3,4,1,5,4,0,4,4,4,4,5,4,1,3,5,4,2,1,4,1,1,3,2,0,3,1,0,3,2,1,4,3,3,3,4,0,4,0,3,0,4,4,4,3,3,3,0,4,2,0,3,4),
(1,4,0,4,0,3,0,1,0,3,3,3,1,1,3,3,2,2,3,3,1,0,3,2,2,1,2,0,3,1,2,1,2,0,3,2,0,2,2,3,3,4,3,0,3,3,1,2,0,1,1,3,1,2,0,0,3,0,1,1,0,3,2,2,3,3,0,3,0,0,0,2,3,3,4,3,3,0,1,0,0,1,4),
(0,4,0,4,0,4,0,0,0,3,4,4,3,1,4,2,3,2,3,3,3,1,4,3,4,0,3,0,4,2,3,3,2,2,5,4,2,1,3,4,3,4,3,1,3,3,4,2,0,2,1,0,3,3,0,0,2,0,3,1,0,4,4,3,4,3,0,4,0,1,0,2,4,4,4,4,4,0,3,2,0,3,3),
(0,0,0,1,0,4,0,0,0,0,0,0,1,1,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,1,0,3,2,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,2),
(0,2,0,3,0,4,0,4,0,1,3,3,3,0,4,0,2,1,2,1,1,1,2,0,3,1,1,0,1,0,3,1,0,0,3,3,2,0,1,1,0,0,0,0,0,1,0,2,0,2,2,0,3,1,0,0,1,0,1,1,0,1,2,0,3,0,0,0,0,1,0,0,3,3,4,3,1,0,1,0,3,0,2),
(0,0,0,3,0,5,0,0,0,0,1,0,2,0,3,1,0,1,3,0,0,0,2,0,0,0,1,0,0,0,1,1,0,0,4,0,0,0,2,3,0,1,4,1,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,0,0,1,0,0,0,0,0,0,0,2,0,0,3,0,0,0,0,0,3),
(0,2,0,5,0,5,0,1,0,2,4,3,3,2,5,1,3,2,3,3,3,0,4,1,2,0,3,0,4,0,2,2,1,1,5,3,0,0,1,4,2,3,2,0,3,3,3,2,0,2,4,1,1,2,0,1,1,0,3,1,0,1,3,1,2,3,0,2,0,0,0,1,3,5,4,4,4,0,3,0,0,1,3),
(0,4,0,5,0,4,0,4,0,4,5,4,3,3,4,3,3,3,4,3,4,4,5,3,4,5,4,2,4,2,3,4,3,1,4,4,1,3,5,4,4,5,5,4,4,5,5,5,2,3,3,1,4,3,1,3,3,0,3,3,1,4,3,4,4,4,0,3,0,4,0,3,3,4,4,5,0,0,4,3,0,4,5),
(0,4,0,4,0,3,0,3,0,3,4,4,4,3,3,2,4,3,4,3,4,3,5,3,4,3,2,1,4,2,4,4,3,1,3,4,2,4,5,5,3,4,5,4,1,5,4,3,0,3,2,2,3,2,1,3,1,0,3,3,3,5,3,3,3,5,4,4,2,3,3,4,3,3,3,2,1,0,3,2,1,4,3),
(0,4,0,5,0,4,0,3,0,3,5,5,3,2,4,3,4,0,5,4,4,1,4,4,4,3,3,3,4,3,5,5,2,3,3,4,1,2,5,5,3,5,5,2,3,5,5,4,0,3,2,0,3,3,1,1,5,1,4,1,0,4,3,2,3,5,0,4,0,3,0,5,4,3,4,3,0,0,4,1,0,4,4),
(1,3,0,4,0,2,0,2,0,2,5,5,3,3,3,3,3,0,4,2,3,4,4,4,3,4,0,0,3,4,5,4,3,3,3,3,2,5,5,4,5,5,5,4,3,5,5,5,1,3,1,0,1,0,0,3,2,0,4,2,0,5,2,3,2,4,1,3,0,3,0,4,5,4,5,4,3,0,4,2,0,5,4),
(0,3,0,4,0,5,0,3,0,3,4,4,3,2,3,2,3,3,3,3,3,2,4,3,3,2,2,0,3,3,3,3,3,1,3,3,3,0,4,4,3,4,4,1,1,4,4,2,0,3,1,0,1,1,0,4,1,0,2,3,1,3,3,1,3,4,0,3,0,1,0,3,1,3,0,0,1,0,2,0,0,4,4),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0),
(0,3,0,3,0,2,0,3,0,1,5,4,3,3,3,1,4,2,1,2,3,4,4,2,4,4,5,0,3,1,4,3,4,0,4,3,3,3,2,3,2,5,3,4,3,2,2,3,0,0,3,0,2,1,0,1,2,0,0,0,0,2,1,1,3,1,0,2,0,4,0,3,4,4,4,5,2,0,2,0,0,1,3),
(0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,1,0,0,1,1,0,0,0,4,2,1,1,0,1,0,3,2,0,0,3,1,1,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,1,0,0,0,2,0,0,0,1,4,0,4,2,1,0,0,0,0,0,1),
(0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,1,0,1,0,0,0,0,3,1,0,0,0,2,0,2,1,0,0,1,2,1,0,1,1,0,0,3,0,0,0,0,0,0,0,0,0,0,0,1,3,1,0,0,0,0,0,1,0,0,2,1,0,0,0,0,0,0,0,0,2),
(0,4,0,4,0,4,0,3,0,4,4,3,4,2,4,3,2,0,4,4,4,3,5,3,5,3,3,2,4,2,4,3,4,3,1,4,0,2,3,4,4,4,3,3,3,4,4,4,3,4,1,3,4,3,2,1,2,1,3,3,3,4,4,3,3,5,0,4,0,3,0,4,3,3,3,2,1,0,3,0,0,3,3),
(0,4,0,3,0,3,0,3,0,3,5,5,3,3,3,3,4,3,4,3,3,3,4,4,4,3,3,3,3,4,3,5,3,3,1,3,2,4,5,5,5,5,4,3,4,5,5,3,2,2,3,3,3,3,2,3,3,1,2,3,2,4,3,3,3,4,0,4,0,2,0,4,3,2,2,1,2,0,3,0,0,4,1),
)
class JapaneseContextAnalysis:
def __init__(self):
self.reset()
def reset(self):
self._mTotalRel = 0 # total sequence received
        # category counters, each integer counts sequences in its category
self._mRelSample = [0] * NUM_OF_CATEGORY
# if last byte in current buffer is not the last byte of a character,
# we need to know how many bytes to skip in next buffer
self._mNeedToSkipCharNum = 0
self._mLastCharOrder = -1 # The order of previous char
# If this flag is set to True, detection is done and conclusion has
# been made
self._mDone = False
def feed(self, aBuf, aLen):
if self._mDone:
return
        # The buffer we got is byte oriented, and a character may span more
        # than one buffer. If the last character of the previous buffer was
        # incomplete, we recorded how many bytes were needed to complete it,
        # and we skip those bytes here. We could instead record those bytes
        # and analyse the character once it is complete, but a single
        # character makes little difference, so simply skipping it keeps the
        # logic simple and improves performance.
i = self._mNeedToSkipCharNum
while i < aLen:
order, charLen = self.get_order(aBuf[i:i + 2])
i += charLen
if i > aLen:
self._mNeedToSkipCharNum = i - aLen
self._mLastCharOrder = -1
else:
if (order != -1) and (self._mLastCharOrder != -1):
self._mTotalRel += 1
if self._mTotalRel > MAX_REL_THRESHOLD:
self._mDone = True
break
self._mRelSample[jp2CharContext[self._mLastCharOrder][order]] += 1
self._mLastCharOrder = order
def got_enough_data(self):
return self._mTotalRel > ENOUGH_REL_THRESHOLD
def get_confidence(self):
# This is just one way to calculate confidence. It works well for me.
if self._mTotalRel > MINIMUM_DATA_THRESHOLD:
return (self._mTotalRel - self._mRelSample[0]) / self._mTotalRel
else:
return DONT_KNOW
def get_order(self, aBuf):
return -1, 1
class SJISContextAnalysis(JapaneseContextAnalysis):
    def __init__(self):
        # initialise the base-class counters before the first feed()
        JapaneseContextAnalysis.__init__(self)
        self.charset_name = "SHIFT_JIS"
def get_charset_name(self):
return self.charset_name
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if ((0x81 <= first_char <= 0x9F) or (0xE0 <= first_char <= 0xFC)):
charLen = 2
if (first_char == 0x87) or (0xFA <= first_char <= 0xFC):
self.charset_name = "CP932"
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 202) and (0x9F <= second_char <= 0xF1):
return second_char - 0x9F, charLen
return -1, charLen
class EUCJPContextAnalysis(JapaneseContextAnalysis):
def get_order(self, aBuf):
if not aBuf:
return -1, 1
# find out current char's byte length
first_char = wrap_ord(aBuf[0])
if (first_char == 0x8E) or (0xA1 <= first_char <= 0xFE):
charLen = 2
elif first_char == 0x8F:
charLen = 3
else:
charLen = 1
# return its order if it is hiragana
if len(aBuf) > 1:
second_char = wrap_ord(aBuf[1])
if (first_char == 0xA4) and (0xA1 <= second_char <= 0xF3):
return second_char - 0xA1, charLen
return -1, charLen
# flake8: noqa
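# --- Illustrative sketch, not part of the original module ---
# Feeding raw EUC-JP bytes to the context analyser; the sample text is
# arbitrary hiragana, and get_confidence() returns DONT_KNOW until enough
# character pairs have been seen.
def _example_eucjp_context():
    analyser = EUCJPContextAnalysis()
    sample = b'\xa4\xb3\xa4\xf3\xa4\xcb\xa4\xc1\xa4\xcf' * 4  # hiragana text
    analyser.feed(sample, len(sample))
    return analyser.get_confidence()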
|
hoangt/ScaffCC
|
refs/heads/master
|
clang/bindings/python/tests/cindex/test_type.py
|
5
|
from clang.cindex import CursorKind
from clang.cindex import TypeKind
from nose.tools import raises
from .util import get_cursor
from .util import get_tu
kInput = """\
typedef int I;
struct teststruct {
int a;
I b;
long c;
unsigned long d;
signed long e;
const int f;
int *g;
int ***h;
};
"""
def test_a_struct():
tu = get_tu(kInput)
teststruct = get_cursor(tu, 'teststruct')
assert teststruct is not None, "Could not find teststruct."
fields = list(teststruct.get_children())
assert all(x.kind == CursorKind.FIELD_DECL for x in fields)
assert fields[0].spelling == 'a'
assert not fields[0].type.is_const_qualified()
assert fields[0].type.kind == TypeKind.INT
assert fields[0].type.get_canonical().kind == TypeKind.INT
assert fields[1].spelling == 'b'
assert not fields[1].type.is_const_qualified()
assert fields[1].type.kind == TypeKind.TYPEDEF
assert fields[1].type.get_canonical().kind == TypeKind.INT
assert fields[1].type.get_declaration().spelling == 'I'
assert fields[2].spelling == 'c'
assert not fields[2].type.is_const_qualified()
assert fields[2].type.kind == TypeKind.LONG
assert fields[2].type.get_canonical().kind == TypeKind.LONG
assert fields[3].spelling == 'd'
assert not fields[3].type.is_const_qualified()
assert fields[3].type.kind == TypeKind.ULONG
assert fields[3].type.get_canonical().kind == TypeKind.ULONG
assert fields[4].spelling == 'e'
assert not fields[4].type.is_const_qualified()
assert fields[4].type.kind == TypeKind.LONG
assert fields[4].type.get_canonical().kind == TypeKind.LONG
assert fields[5].spelling == 'f'
assert fields[5].type.is_const_qualified()
assert fields[5].type.kind == TypeKind.INT
assert fields[5].type.get_canonical().kind == TypeKind.INT
assert fields[6].spelling == 'g'
assert not fields[6].type.is_const_qualified()
assert fields[6].type.kind == TypeKind.POINTER
assert fields[6].type.get_pointee().kind == TypeKind.INT
assert fields[7].spelling == 'h'
assert not fields[7].type.is_const_qualified()
assert fields[7].type.kind == TypeKind.POINTER
assert fields[7].type.get_pointee().kind == TypeKind.POINTER
assert fields[7].type.get_pointee().get_pointee().kind == TypeKind.POINTER
assert fields[7].type.get_pointee().get_pointee().get_pointee().kind == TypeKind.INT
constarrayInput="""
struct teststruct {
void *A[2];
};
"""
def testConstantArray():
tu = get_tu(constarrayInput)
teststruct = get_cursor(tu, 'teststruct')
assert teststruct is not None, "Didn't find teststruct??"
fields = list(teststruct.get_children())
assert fields[0].spelling == 'A'
assert fields[0].type.kind == TypeKind.CONSTANTARRAY
assert fields[0].type.get_array_element_type() is not None
assert fields[0].type.get_array_element_type().kind == TypeKind.POINTER
assert fields[0].type.get_array_size() == 2
def test_equal():
"""Ensure equivalence operators work on Type."""
source = 'int a; int b; void *v;'
tu = get_tu(source)
a = get_cursor(tu, 'a')
b = get_cursor(tu, 'b')
v = get_cursor(tu, 'v')
assert a is not None
assert b is not None
assert v is not None
assert a.type == b.type
assert a.type != v.type
assert a.type != None
assert a.type != 'foo'
def test_typekind_spelling():
"""Ensure TypeKind.spelling works."""
tu = get_tu('int a;')
a = get_cursor(tu, 'a')
assert a is not None
assert a.type.kind.spelling == 'Int'
def test_function_argument_types():
"""Ensure that Type.argument_types() works as expected."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
assert f is not None
args = f.type.argument_types()
assert args is not None
assert len(args) == 2
t0 = args[0]
assert t0 is not None
assert t0.kind == TypeKind.INT
t1 = args[1]
assert t1 is not None
assert t1.kind == TypeKind.INT
args2 = list(args)
assert len(args2) == 2
assert t0 == args2[0]
assert t1 == args2[1]
@raises(TypeError)
def test_argument_types_string_key():
"""Ensure that non-int keys raise a TypeError."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
assert f is not None
args = f.type.argument_types()
assert len(args) == 2
args['foo']
@raises(IndexError)
def test_argument_types_negative_index():
"""Ensure that negative indexes on argument_types Raises an IndexError."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
args = f.type.argument_types()
args[-1]
@raises(IndexError)
def test_argument_types_overflow_index():
"""Ensure that indexes beyond the length of Type.argument_types() raise."""
tu = get_tu('void f(int, int);')
f = get_cursor(tu, 'f')
args = f.type.argument_types()
args[2]
@raises(Exception)
def test_argument_types_invalid_type():
"""Ensure that obtaining argument_types on a Type without them raises."""
tu = get_tu('int i;')
i = get_cursor(tu, 'i')
assert i is not None
i.type.argument_types()
def test_is_pod():
"""Ensure Type.is_pod() works."""
tu = get_tu('int i; void f();')
i = get_cursor(tu, 'i')
f = get_cursor(tu, 'f')
assert i is not None
assert f is not None
assert i.type.is_pod()
assert not f.type.is_pod()
def test_function_variadic():
"""Ensure Type.is_function_variadic works."""
source ="""
#include <stdarg.h>
void foo(int a, ...);
void bar(int a, int b);
"""
tu = get_tu(source)
foo = get_cursor(tu, 'foo')
bar = get_cursor(tu, 'bar')
assert foo is not None
assert bar is not None
assert isinstance(foo.type.is_function_variadic(), bool)
assert foo.type.is_function_variadic()
assert not bar.type.is_function_variadic()
def test_element_type():
"""Ensure Type.element_type works."""
tu = get_tu('int i[5];')
i = get_cursor(tu, 'i')
assert i is not None
assert i.type.kind == TypeKind.CONSTANTARRAY
assert i.type.element_type.kind == TypeKind.INT
@raises(Exception)
def test_invalid_element_type():
"""Ensure Type.element_type raises if type doesn't have elements."""
tu = get_tu('int i;')
i = get_cursor(tu, 'i')
assert i is not None
i.element_type
def test_element_count():
"""Ensure Type.element_count works."""
tu = get_tu('int i[5]; int j;')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert i.type.element_count == 5
try:
j.type.element_count
assert False
except:
assert True
def test_is_volatile_qualified():
"""Ensure Type.is_volatile_qualified works."""
tu = get_tu('volatile int i = 4; int j = 2;')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert isinstance(i.type.is_volatile_qualified(), bool)
assert i.type.is_volatile_qualified()
assert not j.type.is_volatile_qualified()
def test_is_restrict_qualified():
"""Ensure Type.is_restrict_qualified works."""
    tu = get_tu('struct s { void * restrict i; void * j; };')
i = get_cursor(tu, 'i')
j = get_cursor(tu, 'j')
assert i is not None
assert j is not None
assert isinstance(i.type.is_restrict_qualified(), bool)
assert i.type.is_restrict_qualified()
assert not j.type.is_restrict_qualified()
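# --- Illustrative sketch, not part of the original test suite ---
# The same get_tu/get_cursor pattern applied to a pointer-to-const, showing
# how qualifiers on the pointee type are inspected.
def example_pointee_const_qualifier():
    tu = get_tu('const int *p;')
    p = get_cursor(tu, 'p')
    assert p is not None
    assert p.type.kind == TypeKind.POINTER
    assert p.type.get_pointee().is_const_qualified()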
|
pprivulet/DataScience
|
refs/heads/master
|
stock/stock.py
|
1
|
#!/usr/bin/python
import urllib2
import pymongo as pm
import traceback
stockfile = open('code.bak','r')
client = pm.MongoClient("localhost",27017)
db = client.stockdb
stocks = db.stockName
stockRecord = db.stockRecord
termList = ['today_open_price', 'yesterday_close_price', 'current_price', 'today_highest', 'today_lowest', 'bid_buy', 'bid_sail', 'total_deal', 'total_money', 'buy_1_count', 'buy_1_price', 'buy_2_count', 'buy_2_price', 'buy_3_count', 'buy_3_price', 'buy_4_count', 'buy_4_price', 'buy_5_count', 'buy_5_price', 'sail_1_count', 'sail_1_price', 'sail_2_count', 'sail_2_price', 'sail_3_count', 'sail_3_price', 'sail_4_count', 'sail_4_price', 'sail_5_count', 'sail_5_price', 'date', 'time', 'last']
for stock in stockfile:
uri = "http://hq.sinajs.cn/list=%s"%stock
try:
f = urllib2.urlopen(uri)
content = f.readline().split('=')[1]
content = content[0:-2]
record = dict()
if len(content) > 2:
#print "stock:",content.decode('gbk').strip('"')
clist = content.decode('gbk').strip('"').split(",")
#print len(clist)
sn = { "_id":stock.strip('\n'), "name":clist[0] }
#print sn
for i in range(0,len(termList)):
record[termList[i]] = clist[i+1]
#print record
record['code_id'] = stock.strip('\n')
date = record['date']
record['date'] = int(''.join(date.split("-")))
record['datestr'] = date
stockRecord.insert(record)
#stocks.insert(sn)
else:
print "stock:","null"
except:
print stock,"error"
continue
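# --- Illustrative sketch, not part of the original script ---
# How a single Sina quote string maps onto termList without touching the
# network or MongoDB; the quote content passed in is expected to be the part
# after the '=' sign, and any sample values used are fabricated.
def _example_parse_quote(content):
    clist = content.strip('"').split(",")
    record = dict(zip(termList, clist[1:]))
    record['name'] = clist[0]
    return record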
|
koparasy/faultinjection-gem5
|
refs/heads/master
|
src/cpu/IntrControl.py
|
19
|
# Copyright (c) 2005-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
from m5.SimObject import SimObject
from m5.params import *
from m5.proxy import *
class IntrControl(SimObject):
type = 'IntrControl'
sys = Param.System(Parent.any, "the system we are part of")
|
elkingtonmcb/h2o-2
|
refs/heads/master
|
py/testdir_single_jvm/test_parse_multi_header_single_fvec.py
|
9
|
import unittest, time, sys, random, os
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_import as h2i
import h2o_browse as h2b
# for test debug
HEADER = True
dataRowsWithHeader = 0
# Don't write headerData if None (for non-header files)
# Don't write data if rowCount is None
def write_syn_dataset(csvPathname, rowCount, headerData, rList):
dsf = open(csvPathname, "w+")
if headerData is not None:
dsf.write(headerData + "\n")
if rowCount is not None:
for i in range(rowCount):
# two choices on the input. Make output choices random
r = rList[random.randint(0,1)] + "," + str(random.randint(0,7))
dsf.write(r + "\n")
dsf.close()
return rowCount # rows done
else:
dsf.close()
return 0 # rows done
def rand_rowData(colCount):
rowData = [random.randint(0,7) for i in range(colCount)]
rowData1= ",".join(map(str,rowData))
rowData = [random.randint(0,7) for i in range(colCount)]
rowData2= ",".join(map(str,rowData))
# RF will complain if all inputs are the same
r = [rowData1, rowData2]
return r
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
SEED = h2o.setup_random_seed()
h2o.init(java_heap_GB=4,use_flatfile=True)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_multi_header_single_fvec(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
csvFilename = "syn_ints.csv"
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
headerData = "ID,CAPSULE,AGE,RACE,DPROS,DCAPS,PSA,VOL,GLEASON,output"
# cols must be 9 to match the header above, otherwise a different bug is hit
# extra output is added, so it's 10 total
tryList = [
(57, 300, 9, 'cA', 60, 0),
# try with 1-3 data lines in the header file too
(57, 300, 9, 'cB', 60, 1),
(57, 300, 9, 'cC', 60, 2),
(57, 300, 9, 'cD', 60, 3),
]
trial = 0
for (fileNum, rowCount, colCount, hex_key, timeoutSecs, dataRowsWithHeader) in tryList:
trial += 1
# FIX! should we add a header to them randomly???
print "Wait while", fileNum, "synthetic files are created in", SYNDATASETS_DIR
rowxcol = str(rowCount) + 'x' + str(colCount)
totalCols = colCount + 1 # 1 extra for output
totalDataRows = 0
for fileN in range(fileNum):
csvFilename = 'syn_' + str(fileN) + "_" + str(SEED) + "_" + rowxcol + '.csv'
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
rList = rand_rowData(colCount)
dataRowsDone = write_syn_dataset(csvPathname, rowCount, headerData=None, rList=rList)
totalDataRows += dataRowsDone
# create the header file
# can make it pass by not doing this
if HEADER:
csvFilename = 'syn_header_' + str(SEED) + "_" + rowxcol + '.csv'
csvPathname = SYNDATASETS_DIR + '/' + csvFilename
dataRowsDone = write_syn_dataset(csvPathname, dataRowsWithHeader, headerData, rList)
totalDataRows += dataRowsDone
# make sure all key names are unique, when we re-put and re-parse (h2o caching issues)
src_key = "syn_" + str(trial)
hex_key = "syn_" + str(trial) + ".hex"
# DON"T get redirected to S3! (EC2 hack in config, remember!)
# use it at the node level directly (because we gen'ed the files.
# I suppose we could force the redirect state bits in h2o.nodes[0] to False, instead?:w
# put them, rather than using import files, so this works if remote h2o is used
# and python creates the files locally
fileList = os.listdir(SYNDATASETS_DIR)
for f in fileList:
h2i.import_only(path=SYNDATASETS_DIR + "/" + f, schema='put', noPrint=True)
print f
if HEADER:
header = h2i.find_key('syn_header')
if not header:
raise Exception("Didn't find syn_header* key in the import")
# use regex. the only files in the dir will be the ones we just created with *fileN* match
print "Header Key = " + header
start = time.time()
parseResult = h2i.parse_only(pattern='*'+rowxcol+'*',
hex_key=hex_key, timeoutSecs=timeoutSecs, header="1", header_from_file=header)
print "parseResult['destination_key']: " + parseResult['destination_key']
inspect = h2o_cmd.runInspect(None, parseResult['destination_key'])
h2o_cmd.infoFromInspect(inspect, csvPathname)
print "\n" + csvPathname, \
" numRows:", "{:,}".format(inspect['numRows']), \
" numCols:", "{:,}".format(inspect['numCols'])
# should match # of cols in header or ??
self.assertEqual(inspect['numCols'], totalCols,
"parse created result with the wrong number of cols %s %s" % (inspect['numCols'], totalCols))
self.assertEqual(inspect['numRows'], totalDataRows,
"parse created result with the wrong number of rows (header shouldn't count) %s %s" % \
(inspect['numRows'], totalDataRows))
# put in an ignore param, that will fail unless headers were parsed correctly
if HEADER:
kwargs = {'sample_rate': 0.75, 'max_depth': 25, 'ntrees': 1, 'ignored_cols_by_name': 'ID,CAPSULE'}
else:
kwargs = {'sample_rate': 0.75, 'max_depth': 25, 'ntrees': 1}
start = time.time()
rfv = h2o_cmd.runRF(parseResult=parseResult, timeoutSecs=timeoutSecs, **kwargs)
elapsed = time.time() - start
print "%d pct. of timeout" % ((elapsed/timeoutSecs) * 100)
print "trial #", trial, "totalDataRows:", totalDataRows, "parse end on ", csvFilename, \
'took', time.time() - start, 'seconds'
h2o.check_sandbox_for_errors()
if __name__ == '__main__':
h2o.unit_main()
|
gojira/tensorflow
|
refs/heads/master
|
tensorflow/contrib/distributions/python/kernel_tests/statistical_testing_test.py
|
39
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the statistical testing library."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.distributions.python.ops import statistical_testing as st
from tensorflow.python.framework import ops
from tensorflow.python.platform import test
class StatisticalTestingTest(test.TestCase):
def test_dkwm_design_mean_one_sample_soundness(self):
thresholds = [1e-5, 1e-2, 1.1e-1, 0.9, 1., 1.02, 2., 10., 1e2, 1e5, 1e10]
rates = [1e-6, 1e-3, 1e-2, 1.1e-1, 0.2, 0.5, 0.7, 1.]
false_fail_rates, false_pass_rates = np.meshgrid(rates, rates)
false_fail_rates = false_fail_rates.flatten().astype(np.float32)
false_pass_rates = false_pass_rates.flatten().astype(np.float32)
detectable_discrepancies = []
for false_pass_rate, false_fail_rate in zip(
false_pass_rates, false_fail_rates):
sufficient_n = st.min_num_samples_for_dkwm_mean_test(
thresholds, low=0., high=1., false_fail_rate=false_fail_rate,
false_pass_rate=false_pass_rate)
detectable_discrepancies.append(
st.min_discrepancy_of_true_means_detectable_by_dkwm(
sufficient_n, low=0., high=1., false_fail_rate=false_fail_rate,
false_pass_rate=false_pass_rate))
detectable_discrepancies_ = self.evaluate(detectable_discrepancies)
for discrepancies, false_pass_rate, false_fail_rate in zip(
detectable_discrepancies_, false_pass_rates, false_fail_rates):
below_threshold = discrepancies <= thresholds
self.assertAllEqual(
np.ones_like(below_threshold, np.bool), below_threshold,
msg='false_pass_rate({}), false_fail_rate({})'.format(
false_pass_rate, false_fail_rate))
def test_dkwm_design_mean_two_sample_soundness(self):
thresholds = [1e-5, 1e-2, 1.1e-1, 0.9, 1., 1.02, 2., 10., 1e2, 1e5, 1e10]
rates = [1e-6, 1e-3, 1e-2, 1.1e-1, 0.2, 0.5, 0.7, 1.]
false_fail_rates, false_pass_rates = np.meshgrid(rates, rates)
false_fail_rates = false_fail_rates.flatten().astype(np.float32)
false_pass_rates = false_pass_rates.flatten().astype(np.float32)
detectable_discrepancies = []
for false_pass_rate, false_fail_rate in zip(
false_pass_rates, false_fail_rates):
[
sufficient_n1,
sufficient_n2
] = st.min_num_samples_for_dkwm_mean_two_sample_test(
thresholds, low1=0., high1=1., low2=0., high2=1.,
false_fail_rate=false_fail_rate,
false_pass_rate=false_pass_rate)
detectable_discrepancies.append(
st.min_discrepancy_of_true_means_detectable_by_dkwm_two_sample(
n1=sufficient_n1,
low1=0.,
high1=1.,
n2=sufficient_n2,
low2=0.,
high2=1.,
false_fail_rate=false_fail_rate,
false_pass_rate=false_pass_rate))
detectable_discrepancies_ = self.evaluate(detectable_discrepancies)
for discrepancies, false_pass_rate, false_fail_rate in zip(
detectable_discrepancies_, false_pass_rates, false_fail_rates):
below_threshold = discrepancies <= thresholds
self.assertAllEqual(
np.ones_like(below_threshold, np.bool), below_threshold,
msg='false_pass_rate({}), false_fail_rate({})'.format(
false_pass_rate, false_fail_rate))
def test_true_mean_confidence_interval_by_dkwm_one_sample(self):
rng = np.random.RandomState(seed=0)
num_samples = 5000
# 5000 samples is chosen to be enough to find discrepancies of
# size 0.1 or more with assurance 1e-6, as confirmed here:
d = st.min_discrepancy_of_true_means_detectable_by_dkwm(
num_samples, 0., 1., false_fail_rate=1e-6, false_pass_rate=1e-6)
d = self.evaluate(d)
self.assertLess(d, 0.1)
# Test that the confidence interval computed for the mean includes
# 0.5 and excludes 0.4 and 0.6.
samples = rng.uniform(size=num_samples).astype(np.float32)
(low, high) = st.true_mean_confidence_interval_by_dkwm(
samples, 0., 1., error_rate=1e-6)
low, high = self.evaluate([low, high])
self.assertGreater(low, 0.4)
self.assertLess(low, 0.5)
self.assertGreater(high, 0.5)
self.assertLess(high, 0.6)
def test_dkwm_mean_one_sample_assertion(self):
rng = np.random.RandomState(seed=0)
num_samples = 5000
# Test that the test assertion agrees that the mean of the standard
# uniform distribution is 0.5.
samples = rng.uniform(size=num_samples).astype(np.float32)
self.evaluate(st.assert_true_mean_equal_by_dkwm(
samples, 0., 1., 0.5, false_fail_rate=1e-6))
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is not 0.4.
with self.assertRaisesOpError("true mean greater than expected"):
self.evaluate(st.assert_true_mean_equal_by_dkwm(
samples, 0., 1., 0.4, false_fail_rate=1e-6))
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is not 0.6.
with self.assertRaisesOpError("true mean smaller than expected"):
self.evaluate(st.assert_true_mean_equal_by_dkwm(
samples, 0., 1., 0.6, false_fail_rate=1e-6))
def test_dkwm_mean_in_interval_one_sample_assertion(self):
rng = np.random.RandomState(seed=0)
num_samples = 5000
# Test that the test assertion agrees that the mean of the standard
# uniform distribution is between 0.4 and 0.6.
samples = rng.uniform(size=num_samples).astype(np.float32)
self.evaluate(st.assert_true_mean_in_interval_by_dkwm(
samples, 0., 1.,
expected_low=0.4, expected_high=0.6, false_fail_rate=1e-6))
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is not between 0.2 and 0.4.
with self.assertRaisesOpError("true mean greater than expected"):
self.evaluate(st.assert_true_mean_in_interval_by_dkwm(
samples, 0., 1.,
expected_low=0.2, expected_high=0.4, false_fail_rate=1e-6))
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is not between 0.6 and 0.8.
with self.assertRaisesOpError("true mean smaller than expected"):
self.evaluate(st.assert_true_mean_in_interval_by_dkwm(
samples, 0., 1.,
expected_low=0.6, expected_high=0.8, false_fail_rate=1e-6))
def test_dkwm_mean_two_sample_assertion(self):
rng = np.random.RandomState(seed=0)
num_samples = 4000
# 4000 samples is chosen to be enough to find discrepancies of
# size 0.2 or more with assurance 1e-6, as confirmed here:
d = st.min_discrepancy_of_true_means_detectable_by_dkwm_two_sample(
num_samples, 0., 1., num_samples, 0., 1.,
false_fail_rate=1e-6, false_pass_rate=1e-6)
d = self.evaluate(d)
self.assertLess(d, 0.2)
# Test that the test assertion agrees that the standard
# uniform distribution has the same mean as itself.
samples1 = rng.uniform(size=num_samples).astype(np.float32)
samples2 = rng.uniform(size=num_samples).astype(np.float32)
self.evaluate(st.assert_true_mean_equal_by_dkwm_two_sample(
samples1, 0., 1., samples2, 0., 1., false_fail_rate=1e-6))
def test_dkwm_mean_two_sample_assertion_beta_2_1_false(self):
rng = np.random.RandomState(seed=0)
num_samples = 4000
samples1 = rng.uniform(size=num_samples).astype(np.float32)
# As established above, 4000 samples is enough to find discrepancies
# of size 0.2 or more with assurance 1e-6.
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is different from the mean of beta(2, 1).
beta_high_samples = rng.beta(2, 1, size=num_samples).astype(np.float32)
with self.assertRaisesOpError("true mean smaller than expected"):
self.evaluate(st.assert_true_mean_equal_by_dkwm_two_sample(
samples1, 0., 1.,
beta_high_samples, 0., 1.,
false_fail_rate=1e-6))
def test_dkwm_mean_two_sample_assertion_beta_1_2_false(self):
rng = np.random.RandomState(seed=0)
num_samples = 4000
samples1 = rng.uniform(size=num_samples).astype(np.float32)
# As established above, 4000 samples is enough to find discrepancies
# of size 0.2 or more with assurance 1e-6.
# Test that the test assertion confirms that the mean of the
# standard uniform distribution is different from the mean of beta(1, 2).
beta_low_samples = rng.beta(1, 2, size=num_samples).astype(np.float32)
with self.assertRaisesOpError("true mean greater than expected"):
self.evaluate(st.assert_true_mean_equal_by_dkwm_two_sample(
samples1, 0., 1.,
beta_low_samples, 0., 1.,
false_fail_rate=1e-6))
def test_dkwm_argument_validity_checking(self):
rng = np.random.RandomState(seed=0)
samples = rng.uniform(
low=[0., 1.], high=[1., 2.], size=(2500, 1, 2)).astype(np.float32)
# Test that the test library complains if the given samples fall
# outside the purported bounds.
with self.assertRaisesOpError("maximum value exceeds expectations"):
self.evaluate(st.true_mean_confidence_interval_by_dkwm(
samples, [[0., 1.]], [[0.5, 1.5]], error_rate=0.5))
with self.assertRaisesOpError("minimum value falls below expectations"):
self.evaluate(st.true_mean_confidence_interval_by_dkwm(
samples, [[0.5, 1.5]], [[1., 2.]], error_rate=0.5))
# But doesn't complain if they don't.
op = st.true_mean_confidence_interval_by_dkwm(
samples, [[0., 1.]], [[1., 2.]], error_rate=0.5)
_ = self.evaluate(op)
def test_do_maximum_mean(self):
n = 117
envelope = 0.02 # > 2 / n, but < 3 / n
rng = np.random.RandomState(seed=8)
samples = rng.uniform(size=n).astype(np.float32)
# Compute the answer in TF using the code under test
envelope_t = ops.convert_to_tensor(envelope)
max_mean = st._do_maximum_mean(samples, envelope_t, 1)
max_mean = self.evaluate(max_mean)
# Compute the correct answer for this case in numpy. In this
# example, `n` and `envelope` are such that `samples[2]` is the
# element that should be taken partially, regardless of the
# content of the `samples` array (see algorithm description in
# `../ops/statistical_testing.py`).
samples = sorted(samples)
weight = 1. / n - (envelope - 2. / n)
answer = samples[2] * weight + sum(samples[3:]) / n + envelope * 1.
self.assertAllClose(max_mean, answer, rtol=1e-9)
if __name__ == '__main__':
test.main()
|
clslabMSU/clustGUI
|
refs/heads/master
|
resultOP.py
|
1
|
# -*- coding: utf-8 -*-
"""
Created on Sat Apr 29 13:08:52 2017
@author: thy1995
"""
import numpy as np
def table_result (result, x_axis, y_axis):
result = np.concatenate((x_axis, result), axis = 0)
result = np.concatenate((result, np.transpose(y_axis)), axis = 1)
return result
|
kaiweifan/neutron
|
refs/heads/vip5
|
neutron/tests/unit/test_api_api_common.py
|
24
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (c) 2013 Intel Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Zhongyue Luo, Intel Corporation.
#
from testtools import matchers
from webob import exc
from neutron.api import api_common as common
from neutron.tests import base
class FakeController(common.NeutronController):
_resource_name = 'fake'
class APICommonTestCase(base.BaseTestCase):
def setUp(self):
super(APICommonTestCase, self).setUp()
self.controller = FakeController(None)
def test_prepare_request_body(self):
body = {
'fake': {
'name': 'terminator',
'model': 'T-800',
}
}
params = [
{'param-name': 'name',
'required': True},
{'param-name': 'model',
'required': True},
{'param-name': 'quote',
'required': False,
'default-value': "i'll be back"},
]
expect = {
'fake': {
'name': 'terminator',
'model': 'T-800',
'quote': "i'll be back",
}
}
actual = self.controller._prepare_request_body(body, params)
self.assertThat(expect, matchers.Equals(actual))
def test_prepare_request_body_none(self):
body = None
params = [
{'param-name': 'quote',
'required': False,
'default-value': "I'll be back"},
]
expect = {
'fake': {
'quote': "I'll be back",
}
}
actual = self.controller._prepare_request_body(body, params)
self.assertThat(expect, matchers.Equals(actual))
def test_prepare_request_body_keyerror(self):
body = {'t2': {}}
params = []
self.assertRaises(exc.HTTPBadRequest,
self.controller._prepare_request_body,
body,
params)
def test_prepare_request_param_value_none(self):
body = {
'fake': {
'name': None,
}
}
params = [
{'param-name': 'name',
'required': True},
]
self.assertRaises(exc.HTTPBadRequest,
self.controller._prepare_request_body,
body,
params)
|
jk1/intellij-community
|
refs/heads/master
|
python/testData/inspections/UnresolvedRefNoCreateFunction.py
|
80
|
<error descr="Unresolved reference 'my_ref'">my_<caret>ref</error>.do_smth(1, 2)
|
ms-iot/python
|
refs/heads/develop
|
cpython/Lib/dummy_threading.py
|
210
|
"""Faux ``threading`` version using ``dummy_thread`` instead of ``thread``.
The module ``_dummy_threading`` is added to ``sys.modules`` in order
to not have ``threading`` considered imported. Had ``threading`` been
directly imported it would have made all subsequent imports succeed
regardless of whether ``_thread`` was available which is not desired.
"""
from sys import modules as sys_modules
import _dummy_thread
# Declaring now so as to not have to nest ``try``s to get proper clean-up.
holding_thread = False
holding_threading = False
holding__threading_local = False
try:
# Could have checked if ``_thread`` was not in sys.modules and gone
# a different route, but decided to mirror technique used with
# ``threading`` below.
if '_thread' in sys_modules:
held_thread = sys_modules['_thread']
holding_thread = True
# Must have some module named ``_thread`` that implements its API
# in order to initially import ``threading``.
sys_modules['_thread'] = sys_modules['_dummy_thread']
if 'threading' in sys_modules:
# If ``threading`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held_threading = sys_modules['threading']
holding_threading = True
del sys_modules['threading']
if '_threading_local' in sys_modules:
# If ``_threading_local`` is already imported, might as well prevent
# trying to import it more than needed by saving it if it is
# already imported before deleting it.
held__threading_local = sys_modules['_threading_local']
holding__threading_local = True
del sys_modules['_threading_local']
import threading
# Need a copy of the code kept somewhere...
sys_modules['_dummy_threading'] = sys_modules['threading']
del sys_modules['threading']
sys_modules['_dummy__threading_local'] = sys_modules['_threading_local']
del sys_modules['_threading_local']
from _dummy_threading import *
from _dummy_threading import __all__
finally:
# Put back ``threading`` if we overwrote earlier
if holding_threading:
sys_modules['threading'] = held_threading
del held_threading
del holding_threading
# Put back ``_threading_local`` if we overwrote earlier
if holding__threading_local:
sys_modules['_threading_local'] = held__threading_local
del held__threading_local
del holding__threading_local
# Put back ``thread`` if we overwrote, else del the entry we made
if holding_thread:
sys_modules['_thread'] = held_thread
del held_thread
else:
del sys_modules['_thread']
del holding_thread
del _dummy_thread
del sys_modules
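# Illustrative usage (editor's note, not part of the original module): the typical
# pattern is to fall back to this module when the real ``threading`` cannot be
# imported, e.g.
#
#   try:
#       import threading
#   except ImportError:
#       import dummy_threading as threading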
|
MichalMaM/ella
|
refs/heads/master
|
test_ella/test_utils/test_installedapps.py
|
6
|
import sys
from ella.utils.installedapps import call_modules, app_modules_loaded
from nose import tools
def test_module_loaded_and_signal_fired():
call_modules(('loadme',))
tools.assert_true('test_ella.test_app.loadme' in sys.modules)
loadme = sys.modules['test_ella.test_app.loadme']
tools.assert_equals(1, len(loadme.run_log))
tools.assert_equals(((), {'signal': app_modules_loaded, 'sender': None}), loadme.run_log[0])
|
patrickstocklin/chattR
|
refs/heads/master
|
lib/python2.7/site-packages/textblob/en/__init__.py
|
18
|
# -*- coding: utf-8 -*-
'''This file is based on pattern.en. See the bundled NOTICE file for
license information.
'''
from __future__ import absolute_import
import os
from textblob._text import (Parser as _Parser, Sentiment as _Sentiment, Lexicon,
WORD, POS, CHUNK, PNP, PENN, UNIVERSAL, Spelling)
from textblob.compat import text_type, unicode
try:
MODULE = os.path.dirname(os.path.abspath(__file__))
except:
MODULE = ""
spelling = Spelling(
path = os.path.join(MODULE, "en-spelling.txt")
)
#--- ENGLISH PARSER --------------------------------------------------------------------------------
def find_lemmata(tokens):
""" Annotates the tokens with lemmata for plural nouns and conjugated verbs,
where each token is a [word, part-of-speech] list.
"""
for token in tokens:
word, pos, lemma = token[0], token[1], token[0]
# cats => cat
if pos == "NNS":
lemma = singularize(word)
# sat => sit
if pos.startswith(("VB", "MD")):
lemma = conjugate(word, INFINITIVE) or word
token.append(lemma.lower())
return tokens
class Parser(_Parser):
def find_lemmata(self, tokens, **kwargs):
return find_lemmata(tokens)
def find_tags(self, tokens, **kwargs):
if kwargs.get("tagset") in (PENN, None):
kwargs.setdefault("map", lambda token, tag: (token, tag))
if kwargs.get("tagset") == UNIVERSAL:
kwargs.setdefault("map", lambda token, tag: penntreebank2universal(token, tag))
return _Parser.find_tags(self, tokens, **kwargs)
class Sentiment(_Sentiment):
def load(self, path=None):
_Sentiment.load(self, path)
# Map "terrible" to adverb "terribly" (+1% accuracy)
if not path:
for w, pos in list(dict.items(self)):
if "JJ" in pos:
if w.endswith("y"):
w = w[:-1] + "i"
if w.endswith("le"):
w = w[:-2]
p, s, i = pos["JJ"]
self.annotate(w + "ly", "RB", p, s, i)
lexicon = Lexicon(
path = os.path.join(MODULE, "en-lexicon.txt"),
morphology = os.path.join(MODULE, "en-morphology.txt"),
context = os.path.join(MODULE, "en-context.txt"),
entities = os.path.join(MODULE, "en-entities.txt"),
language = "en"
)
parser = Parser(
lexicon = lexicon,
default = ("NN", "NNP", "CD"),
language = "en"
)
sentiment = Sentiment(
path = os.path.join(MODULE, "en-sentiment.xml"),
synset = "wordnet_id",
negations = ("no", "not", "n't", "never"),
modifiers = ("RB",),
modifier = lambda w: w.endswith("ly"),
tokenizer = parser.find_tokens,
language = "en"
)
def tokenize(s, *args, **kwargs):
""" Returns a list of sentences, where punctuation marks have been split from words.
"""
return parser.find_tokens(text_type(s), *args, **kwargs)
def parse(s, *args, **kwargs):
""" Returns a tagged Unicode string.
"""
return parser.parse(unicode(s), *args, **kwargs)
def parsetree(s, *args, **kwargs):
""" Returns a parsed Text from the given string.
"""
return Text(parse(unicode(s), *args, **kwargs))
def split(s, token=[WORD, POS, CHUNK, PNP]):
""" Returns a parsed Text from the given parsed string.
"""
return Text(text_type(s), token)
def tag(s, tokenize=True, encoding="utf-8"):
""" Returns a list of (token, tag)-tuples from the given string.
"""
tags = []
for sentence in parse(s, tokenize, True, False, False, False, encoding).split():
for token in sentence:
tags.append((token[0], token[1]))
return tags
def suggest(w):
""" Returns a list of (word, confidence)-tuples of spelling corrections.
"""
return spelling.suggest(w)
def polarity(s, **kwargs):
""" Returns the sentence polarity (positive/negative) between -1.0 and 1.0.
"""
return sentiment(unicode(s), **kwargs)[0]
def subjectivity(s, **kwargs):
""" Returns the sentence subjectivity (objective/subjective) between 0.0 and 1.0.
"""
return sentiment(unicode(s), **kwargs)[1]
def positive(s, threshold=0.1, **kwargs):
""" Returns True if the given sentence has a positive sentiment (polarity >= threshold).
"""
return polarity(unicode(s), **kwargs) >= threshold
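# Illustrative usage (editor's sketch; assumes the bundled en-* data files listed
# above are present next to this module):
#
#   print(tag("The quick brown fox jumps over the lazy dog"))
#   print(polarity("What a wonderful day!"))   # a positive value in (0, 1]
#   print(suggest("speling"))                  # e.g. [(u'spelling', <confidence>)]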
|
michalliu/OpenWrt-Firefly-Libraries
|
refs/heads/master
|
staging_dir/target-mipsel_1004kc+dsp_uClibc-0.9.33.2/usr/lib/python2.7/distutils/command/check.py
|
98
|
"""distutils.command.check
Implements the Distutils 'check' command.
"""
__revision__ = "$Id$"
from distutils.core import Command
from distutils.dist import PKG_INFO_ENCODING
from distutils.errors import DistutilsSetupError
try:
# docutils is installed
from docutils.utils import Reporter
from docutils.parsers.rst import Parser
from docutils import frontend
from docutils import nodes
from StringIO import StringIO
class SilentReporter(Reporter):
def __init__(self, source, report_level, halt_level, stream=None,
debug=0, encoding='ascii', error_handler='replace'):
self.messages = []
Reporter.__init__(self, source, report_level, halt_level, stream,
debug, encoding, error_handler)
def system_message(self, level, message, *children, **kwargs):
self.messages.append((level, message, children, kwargs))
return nodes.system_message(message, level=level,
type=self.levels[level],
*children, **kwargs)
HAS_DOCUTILS = True
except ImportError:
# docutils is not installed
HAS_DOCUTILS = False
class check(Command):
"""This command checks the meta-data of the package.
"""
description = ("perform some checks on the package")
user_options = [('metadata', 'm', 'Verify meta-data'),
('restructuredtext', 'r',
('Checks if long string meta-data syntax '
'are reStructuredText-compliant')),
('strict', 's',
'Will exit with an error if a check fails')]
boolean_options = ['metadata', 'restructuredtext', 'strict']
def initialize_options(self):
"""Sets default values for options."""
self.restructuredtext = 0
self.metadata = 1
self.strict = 0
self._warnings = 0
def finalize_options(self):
pass
def warn(self, msg):
"""Counts the number of warnings that occurs."""
self._warnings += 1
return Command.warn(self, msg)
def run(self):
"""Runs the command."""
# perform the various tests
if self.metadata:
self.check_metadata()
if self.restructuredtext:
if HAS_DOCUTILS:
self.check_restructuredtext()
elif self.strict:
raise DistutilsSetupError('The docutils package is needed.')
# let's raise an error in strict mode, if we have at least
# one warning
if self.strict and self._warnings > 0:
raise DistutilsSetupError('Please correct your package.')
def check_metadata(self):
"""Ensures that all required elements of meta-data are supplied.
name, version, URL, (author and author_email) or
(maintainer and maintainer_email)).
Warns if any are missing.
"""
metadata = self.distribution.metadata
missing = []
for attr in ('name', 'version', 'url'):
if not (hasattr(metadata, attr) and getattr(metadata, attr)):
missing.append(attr)
if missing:
self.warn("missing required meta-data: %s" % ', '.join(missing))
if metadata.author:
if not metadata.author_email:
self.warn("missing meta-data: if 'author' supplied, " +
"'author_email' must be supplied too")
elif metadata.maintainer:
if not metadata.maintainer_email:
self.warn("missing meta-data: if 'maintainer' supplied, " +
"'maintainer_email' must be supplied too")
else:
self.warn("missing meta-data: either (author and author_email) " +
"or (maintainer and maintainer_email) " +
"must be supplied")
def check_restructuredtext(self):
"""Checks if the long string fields are reST-compliant."""
data = self.distribution.get_long_description()
if not isinstance(data, unicode):
data = data.decode(PKG_INFO_ENCODING)
for warning in self._check_rst_data(data):
line = warning[-1].get('line')
if line is None:
warning = warning[1]
else:
warning = '%s (line %s)' % (warning[1], line)
self.warn(warning)
def _check_rst_data(self, data):
"""Returns warnings when the provided data doesn't compile."""
source_path = StringIO()
parser = Parser()
settings = frontend.OptionParser().get_default_values()
settings.tab_width = 4
settings.pep_references = None
settings.rfc_references = None
reporter = SilentReporter(source_path,
settings.report_level,
settings.halt_level,
stream=settings.warning_stream,
debug=settings.debug,
encoding=settings.error_encoding,
error_handler=settings.error_encoding_error_handler)
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
try:
parser.parse(data, document)
except AttributeError:
reporter.messages.append((-1, 'Could not finish the parsing.',
'', {}))
return reporter.messages
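# Illustrative invocation (editor's note): with a setup.py that uses distutils,
# the checks above can be run from the command line, e.g.
#
#   python setup.py check --metadata --restructuredtext --strict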
|
morissette/devopsdays-hackathon-2016
|
refs/heads/master
|
venv/lib/python2.7/site-packages/pip/_vendor/html5lib/treeadapters/sax.py
|
1835
|
from __future__ import absolute_import, division, unicode_literals
from xml.sax.xmlreader import AttributesNSImpl
from ..constants import adjustForeignAttributes, unadjustForeignAttributes
prefix_mapping = {}
for prefix, localName, namespace in adjustForeignAttributes.values():
if prefix is not None:
prefix_mapping[prefix] = namespace
def to_sax(walker, handler):
"""Call SAX-like content handler based on treewalker walker"""
handler.startDocument()
for prefix, namespace in prefix_mapping.items():
handler.startPrefixMapping(prefix, namespace)
for token in walker:
type = token["type"]
if type == "Doctype":
continue
elif type in ("StartTag", "EmptyTag"):
attrs = AttributesNSImpl(token["data"],
unadjustForeignAttributes)
handler.startElementNS((token["namespace"], token["name"]),
token["name"],
attrs)
if type == "EmptyTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type == "EndTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type in ("Characters", "SpaceCharacters"):
handler.characters(token["data"])
elif type == "Comment":
pass
else:
assert False, "Unknown token type"
for prefix, namespace in prefix_mapping.items():
handler.endPrefixMapping(prefix)
handler.endDocument()
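# Illustrative usage (editor's sketch; assumes html5lib's documented parse and
# treewalker entry points):
#
#   import html5lib
#   from xml.sax.handler import ContentHandler
#
#   tree = html5lib.parse("<p>Hello <em>world</em></p>")
#   walker = html5lib.getTreeWalker("etree")
#   to_sax(walker(tree), ContentHandler())  # emits startDocument/startElementNS/... events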
|
byndcivilization/toy-infrastructure
|
refs/heads/master
|
flask-app/venv/lib/python3.6/site-packages/pip/vcs/git.py
|
340
|
from __future__ import absolute_import
import logging
import tempfile
import os.path
from pip.compat import samefile
from pip.exceptions import BadCommand
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._vendor.six.moves.urllib import request as urllib_request
from pip._vendor.packaging.version import parse as parse_version
from pip.utils import display_path, rmtree
from pip.vcs import vcs, VersionControl
urlsplit = urllib_parse.urlsplit
urlunsplit = urllib_parse.urlunsplit
logger = logging.getLogger(__name__)
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = (
'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file',
)
def __init__(self, url=None, *args, **kwargs):
# Works around an apparent Git bug
# (see http://article.gmane.org/gmane.comp.version-control.git/146500)
if url:
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = (
initial_slashes +
urllib_request.url2pathname(path)
.replace('\\', '/').lstrip('/')
)
url = urlunsplit((scheme, netloc, newpath, query, fragment))
after_plus = scheme.find('+') + 1
url = scheme[:after_plus] + urlunsplit(
(scheme[after_plus:], netloc, newpath, query, fragment),
)
super(Git, self).__init__(url, *args, **kwargs)
def get_git_version(self):
VERSION_PFX = 'git version '
version = self.run_command(['version'], show_stdout=False)
if version.startswith(VERSION_PFX):
version = version[len(VERSION_PFX):]
else:
version = ''
        # get first 3 positions of the git version because
        # on windows it is x.y.z.windows.t, and this parses as
        # LegacyVersion which is always smaller than a Version.
version = '.'.join(version.split('.')[:3])
return parse_version(version)
def export(self, location):
"""Export the Git repository at the url to the destination location"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
try:
if not location.endswith('/'):
location = location + '/'
self.run_command(
['checkout-index', '-a', '-f', '--prefix', location],
show_stdout=False, cwd=temp_dir)
finally:
rmtree(temp_dir)
def check_rev_options(self, rev, dest, rev_options):
"""Check the revision options before checkout to compensate that tags
and branches may need origin/ as a prefix.
Returns the SHA1 of the branch or tag if found.
"""
revisions = self.get_short_refs(dest)
origin_rev = 'origin/%s' % rev
if origin_rev in revisions:
# remote branch
return [revisions[origin_rev]]
elif rev in revisions:
# a local tag or branch name
return [revisions[rev]]
else:
logger.warning(
"Could not find a tag or branch '%s', assuming commit.", rev,
)
return rev_options
def check_version(self, dest, rev_options):
"""
Compare the current sha to the ref. ref may be a branch or tag name,
but current rev will always point to a sha. This means that a branch
or tag will never compare as True. So this ultimately only matches
against exact shas.
"""
return self.get_revision(dest).startswith(rev_options[0])
def switch(self, dest, url, rev_options):
self.run_command(['config', 'remote.origin.url', url], cwd=dest)
self.run_command(['checkout', '-q'] + rev_options, cwd=dest)
self.update_submodules(dest)
def update(self, dest, rev_options):
# First fetch changes from the default remote
if self.get_git_version() >= parse_version('1.9.0'):
# fetch tags in addition to everything else
self.run_command(['fetch', '-q', '--tags'], cwd=dest)
else:
self.run_command(['fetch', '-q'], cwd=dest)
# Then reset to wanted revision (maybe even origin/master)
if rev_options:
rev_options = self.check_rev_options(
rev_options[0], dest, rev_options,
)
self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest)
#: update submodules
self.update_submodules(dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = [rev]
rev_display = ' (to %s)' % rev
else:
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Cloning %s%s to %s', url, rev_display, display_path(dest),
)
self.run_command(['clone', '-q', url, dest])
if rev:
rev_options = self.check_rev_options(rev, dest, rev_options)
# Only do a checkout if rev_options differs from HEAD
if not self.check_version(dest, rev_options):
self.run_command(
['checkout', '-q'] + rev_options,
cwd=dest,
)
#: repo may contain submodules
self.update_submodules(dest)
def get_url(self, location):
"""Return URL of the first remote encountered."""
remotes = self.run_command(
['config', '--get-regexp', 'remote\..*\.url'],
show_stdout=False, cwd=location)
remotes = remotes.splitlines()
found_remote = remotes[0]
for remote in remotes:
if remote.startswith('remote.origin.url '):
found_remote = remote
break
url = found_remote.split(' ')[1]
return url.strip()
def get_revision(self, location):
current_rev = self.run_command(
['rev-parse', 'HEAD'], show_stdout=False, cwd=location)
return current_rev.strip()
def get_full_refs(self, location):
"""Yields tuples of (commit, ref) for branches and tags"""
output = self.run_command(['show-ref'],
show_stdout=False, cwd=location)
for line in output.strip().splitlines():
commit, ref = line.split(' ', 1)
yield commit.strip(), ref.strip()
def is_ref_remote(self, ref):
return ref.startswith('refs/remotes/')
def is_ref_branch(self, ref):
return ref.startswith('refs/heads/')
def is_ref_tag(self, ref):
return ref.startswith('refs/tags/')
def is_ref_commit(self, ref):
"""A ref is a commit sha if it is not anything else"""
return not any((
self.is_ref_remote(ref),
self.is_ref_branch(ref),
self.is_ref_tag(ref),
))
# Should deprecate `get_refs` since it's ambiguous
def get_refs(self, location):
return self.get_short_refs(location)
def get_short_refs(self, location):
"""Return map of named refs (branches or tags) to commit hashes."""
rv = {}
for commit, ref in self.get_full_refs(location):
ref_name = None
if self.is_ref_remote(ref):
ref_name = ref[len('refs/remotes/'):]
elif self.is_ref_branch(ref):
ref_name = ref[len('refs/heads/'):]
elif self.is_ref_tag(ref):
ref_name = ref[len('refs/tags/'):]
if ref_name is not None:
rv[ref_name] = commit
return rv
def _get_subdirectory(self, location):
"""Return the relative path of setup.py to the git repo root."""
# find the repo root
git_dir = self.run_command(['rev-parse', '--git-dir'],
show_stdout=False, cwd=location).strip()
if not os.path.isabs(git_dir):
git_dir = os.path.join(location, git_dir)
root_dir = os.path.join(git_dir, '..')
# find setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)",
orig_location,
)
return None
# relative path of setup.py to repo root
if samefile(root_dir, location):
return None
return os.path.relpath(location, root_dir)
def get_src_requirement(self, dist, location):
repo = self.get_url(location)
if not repo.lower().startswith('git:'):
repo = 'git+' + repo
egg_project_name = dist.egg_name().split('-', 1)[0]
if not repo:
return None
current_rev = self.get_revision(location)
req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name)
subdirectory = self._get_subdirectory(location)
if subdirectory:
req += '&subdirectory=' + subdirectory
return req
def get_url_rev(self):
"""
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
"""
if '://' not in self.url:
assert 'file:' not in self.url
self.url = self.url.replace('git+', 'git+ssh://')
url, rev = super(Git, self).get_url_rev()
url = url.replace('ssh://', '')
else:
url, rev = super(Git, self).get_url_rev()
return url, rev
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
self.run_command(
['submodule', 'update', '--init', '--recursive', '-q'],
cwd=location,
)
@classmethod
def controls_location(cls, location):
if super(Git, cls).controls_location(location):
return True
try:
r = cls().run_command(['rev-parse'],
cwd=location,
show_stdout=False,
on_returncode='ignore')
return not r
except BadCommand:
logger.debug("could not determine if %s is under git control "
"because git is not available", location)
return False
vcs.register(Git)
|
anhstudios/swganh
|
refs/heads/develop
|
data/scripts/templates/object/draft_schematic/clothing/shared_clothing_ith_hat_casual_03.py
|
2
|
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Intangible()
result.template = "object/draft_schematic/clothing/shared_clothing_ith_hat_casual_03.iff"
result.attribute_template_id = -1
result.stfName("string_id_table","")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result
|
HSC-Users/hscTools
|
refs/heads/master
|
bick/python/meeus.py
|
1
|
#!/usr/bin/env python
import numpy as np
import inspect
RAD = np.pi/180.0
DEG = 180.0/np.pi
JD2000 = 2451545.00
def JCentury (JD):
return (JD - JD2000)/36525.0
def T2JD(T):
return 36525.0*T + JD2000
def eclipticObliquity (JD, debug=False):
T = JCentury(JD)
U = T / 100.0
correction = - 4680.93 * U \
- 1.55 * U**2\
+ 1999.25 * U**3\
- 51.38 * U**4\
- 249.67 * U**5\
- 39.05 * U**6\
+ 7.12 * U**7\
+ 27.87 * U**8\
+ 5.79 * U**9\
+ 2.45 * U**10
epsilon0 = 23.0 + 26.0/60.0 + (21.488 + correction)/3600.0
if debug:
print "%-24s %f" % (inspect.stack()[0][3], epsilon0)
return epsilon0
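# Worked example (editor's note): at J2000.0 (T = U = 0) the correction term is
# zero, so eclipticObliquity(JD2000) reduces to 23 + 26/60. + 21.448/3600.
# ~= 23.4393 degrees, the mean obliquity of the ecliptic at epoch J2000.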
def reduceAngle(angle):
angle = angle - int(angle/360.0)*360
if angle < 0.0:
angle += 360.0
return angle
# ------------------------------------------------------------------
# ------------------------------------------------------------------
def JD2epoch(JD):
epoch = 2000 + (JD - JD2000)/365.25
return epoch
def epoch2JD(epoch):
jd = 365.25*(epoch - 2000.0) + JD2000
return jd
def T2epoch(T):
return JD2epoch(T2JD(T))
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunGeoMeanLong (T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
L0 = reduceAngle(280.46646 + 36000.76983*T + 0.0003032*T**2.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], L0)
return L0
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunMeanAnom (T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
M = reduceAngle(357.52911 + 35999.05029*T + 0.0001537*T**2.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], M)
return M
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunEquationOfCenter (T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
M = sunMeanAnom (T)
C = reduceAngle((1.914602 - 0.004817*T - 0.000014*T**2.0 )*np.sin(RAD*M) \
+ ( 0.019993 - 0.000101*T )*np.sin(RAD*2.0*M) \
+ 0.000289*np.sin(RAD*3.0*M))
if debug:
print "%-24s %f" % (inspect.stack()[0][3], C)
return C
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunTrueLongitude(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
L0 = sunGeoMeanLong(T)
C = sunEquationOfCenter(T)
ret = reduceAngle(L0 + C)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], ret)
return ret
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunTrueLongJ2000(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
epoch = T2epoch(T)
trueLong = sunTrueLongitude(T)
trueLongJ2000 = reduceAngle(trueLong - 0.01397*(epoch - 2000.0))
if debug:
print "%-24s %f" % (inspect.stack()[0][3], trueLongJ2000)
return trueLongJ2000
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunRAdec(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
trueLongJ2000 = RAD * sunTrueLongJ2000(T)
    epsilon0 = RAD * eclipticObliquity(T2JD(T))  # eclipticObliquity expects a JD, not centuries
    alpha = np.arctan2(np.cos(epsilon0)*np.sin(trueLongJ2000), np.cos(trueLongJ2000))
    delta = np.arcsin( np.sin(epsilon0)*np.sin(trueLongJ2000) )
alpha = reduceAngle(DEG*alpha)
delta = DEG*delta
if debug:
print "%-24s %f %f" % (inspect.stack()[0][3], alpha, delta)
return alpha, delta
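# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunAppLongitude(T, isJD=False, debug=False):
    # Editor's sketch: sunAppRAdec() below references this helper, which was
    # missing from the file. The formula follows Meeus's low-accuracy apparent
    # longitude (true longitude corrected for nutation and aberration).
    if isJD:
        T = JCentury(T)
    Omega = RAD * reduceAngle(125.04 - 1934.136*T)
    lamb = reduceAngle(sunTrueLongitude(T) - 0.00569 - 0.00478*np.sin(Omega))
    if debug:
        print "%-24s %f" % (inspect.stack()[0][3], lamb)
    return lamb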
# --------------------------------------------------------------------
# --------------------------------------------------------------------
def sunAppRAdec (T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
lamb = RAD * sunAppLongitude(T)
Omega = RAD* (125.04 - 1934.136*T)
    epsilon0 = eclipticObliquity(T2JD(T))  # eclipticObliquity expects a JD, not centuries
epsilon = RAD * ( epsilon0 + 0.00256*np.cos(Omega) )
    alpha = np.arctan2(np.cos(epsilon)*np.sin(lamb), np.cos(lamb))
    delta = np.arcsin( np.sin(epsilon)*np.sin(lamb) )
alpha = reduceAngle(DEG*alpha)
delta = DEG*delta
if debug:
print "%-24s %f %f" % (inspect.stack()[0][3], alpha, delta)
return alpha, delta
def moonMeanLong(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
Lp = reduceAngle(218.3164477 + 481267.88123421*T - 0.0015786*T*T + T*T*T/538841.0 - T**4/65194000.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], Lp)
return Lp
def moonMeanElong(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
D = reduceAngle(297.8501921 + 445267.1114034*T - 0.0018819*T*T + T*T*T/545868.0 - T**4/113065000.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], D)
return D
def moonMeanAnom(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
Mp = reduceAngle(134.9633964 + 477198.8675055*T + 0.0087414*T*T + T*T*T/69699.0 + T**4/14712000.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], Mp)
return Mp
def moonArgOfLat(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
F = reduceAngle(93.2720950 + 483202.0175233*T - 0.0036539*T*T - T*T*T/3526000.0 + T**4/863310000.0)
if debug:
print "%-24s %f" % (inspect.stack()[0][3], F)
return F
def lunarPosition(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
Lp = RAD*moonMeanLong(T, debug=debug)
D = RAD*moonMeanElong(T, debug=debug)
M = RAD*sunMeanAnom(T, debug=debug)
Mp = RAD*moonMeanAnom(T, debug=debug)
F = RAD*moonArgOfLat(T, debug=debug)
A1 = RAD*reduceAngle((119.75 + 131.849*T))
A2 = RAD*reduceAngle((53.09 + 479264.290*T))
A3 = RAD*reduceAngle((313.45 + 481266.484*T))
E = RAD*reduceAngle(1.0 - 0.002516*T - 0.0000074*T*T)
if debug:
print "%-24s A1,A2,A3,E %f %f %f %f" % (inspect.stack()[0][3], DEG*A1, DEG*A2, DEG*A3, DEG*E)
sigL = 3958.0*np.sin(A1) + 1962.0*np.sin(Lp - F) + 318.0*np.sin(A2)
sigR = 0.0
for arr in table47a():
cD, cM, cMp, cF, sl, sr = arr
if False: #cM in (1,-1):
sl *= E
sr *= E
if False: #cM in (2, -2):
sl *= E*E
sr *= E*E
arg = cD*D + cM*M + cMp*Mp + cF*F
sigL += sl*np.sin(arg)
sigR += sr*np.cos(arg)
sigB = -2235.0*np.sin(Lp) \
+ 382.0*np.sin(A3) \
+ 175.0*np.sin(A1 - F) \
+ 175.0*np.sin(A1 + F) \
+ 127.0*np.sin(Lp - Mp) \
- 115.0*np.sin(Lp + Mp)
for arr in table47b():
cD, cM, cMp, cF, sl = arr
if False: #cM in (1,-1):
print cM
sl *= E
if False: #cM in (2, -2):
print cM
sl *= E*E
arg = cD*D + cM*M + cMp*Mp + cF*F
sigB += sl*np.sin(arg)
if debug:
print "sL,B,R = ", -1127527, -3229126, -16590875
print "%-24s sigL,B,R %f %f %f" % (inspect.stack()[0][3], sigL, sigB, sigR)
lamb = reduceAngle(DEG*Lp + sigL/1000000.0)
beta = sigB/1000000.0
delta = 385000.56 + sigR/1000.0
if debug:
print "%-24s lamb,beta,delt %f %f %f" % (inspect.stack()[0][3], lamb, beta, delta)
return lamb, beta, delta
def moonIllumFrac(T, isJD=False, debug=False):
if isJD:
T = JCentury(T)
highPrecision = False
if highPrecision:
# high precision
lamb, beta, earth_moon_dist = lunarPosition(T)
alpha, delta = sunAppRAdec(T)
        lamb0, _ = eq2ecl(RAD*alpha, RAD*delta, T2JD(T))  # eq2ecl needs the JD as well
        cos_psi = np.cos(beta)*np.cos(RAD*lamb - lamb0)
        psi = np.arccos(cos_psi)
        Ro = 1.5e11
        tani = (Ro*np.sin(psi))/(earth_moon_dist - Ro*cos_psi)
        i = np.arctan(tani)
else:
D = RAD*moonMeanElong(T, debug=debug)
M = RAD*sunMeanAnom(T, debug=debug)
Mp = RAD*moonMeanAnom(T, debug=debug)
i = reduceAngle(180.0
- DEG*D
- 6.289*np.sin(Mp)
+ 2.100*np.sin(M)
- 1.274*np.sin(2*D - Mp)
- 0.658*np.sin(2*D)
- 0.214*np.sin(2*Mp)
- 0.110*np.sin(D))
k = (1.0 + np.cos(RAD*i))/2.0
if debug:
print "%-24s %f %f" % (inspect.stack()[0][3], i, k)
return k
# ------------------------------------------------------------------
# ------------------------------------------------------------------
def calendar2JD (Y, M, D, H=0, min=0, S=0):
HpD = 24.0
minpD = HpD*60.0
SpD = minpD*60.0
if ( M <= 2 ):
Y -= 1
M += 12
A = int(Y/100)
B = 2 - A + int(A/4)
(y,m,d) = (1582, 10, 4)
if (Y<y or
(Y==y and M<m) or
(Y==y and M==m and D<=4)):
B = 0
JD = int(365.25*(Y + 4716)) + int(30.6001*(M+1)) + D + B - 1524.5
JD += H/HpD + min/minpD + S/SpD
return JD
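# Worked example (editor's note, Meeus ex. 7.a): calendar2JD(1957, 10, 4.81)
# evaluates to 2436116.31, the Julian Day of the launch of Sputnik 1.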
# ------------------------------------------------------------------
# ------------------------------------------------------------------
def JD2calendar (JD):
JD += 0.5
Z = int (JD) # integer part
F = JD - Z # decimal part
alpha = int( (Z - 1867216.25)/36524.25 )
    A = Z if ( Z < 2299161 ) else Z + 1 + alpha - int(alpha/4)
B = A + 1524
C = int( (B - 122.1)/365.25 )
D = int( 365.25*C )
E = int( (B-D)/30.6001 )
mday = B - D - int(30.6001*E) + F
mon = E-1 if (E < 14) else (E-13)
year = C-4716 if (mon > 2) else (C-4715)
hour = 24.0*F
H = int(hour)
min = (hour - H)*60.0
Min = int(min)
s = (min - Min)*60.0
return (year, mon, mday, H, Min, s)
# ------------------------------------------------------------------
# ------------------------------------------------------------------
def calendar2epoch(Y, M, D, H=0, min=0, S=0):
JD = calendar2JD(Y,M,D, H,min,S)
epoch = 2000.0 + 100.0*JCentury(JD)
return epoch
# ------------------------------------------------------------------
# ------------------------------------------------------------------
def epoch2calendar (epoch):
jd = (epoch - 2000.0) * 365.25
JD = JD2000 + jd
(Y, M, D, H, min, S) = JD2calendar(JD)
return (Y, M, D, H, min, S)
# ----------------------------------------------------------------
# ----------------------------------------------------------------
def yearDay(Y, M, D):
    is_leap = 1 if ((Y % 4 == 0 and Y % 100 != 0) or Y % 400 == 0) else 0
K = 1 if (is_leap) else 2
yday = int(275.0*M/9.0) - K*int( (M+9.0)/12.0 ) + D - 30
return yday
####################################################################
#
# 12 12 12 12
#
###################################################################
def greenwichSidereal0hUT (JD):
Y, M, D, H, m, S = JD2calendar(JD)
JDmidnight = calendar2JD(Y, M, D, 0, 0, 0)
T = JCentury(JDmidnight)
theta0 = 100.46061837 + 36000.770053608*T + 0.0003879330*T**2 - T**3/38710000.0
return reduceAngle(theta0)
def greenwichSidereal (JD):
T = JCentury(JD)
theta0 = 280.46061837 + 360.98564736629*(JD - JD2000) + 0.0003879330*T**2 - T**3/38710000.0
return reduceAngle(theta0)
def ecl2eq(lamb, beta, JD):
epsilon = RAD*eclipticObliquity(JD)
numerator = np.sin(lamb)*np.cos(epsilon) - \
np.tan(beta)*np.sin(epsilon)
denominator = np.cos(lamb)
    alpha = np.arctan2 (numerator, denominator)
    delta = np.arcsin( np.sin(beta)*np.cos(epsilon) +
np.cos(beta)*np.sin(epsilon)*np.sin(lamb) )
return alpha, delta
def eq2ecl (alpha, delta, JD):
epsilon = RAD*eclipticObliquity(JD)
numerator = np.sin(alpha)*np.cos(epsilon) + np.tan(delta)*np.sin(epsilon)
denominator = np.cos(alpha)
    lamb = np.arctan2 (numerator, denominator)
    beta = np.arcsin( np.sin(delta)*np.cos(epsilon) -
            np.cos(delta)*np.sin(epsilon)*np.sin(alpha) )
return lamb, beta
#------------------------------------------------------------------
# function: mag2flux()
# Purpose: Get the flux from a star given its magnitude
# Req'd parameters: filter = photometric filter
# magnitude = self-expl.
#------------------------------------------------------------------
def mag2flux (filt, mag, exptime=1.0, radius=1.0/np.sqrt(np.pi)):
area = np.pi*radius**2
# get the flux
# http://www.astro.utoronto.ca/~patton/astro/mags.html#flux
#1 Jy = 10^-23 erg sec^-1 cm^-2 Hz^-1
#1 Jy = 1.51e7 photons sec^-1 m^-2 (dlambda/lambda)^-1
filter_specs = {
'U' : [0.36, 0.15, 1810],
'B' : [0.44, 0.22, 4260],
'V' : [0.55, 0.16, 3640],
'R' : [0.64, 0.23, 3080],
'I' : [0.79, 0.19, 2550],
'J' : [1.26, 0.16, 1600],
'H' : [1.60, 0.23, 1080],
'K' : [2.22, 0.23, 670],
'g' : [0.52, 0.14, 3730],
'r' : [0.67, 0.14, 4490],
'i' : [0.79, 0.16, 4760],
'z' : [0.91, 0.13, 4810]
}
if filt not in filter_specs:
print "Warning Filter "+filt+" not in database."
return 0.0
# variable names mag_flux_Jy '_Jy' --> mag_flux is *in* Janskys
# photon_Flux_per_Jy --> rate *per* Jansky
lamb, dlambdaOverLambda, mag0_flux_Jy = filter_specs[filt]
mag_flux_Jy = mag0_flux_Jy * 10**(-0.4*mag)
photonFlux_per_Jy = 1.51e7 * dlambdaOverLambda
mag_flux_phot = mag_flux_Jy * photonFlux_per_Jy * exptime * area
return mag_flux_phot # photons per s per m^2 (if no exptime,area given)
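# Worked example (editor's note): with the default 1 m^2 collecting area,
#   mag2flux('V', 20.0, exptime=60.0)
# is 3640 * 1e-8 Jy  *  (1.51e7 * 0.16) photons s^-1 m^-2 Jy^-1  *  60 s  *  1 m^2
# ~= 5.3e3 photons, before any throughput or detector losses.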
def table47a():
table = [
# cD cM cMp CF sL sR
[0, 0, 1, 0, 6288774, -20905355],
[2, 0, -1, 0, 1274027, -3699111],
[2, 0, 0, 0, 658314, -2955968],
[0, 0, 2, 0, 213618, -569925],
[0, 1, 0, 0, -185116, 48888],
[0, 0, 0, 2, -114332, -3149],
[2, 0, -2, 0, 58793, 246158],
[2, -1, -1, 0, 57066, -152138],
[2, 0, 1, 0, 53322, -170733],
[2, -1, 0, 0, 45758, -204586],
[0, 1, -1, 0, -40923, -129620],
[1, 0, 0, 0, -34720, 108743],
[0, 1, 1, 0, -30383, 104755],
[2, 0, 0, -2, 15327, 10321],
[0, 0, 1, 2, -12528, 0],
[0, 0, 1, -2, 10980, 79661],
[4, 0, -1, 0, 10675, -34782],
[0, 0, 3, 0, 10034, -23210],
[4, 0, -2, 0, 8548 , -21636],
[2, 1, -1, 0, -7888 , 24208],
[2, 1, 0, 0, -6766 , 30824],
[1, 0, -1, 0, -5163 , -8379],
[1, 1, 0, 0, 4987 , -16675],
[2, -1, 1, 0, 4036 , -12831],
[2, 0, 2, 0, 3994 , -10445],
[4, 0, 0, 0, 3861 , -11650],
[2, 0, -3, 0, 3665 , 14403],
[0, 1, -2, 0, -2689 , -7003],
[2, 0, -1, 2, -2602 , 0],
[2, -1, -2, 0, 2390 , 10056],
[1, 0, 1, 0, -2348 , 6322],
[2, -2, 0, 0, 2236 , -9884],
[0, 1, 2, 0, -2120 , 5751],
[0, 2, 0, 0, -2069 , 0],
[2, -2, -1, 0, 2048 , -4950],
[2, 0, 1, -2, -1773 , 4130],
[2, 0, 0, 2, -1595 , 0],
[4, -1, -1, 0, 1215 , -3958],
[0, 0, 2, 2, -1110 , 0],
[3, 0, -1, 0, -892 , 3258],
[2, 1, 1, 0, -810 , 2616],
[4, -1, -2, 0, 759 , -1897],
[0, 2, -1, 0, -713 , -2117],
[2, 2, -1, 0, -700 , 2354],
[2, 1, -2, 0, 691 , 0],
[2, -1, 0, -2, 596 , 0],
[4, 0, 1, 0, 549 , -1423],
[0, 0, 4, 0, 537 , -1117],
[4, -1, 0, 0, 520 , -1571],
[1, 0, -2, 0, -487 , -1739],
[2, 1, 0, -2, -399 , 0],
[0, 0, 2, -2, -381 , -4421],
[1, 1, 1, 0, 351 , 0],
[3, 0, -2, 0, -340 , 0],
[4, 0, -3, 0, 330 , 0],
[2, -1, 2, 0, 327 , 0],
[0, 2, 1, 0, -323 , 1165],
[1, 1, -1, 0, 299 , 0],
[2, 0, 3, 0, 294 , 0],
[2, 0, -1, -2, 0 , 8752],
]
return table
def table47b():
table = [
[0, 0, 0, 1, 5128122],
[0, 0, 1, 1, 280602],
[0, 0, 1, -1, 277693],
[2, 0, 0, -1, 173237],
[2, 0, -1, 1, 55413],
[2, 0, -1, -1, 46271],
[2, 0, 0, 1, 32573],
[0, 0, 2, 1, 17198],
[2, 0, 1, -1, 9266],
[0, 0, 2, -1, 8822],
[2, -1, 0, -1, 8216],
[2, 0, -2, -1, 4324],
[2, 0, 1, 1, 4200],
[2, 1, 0, -1, -3359],
[2, -1, -1, 1, 2463],
[2, -1, 0, 1, 2211],
[2, -1, -1, -1, 2065],
[0, 1, -1, -1, -1870],
[4, 0, -1, -1, 1828],
[0, 1, 0, 1, -1794],
[0, 0, 0, 3, -1749],
[0, 1, -1, 1, -1565],
[1, 0, 0, 1, -1491],
[0, 1, 1, 1, -1475],
[0, 1, 1, -1, -1410],
[0, 1, 0, -1, -1344],
[1, 0, 0, -1, -1335],
[0, 0, 3, 1, 1107],
[4, 0, 0, -1, 1021],
[4, 0, -1, 1, 833],
[0, 0, 1, -3, 777],
[4, 0, -2, 1, 671],
[2, 0, 0, -3, 607],
[2, 0, 2, -1, 596],
[2, -1, 1, -1, 491],
[1, 0, -2, 1, -451],
[0, 0, 3, -1, 439],
[2, 0, 2, 1, 422],
[2, 0, -3, -1, 421],
[2, 1, -1, 1, -366],
[2, 1, 0, 1, -351],
[4, 0, 0, 1, 331],
[2, -1, 1, 1, 315],
[2, -2, 0, -1, 302],
[0, 0, 1, 3, -283],
[2, 1, 1, -1, -229],
[1, 1, 0, -1, 223],
[1, 1, 0, 1, 223],
[0, 1, -2, -1, -220],
[2, 1, -1, -1, -220],
[1, 0, 1, 1, -185],
[2, -1, -2, -1, 181],
[0, 1, 2, 1, -177],
[4, 0, -2, -1, 176],
[4, -1, -1, -1, 166],
[1, 0, 1, -1, -164],
[4, 0, 1, -1, 132],
[1, 0, -1, -1, -119],
[4, -1, 0, -1, 115],
[2, -2, 0, 1, 107],
]
return table
if __name__ == '__main__':
JD = 2448724.5
lamb, beta, delt = lunarPosition(JD, debug=True)
f = moonIllumFrac(JD, isJD=True, debug=True)
cal = JD2calendar(JD)
jd = calendar2JD(*cal)
print "Calendar: ", cal
print "JD: ", jd
epoch = JD2epoch(JD)
jd = epoch2JD(epoch)
print "Epoch: ", epoch
print "JD: ", jd
print lamb, beta, delt
print f
|
divio/django-tinymce
|
refs/heads/master
|
setup.py
|
8
|
#!/usr/bin/env python
from distutils.core import setup
import metadata
app_name = metadata.name
version = metadata.version
setup(
name = "django-%s" % app_name,
version = version,
packages = [app_name, '%s.templatetags' % app_name],
package_data = {app_name: ['templates/tinymce/*']},
author = "Joost Cassee",
author_email = "joost@cassee.net",
description = "A Django application that contains a widget to render a" \
" form field as a TinyMCE editor.",
long_description = \
"""
Use the TinyMCE editor for your form textareas.
Features:
* Use as a form widget or with a view.
* Enhanced support for content languages.
* Integration with the TinyMCE spellchecker.
* Enables predefined link and image lists for dialogs.
* Can compress the TinyMCE javascript files.
* Integration with django-filebrowser.
""",
license = "MIT License",
keywords = "django widget tinymce",
classifiers = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
platforms = ['any'],
url = "http://code.google.com/p/django-%s/" % app_name,
download_url = "http://code.google.com/p/django-%s/downloads/list" \
% app_name,
)
|
vicky2135/lucious
|
refs/heads/master
|
oscar/lib/python2.7/site-packages/phonenumbers/data/region_CF.py
|
1
|
"""Auto-generated file, do not edit by hand. CF metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_CF = PhoneMetadata(id='CF', country_code=236, international_prefix='00',
general_desc=PhoneNumberDesc(national_number_pattern='[278]\\d{7}', possible_number_pattern='\\d{8}', possible_length=(8,)),
fixed_line=PhoneNumberDesc(national_number_pattern='2[12]\\d{6}', example_number='21612345', possible_length=(8,)),
mobile=PhoneNumberDesc(national_number_pattern='7[0257]\\d{6}', example_number='70012345', possible_length=(8,)),
toll_free=PhoneNumberDesc(),
premium_rate=PhoneNumberDesc(national_number_pattern='8776\\d{4}', possible_number_pattern='\\d{8}', example_number='87761234', possible_length=(8,)),
shared_cost=PhoneNumberDesc(),
personal_number=PhoneNumberDesc(),
voip=PhoneNumberDesc(),
pager=PhoneNumberDesc(),
uan=PhoneNumberDesc(),
voicemail=PhoneNumberDesc(),
no_international_dialling=PhoneNumberDesc(),
number_format=[NumberFormat(pattern='(\\d{2})(\\d{2})(\\d{2})(\\d{2})', format='\\1 \\2 \\3 \\4')])
|
ckaestne/CIDE
|
refs/heads/master
|
CIDE_Language_Python-test/testfiles/chunk.py
|
9
|
"""Simple class to read IFF chunks.
An IFF chunk (used in formats such as AIFF, TIFF, RMFF (RealMedia File
Format)) has the following structure:
+----------------+
| ID (4 bytes) |
+----------------+
| size (4 bytes) |
+----------------+
| data |
| ... |
+----------------+
The ID is a 4-byte string which identifies the type of chunk.
The size field (a 32-bit value, encoded using big-endian byte order)
gives the size of the whole chunk, including the 8-byte header.
Usually an IFF-type file consists of one or more chunks. The proposed
usage of the Chunk class defined here is to instantiate an instance at
the start of each chunk and read from the instance until it reaches
the end, after which a new instance can be instantiated. At the end
of the file, creating a new instance will fail with an EOFError
exception.
Usage:
while 1:
try:
chunk = Chunk(file)
except EOFError:
break
chunktype = chunk.getname()
while 1:
data = chunk.read(nbytes)
if not data:
pass
# do something with data
The interface is file-like. The implemented methods are:
read, close, seek, tell, isatty.
Extra methods are: skip() (called by close, skips to the end of the chunk),
getname() (returns the name (ID) of the chunk)
The __init__ method has one required argument, a file-like object
(including a chunk instance), and one optional argument, a flag which
specifies whether or not chunks are aligned on 2-byte boundaries. The
default is 1, i.e. aligned.
"""
class Chunk:
def __init__(self, file, align = 1, bigendian = 1, inclheader = 0):
import struct
self.closed = 0
self.align = align # whether to align to word (2-byte) boundaries
if bigendian:
strflag = '>'
else:
strflag = '<'
self.file = file
self.chunkname = file.read(4)
if len(self.chunkname) < 4:
raise EOFError
try:
self.chunksize = struct.unpack(strflag+'l', file.read(4))[0]
except struct.error:
raise EOFError
if inclheader:
self.chunksize = self.chunksize - 8 # subtract header
self.size_read = 0
try:
self.offset = self.file.tell()
except (AttributeError, IOError):
self.seekable = 0
else:
self.seekable = 1
def getname(self):
"""Return the name (ID) of the current chunk."""
return self.chunkname
def getsize(self):
"""Return the size of the current chunk."""
return self.chunksize
def close(self):
if not self.closed:
self.skip()
self.closed = 1
def isatty(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return 0
def seek(self, pos, whence = 0):
"""Seek to specified position into the chunk.
Default position is 0 (start of chunk).
If the file is not seekable, this will result in an error.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if not self.seekable:
raise IOError, "cannot seek"
if whence == 1:
pos = pos + self.size_read
elif whence == 2:
pos = pos + self.chunksize
if pos < 0 or pos > self.chunksize:
raise RuntimeError
self.file.seek(self.offset + pos, 0)
self.size_read = pos
def tell(self):
if self.closed:
raise ValueError, "I/O operation on closed file"
return self.size_read
def read(self, size = -1):
"""Read at most size bytes from the chunk.
If size is omitted or negative, read until the end
of the chunk.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.size_read >= self.chunksize:
return ''
if size < 0:
size = self.chunksize - self.size_read
if size > self.chunksize - self.size_read:
size = self.chunksize - self.size_read
data = self.file.read(size)
self.size_read = self.size_read + len(data)
if self.size_read == self.chunksize and \
self.align and \
(self.chunksize & 1):
dummy = self.file.read(1)
self.size_read = self.size_read + len(dummy)
return data
def skip(self):
"""Skip the rest of the chunk.
If you are not interested in the contents of the chunk,
this method should be called so that the file points to
the start of the next chunk.
"""
if self.closed:
raise ValueError, "I/O operation on closed file"
if self.seekable:
try:
n = self.chunksize - self.size_read
# maybe fix alignment
if self.align and (self.chunksize & 1):
n = n + 1
self.file.seek(n, 1)
self.size_read = self.size_read + n
return
except IOError:
pass
while self.size_read < self.chunksize:
n = min(8192, self.chunksize - self.size_read)
dummy = self.read(n)
if not dummy:
raise EOFError
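if __name__ == '__main__':
    # Minimal usage sketch (not part of the original module): build one
    # big-endian chunk in memory and read it back. The chunk ID 'DEMO' and
    # the payload below are hypothetical values chosen only for illustration.
    import StringIO
    import struct as _struct
    payload = 'hello'
    stream = StringIO.StringIO('DEMO' + _struct.pack('>l', len(payload)) + payload)
    demo = Chunk(stream)
    print demo.getname(), demo.getsize(), demo.read()
    demo.close()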
|
abhikumar22/MYBLOG
|
refs/heads/master
|
blg/Lib/encodings/mac_greek.py
|
272
|
""" Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-greek',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> CONTROL CHARACTER
'\x01' # 0x01 -> CONTROL CHARACTER
'\x02' # 0x02 -> CONTROL CHARACTER
'\x03' # 0x03 -> CONTROL CHARACTER
'\x04' # 0x04 -> CONTROL CHARACTER
'\x05' # 0x05 -> CONTROL CHARACTER
'\x06' # 0x06 -> CONTROL CHARACTER
'\x07' # 0x07 -> CONTROL CHARACTER
'\x08' # 0x08 -> CONTROL CHARACTER
'\t' # 0x09 -> CONTROL CHARACTER
'\n' # 0x0A -> CONTROL CHARACTER
'\x0b' # 0x0B -> CONTROL CHARACTER
'\x0c' # 0x0C -> CONTROL CHARACTER
'\r' # 0x0D -> CONTROL CHARACTER
'\x0e' # 0x0E -> CONTROL CHARACTER
'\x0f' # 0x0F -> CONTROL CHARACTER
'\x10' # 0x10 -> CONTROL CHARACTER
'\x11' # 0x11 -> CONTROL CHARACTER
'\x12' # 0x12 -> CONTROL CHARACTER
'\x13' # 0x13 -> CONTROL CHARACTER
'\x14' # 0x14 -> CONTROL CHARACTER
'\x15' # 0x15 -> CONTROL CHARACTER
'\x16' # 0x16 -> CONTROL CHARACTER
'\x17' # 0x17 -> CONTROL CHARACTER
'\x18' # 0x18 -> CONTROL CHARACTER
'\x19' # 0x19 -> CONTROL CHARACTER
'\x1a' # 0x1A -> CONTROL CHARACTER
'\x1b' # 0x1B -> CONTROL CHARACTER
'\x1c' # 0x1C -> CONTROL CHARACTER
'\x1d' # 0x1D -> CONTROL CHARACTER
'\x1e' # 0x1E -> CONTROL CHARACTER
'\x1f' # 0x1F -> CONTROL CHARACTER
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> CONTROL CHARACTER
'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xb9' # 0x81 -> SUPERSCRIPT ONE
'\xb2' # 0x82 -> SUPERSCRIPT TWO
'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xb3' # 0x84 -> SUPERSCRIPT THREE
'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u0385' # 0x87 -> GREEK DIALYTIKA TONOS
'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
'\u0384' # 0x8B -> GREEK TONOS
'\xa8' # 0x8C -> DIAERESIS
'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
'\xa3' # 0x92 -> POUND SIGN
'\u2122' # 0x93 -> TRADE MARK SIGN
'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
'\u2022' # 0x96 -> BULLET
'\xbd' # 0x97 -> VULGAR FRACTION ONE HALF
'\u2030' # 0x98 -> PER MILLE SIGN
'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
'\xa6' # 0x9B -> BROKEN BAR
'\u20ac' # 0x9C -> EURO SIGN # before Mac OS 9.2.2, was SOFT HYPHEN
'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
'\u2020' # 0xA0 -> DAGGER
'\u0393' # 0xA1 -> GREEK CAPITAL LETTER GAMMA
'\u0394' # 0xA2 -> GREEK CAPITAL LETTER DELTA
'\u0398' # 0xA3 -> GREEK CAPITAL LETTER THETA
'\u039b' # 0xA4 -> GREEK CAPITAL LETTER LAMDA
'\u039e' # 0xA5 -> GREEK CAPITAL LETTER XI
'\u03a0' # 0xA6 -> GREEK CAPITAL LETTER PI
'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
'\xae' # 0xA8 -> REGISTERED SIGN
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\u03a3' # 0xAA -> GREEK CAPITAL LETTER SIGMA
'\u03aa' # 0xAB -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
'\xa7' # 0xAC -> SECTION SIGN
'\u2260' # 0xAD -> NOT EQUAL TO
'\xb0' # 0xAE -> DEGREE SIGN
'\xb7' # 0xAF -> MIDDLE DOT
'\u0391' # 0xB0 -> GREEK CAPITAL LETTER ALPHA
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
'\xa5' # 0xB4 -> YEN SIGN
'\u0392' # 0xB5 -> GREEK CAPITAL LETTER BETA
'\u0395' # 0xB6 -> GREEK CAPITAL LETTER EPSILON
'\u0396' # 0xB7 -> GREEK CAPITAL LETTER ZETA
'\u0397' # 0xB8 -> GREEK CAPITAL LETTER ETA
'\u0399' # 0xB9 -> GREEK CAPITAL LETTER IOTA
'\u039a' # 0xBA -> GREEK CAPITAL LETTER KAPPA
'\u039c' # 0xBB -> GREEK CAPITAL LETTER MU
'\u03a6' # 0xBC -> GREEK CAPITAL LETTER PHI
'\u03ab' # 0xBD -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
'\u03a8' # 0xBE -> GREEK CAPITAL LETTER PSI
'\u03a9' # 0xBF -> GREEK CAPITAL LETTER OMEGA
'\u03ac' # 0xC0 -> GREEK SMALL LETTER ALPHA WITH TONOS
'\u039d' # 0xC1 -> GREEK CAPITAL LETTER NU
'\xac' # 0xC2 -> NOT SIGN
'\u039f' # 0xC3 -> GREEK CAPITAL LETTER OMICRON
'\u03a1' # 0xC4 -> GREEK CAPITAL LETTER RHO
'\u2248' # 0xC5 -> ALMOST EQUAL TO
'\u03a4' # 0xC6 -> GREEK CAPITAL LETTER TAU
'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
'\xa0' # 0xCA -> NO-BREAK SPACE
'\u03a5' # 0xCB -> GREEK CAPITAL LETTER UPSILON
'\u03a7' # 0xCC -> GREEK CAPITAL LETTER CHI
'\u0386' # 0xCD -> GREEK CAPITAL LETTER ALPHA WITH TONOS
'\u0388' # 0xCE -> GREEK CAPITAL LETTER EPSILON WITH TONOS
'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
'\u2013' # 0xD0 -> EN DASH
'\u2015' # 0xD1 -> HORIZONTAL BAR
'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
'\xf7' # 0xD6 -> DIVISION SIGN
'\u0389' # 0xD7 -> GREEK CAPITAL LETTER ETA WITH TONOS
'\u038a' # 0xD8 -> GREEK CAPITAL LETTER IOTA WITH TONOS
'\u038c' # 0xD9 -> GREEK CAPITAL LETTER OMICRON WITH TONOS
'\u038e' # 0xDA -> GREEK CAPITAL LETTER UPSILON WITH TONOS
'\u03ad' # 0xDB -> GREEK SMALL LETTER EPSILON WITH TONOS
'\u03ae' # 0xDC -> GREEK SMALL LETTER ETA WITH TONOS
'\u03af' # 0xDD -> GREEK SMALL LETTER IOTA WITH TONOS
'\u03cc' # 0xDE -> GREEK SMALL LETTER OMICRON WITH TONOS
'\u038f' # 0xDF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
'\u03cd' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH TONOS
'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
'\u03c8' # 0xE3 -> GREEK SMALL LETTER PSI
'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
'\u03c6' # 0xE6 -> GREEK SMALL LETTER PHI
'\u03b3' # 0xE7 -> GREEK SMALL LETTER GAMMA
'\u03b7' # 0xE8 -> GREEK SMALL LETTER ETA
'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
'\u03be' # 0xEA -> GREEK SMALL LETTER XI
'\u03ba' # 0xEB -> GREEK SMALL LETTER KAPPA
'\u03bb' # 0xEC -> GREEK SMALL LETTER LAMDA
'\u03bc' # 0xED -> GREEK SMALL LETTER MU
'\u03bd' # 0xEE -> GREEK SMALL LETTER NU
'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
'\u03ce' # 0xF1 -> GREEK SMALL LETTER OMEGA WITH TONOS
'\u03c1' # 0xF2 -> GREEK SMALL LETTER RHO
'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
'\u03b8' # 0xF5 -> GREEK SMALL LETTER THETA
'\u03c9' # 0xF6 -> GREEK SMALL LETTER OMEGA
'\u03c2' # 0xF7 -> GREEK SMALL LETTER FINAL SIGMA
'\u03c7' # 0xF8 -> GREEK SMALL LETTER CHI
'\u03c5' # 0xF9 -> GREEK SMALL LETTER UPSILON
'\u03b6' # 0xFA -> GREEK SMALL LETTER ZETA
'\u03ca' # 0xFB -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
'\u03cb' # 0xFC -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
'\u0390' # 0xFD -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
'\u03b0' # 0xFE -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
'\xad' # 0xFF -> SOFT HYPHEN # before Mac OS 9.2.2, was undefined
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
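if __name__ == '__main__':
    # Minimal round-trip sketch (not part of the generated codec): encode two
    # Greek letters with the table above and decode them back.
    sample = '\u03b1\u03b2'  # GREEK SMALL LETTER ALPHA, GREEK SMALL LETTER BETA
    encoded, _ = Codec().encode(sample)
    decoded, _ = Codec().decode(encoded)
    assert decoded == sample
    print(encoded)  # b'\xe1\xe2'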
|
mszewczy/odoo
|
refs/heads/8.0
|
openerp/addons/test_documentation_examples/delegation.py
|
366
|
# -*- coding: utf-8 -*-
from openerp import models, fields
class Child0(models.Model):
_name = 'delegation.child0'
field_0 = fields.Integer()
class Child1(models.Model):
_name = 'delegation.child1'
field_1 = fields.Integer()
class Delegating(models.Model):
_name = 'delegation.parent'
_inherits = {
'delegation.child0': 'child0_id',
'delegation.child1': 'child1_id',
}
child0_id = fields.Many2one('delegation.child0', required=True, ondelete='cascade')
child1_id = fields.Many2one('delegation.child1', required=True, ondelete='cascade')
|
MalloyPower/parsing-python
|
refs/heads/master
|
front-end/testsuite-python-lib/Python-3.0/Lib/idlelib/AutoComplete.py
|
1
|
"""AutoComplete.py - An IDLE extension for automatically completing names.
This extension can complete either attribute names or file names. It can pop
a window with all available names, for the user to select from.
"""
import os
import sys
import string
from idlelib.configHandler import idleConf
# This string includes all chars that may be in a file name (without a path
# separator)
FILENAME_CHARS = string.ascii_letters + string.digits + os.curdir + "._~#$:-"
# This string includes all chars that may be in an identifier
ID_CHARS = string.ascii_letters + string.digits + "_"
# These constants represent the two different types of completions
COMPLETE_ATTRIBUTES, COMPLETE_FILES = range(1, 2+1)
from idlelib import AutoCompleteWindow
from idlelib.HyperParser import HyperParser
import __main__
SEPS = os.sep
if os.altsep: # e.g. '/' on Windows...
SEPS += os.altsep
class AutoComplete:
menudefs = [
('edit', [
("Show Completions", "<<force-open-completions>>"),
])
]
popupwait = idleConf.GetOption("extensions", "AutoComplete",
"popupwait", type="int", default=0)
def __init__(self, editwin=None):
self.editwin = editwin
if editwin is None: # subprocess and test
return
self.text = editwin.text
self.autocompletewindow = None
# id of delayed call, and the index of the text insert when the delayed
# call was issued. If _delayed_completion_id is None, there is no
# delayed call.
self._delayed_completion_id = None
self._delayed_completion_index = None
def _make_autocomplete_window(self):
return AutoCompleteWindow.AutoCompleteWindow(self.text)
def _remove_autocomplete_window(self, event=None):
if self.autocompletewindow:
self.autocompletewindow.hide_window()
self.autocompletewindow = None
def force_open_completions_event(self, event):
"""Happens when the user really wants to open a completion list, even
if a function call is needed.
"""
self.open_completions(True, False, True)
def try_open_completions_event(self, event):
"""Happens when it would be nice to open a completion list, but not
really necessary, for example after a dot, so function
calls won't be made.
"""
lastchar = self.text.get("insert-1c")
if lastchar == ".":
self._open_completions_later(False, False, False,
COMPLETE_ATTRIBUTES)
elif lastchar in SEPS:
self._open_completions_later(False, False, False,
COMPLETE_FILES)
def autocomplete_event(self, event):
"""Happens when the user wants to complete his word, and if neccesary,
open a completion list after that (if there is more than one
completion)
"""
if hasattr(event, "mc_state") and event.mc_state:
# A modifier was pressed along with the tab, continue as usual.
return
if self.autocompletewindow and self.autocompletewindow.is_active():
self.autocompletewindow.complete()
return "break"
else:
opened = self.open_completions(False, True, True)
if opened:
return "break"
def _open_completions_later(self, *args):
self._delayed_completion_index = self.text.index("insert")
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = \
self.text.after(self.popupwait, self._delayed_open_completions,
*args)
def _delayed_open_completions(self, *args):
self._delayed_completion_id = None
if self.text.index("insert") != self._delayed_completion_index:
return
self.open_completions(*args)
def open_completions(self, evalfuncs, complete, userWantsWin, mode=None):
"""Find the completions and create the AutoCompleteWindow.
Return True if successful (no syntax error or similar problem found).
If complete is True and there is nothing to complete and no start of
completion, do not open completions and return False.
If mode is given, will open a completion list only in this mode.
"""
# Cancel another delayed call, if it exists.
if self._delayed_completion_id is not None:
self.text.after_cancel(self._delayed_completion_id)
self._delayed_completion_id = None
hp = HyperParser(self.editwin, "insert")
curline = self.text.get("insert linestart", "insert")
i = j = len(curline)
if hp.is_in_string() and (not mode or mode==COMPLETE_FILES):
self._remove_autocomplete_window()
mode = COMPLETE_FILES
while i and curline[i-1] in FILENAME_CHARS:
i -= 1
comp_start = curline[i:j]
j = i
while i and curline[i-1] in FILENAME_CHARS + SEPS:
i -= 1
comp_what = curline[i:j]
elif hp.is_in_code() and (not mode or mode==COMPLETE_ATTRIBUTES):
self._remove_autocomplete_window()
mode = COMPLETE_ATTRIBUTES
while i and curline[i-1] in ID_CHARS:
i -= 1
comp_start = curline[i:j]
if i and curline[i-1] == '.':
hp.set_index("insert-%dc" % (len(curline)-(i-1)))
comp_what = hp.get_expression()
if not comp_what or \
(not evalfuncs and comp_what.find('(') != -1):
return
else:
comp_what = ""
else:
return
if complete and not comp_what and not comp_start:
return
comp_lists = self.fetch_completions(comp_what, mode)
if not comp_lists[0]:
return
self.autocompletewindow = self._make_autocomplete_window()
self.autocompletewindow.show_window(comp_lists,
"insert-%dc" % len(comp_start),
complete,
mode,
userWantsWin)
return True
def fetch_completions(self, what, mode):
"""Return a pair of lists of completions for something. The first list
is a sublist of the second. Both are sorted.
If there is a Python subprocess, get the completion list there. Otherwise,
either fetch_completions() is running in the subprocess itself or it
was called in an IDLE EditorWindow before any script had been run.
The subprocess environment is that of the most recently run script. If
two unrelated modules are being edited, some calltips in the current
module may be inoperative if the module was not the last to run.
"""
try:
rpcclt = self.editwin.flist.pyshell.interp.rpcclt
except:
rpcclt = None
if rpcclt:
return rpcclt.remotecall("exec", "get_the_completion_list",
(what, mode), {})
else:
if mode == COMPLETE_ATTRIBUTES:
if what == "":
namespace = __main__.__dict__.copy()
namespace.update(__main__.__builtins__.__dict__)
bigl = eval("dir()", namespace)
bigl.sort()
if "__all__" in bigl:
smalll = eval("__all__", namespace)
smalll.sort()
else:
smalll = [s for s in bigl if s[:1] != '_']
else:
try:
entity = self.get_entity(what)
bigl = dir(entity)
bigl.sort()
if "__all__" in bigl:
smalll = entity.__all__
smalll.sort()
else:
smalll = [s for s in bigl if s[:1] != '_']
except:
return [], []
elif mode == COMPLETE_FILES:
if what == "":
what = "."
try:
expandedpath = os.path.expanduser(what)
bigl = os.listdir(expandedpath)
bigl.sort()
smalll = [s for s in bigl if s[:1] != '.']
except OSError:
return [], []
if not smalll:
smalll = bigl
return smalll, bigl
def get_entity(self, name):
"""Lookup name in a namespace spanning sys.modules and __main.dict__"""
namespace = sys.modules.copy()
namespace.update(__main__.__dict__)
return eval(name, namespace)
|
resmo/ansible
|
refs/heads/devel
|
lib/ansible/modules/network/avi/avi_clusterclouddetails.py
|
28
|
#!/usr/bin/python
#
# @author: Gaurav Rastogi (grastogi@avinetworks.com)
# Eric Anderson (eanderson@avinetworks.com)
# module_check: supported
#
# Copyright: (c) 2017 Gaurav Rastogi, <grastogi@avinetworks.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: avi_clusterclouddetails
author: Gaurav Rastogi (@grastogi23) <grastogi@avinetworks.com>
short_description: Module for setup of ClusterCloudDetails Avi RESTful Object
description:
- This module is used to configure ClusterCloudDetails object
- more examples at U(https://github.com/avinetworks/devops)
requirements: [ avisdk ]
version_added: "2.5"
options:
state:
description:
- The state that should be applied on the entity.
default: present
choices: ["absent", "present"]
avi_api_update_method:
description:
- Default method for object update is HTTP PUT.
- Setting to patch will override that behavior to use HTTP PATCH.
version_added: "2.5"
default: put
choices: ["put", "patch"]
avi_api_patch_op:
description:
- Patch operation to use when using avi_api_update_method as patch.
version_added: "2.5"
choices: ["add", "replace", "delete"]
azure_info:
description:
- Azure info to configure cluster_vip on the controller.
- Field introduced in 17.2.5.
name:
description:
- Field introduced in 17.2.5.
required: true
tenant_ref:
description:
- It is a reference to an object of type tenant.
- Field introduced in 17.2.5.
url:
description:
- Avi controller URL of the object.
uuid:
description:
- Field introduced in 17.2.5.
extends_documentation_fragment:
- avi
'''
EXAMPLES = """
- name: Example to create ClusterCloudDetails object
avi_clusterclouddetails:
controller: 10.10.25.42
username: admin
password: something
state: present
name: sample_clusterclouddetails
"""
RETURN = '''
obj:
description: ClusterCloudDetails (api/clusterclouddetails) object
returned: success, changed
type: dict
'''
from ansible.module_utils.basic import AnsibleModule
try:
from ansible.module_utils.network.avi.avi import (
avi_common_argument_spec, avi_ansible_api, HAS_AVI)
except ImportError:
HAS_AVI = False
def main():
argument_specs = dict(
state=dict(default='present',
choices=['absent', 'present']),
avi_api_update_method=dict(default='put',
choices=['put', 'patch']),
avi_api_patch_op=dict(choices=['add', 'replace', 'delete']),
azure_info=dict(type='dict',),
name=dict(type='str', required=True),
tenant_ref=dict(type='str',),
url=dict(type='str',),
uuid=dict(type='str',),
)
argument_specs.update(avi_common_argument_spec())
module = AnsibleModule(
argument_spec=argument_specs, supports_check_mode=True)
if not HAS_AVI:
return module.fail_json(msg=(
'Avi python API SDK (avisdk>=17.1) or requests is not installed. '
'For more details visit https://github.com/avinetworks/sdk.'))
return avi_ansible_api(module, 'clusterclouddetails',
set([]))
if __name__ == '__main__':
main()
|
kennedyshead/home-assistant
|
refs/heads/dev
|
tests/components/lock/test_significant_change.py
|
7
|
"""Test the Lock significant change platform."""
from homeassistant.components.lock.significant_change import (
async_check_significant_change,
)
async def test_significant_change():
"""Detect Lock significant changes."""
old_attrs = {"attr_1": "a"}
new_attrs = {"attr_1": "b"}
assert (
async_check_significant_change(None, "locked", old_attrs, "locked", old_attrs)
is False
)
assert (
async_check_significant_change(None, "locked", old_attrs, "locked", new_attrs)
is False
)
assert (
async_check_significant_change(None, "locked", old_attrs, "unlocked", old_attrs)
is True
)
|
arcade-lab/tia-infrastructure
|
refs/heads/master
|
tools/parameters/core_parameters.py
|
1
|
"""
Classes and utilities for encapsulating physical constraints of target hardware and simulator.
"""
import sys
import numpy as np
from assembly.instruction import Op, SourceType, DestinationType
from parameters.exception import ParametrizationException
class CoreParameters:
"""
Parameters for the physical processing element core target of the assembler and simulator.
"""
def __init__(self, architecture=None, device_word_width=None, immediate_width=None, mm_instruction_width=None,
num_instructions=None, num_predicates=None, num_registers=None, has_multiplier=None,
has_two_word_product_multiplier=None, has_scratchpad=None, num_scratchpad_words=None,
latch_based_instruction_memory=None, ram_based_immediate_storage=None, num_input_channels=None,
num_output_channels=None, channel_buffer_depth=None, max_num_input_channels_to_check=None,
num_tags=None, has_speculative_predicate_unit=None, has_effective_queue_status=None,
has_debug_monitor=None, has_performance_counters=None):
"""
Generic initializer for a CoreParameters instance. Likely only to be used through alternative constructors.
:param architecture: device type
:param device_word_width: data word width for computation
:param immediate_width: immediate width per instruction
:param mm_instruction_width: number of memory mapped bits available for the instruction
:param num_instructions: number of instructions available to be stored in the core
:param num_predicates: number of predicate registers
:param num_registers: number of general purpose data registers
:param has_multiplier: whether we have an integer multiplier
:param has_two_word_product_multiplier: whether we have a full high/low two-word product
:param has_scratchpad: whether we have a private scratchpad memory
:param num_scratchpad_words: number of words in the scratchpad memory
:param latch_based_instruction_memory: whether to use latches for the instruction memory instead of flip-flops
:param ram_based_immediate_storage: whether to use a small RAM to store the immediates of the instructions
:param num_input_channels: number of channels coming from the interconnect into the core
:param num_output_channels: number of channels going to the interconnect from the core
:param channel_buffer_depth: depth of input and output channel buffers
:param max_num_input_channels_to_check: how many channels an instruction can depend on in this architecture
:param num_tags: number of different tag types supported by the architecture
:param has_speculative_predicate_unit: whether to use speculation to keep the pipeline full
:param has_effective_queue_status: whether to use detailed queue accounting
:param has_debug_monitor: whether to include a debug monitor that can read predicate and register information
:param has_performance_counters: whether to include performance counters
"""
# Generic initializer.
self.architecture = architecture
self.device_word_width = device_word_width
self.immediate_width = immediate_width
self.mm_instruction_width = mm_instruction_width
self.num_instructions = num_instructions
self.num_predicates = num_predicates
self.num_registers = num_registers
self.has_multiplier = has_multiplier
self.has_two_word_product_multiplier = has_two_word_product_multiplier
self.has_scratchpad = has_scratchpad
self.num_scratchpad_words = num_scratchpad_words
self.latch_based_instruction_memory = latch_based_instruction_memory
self.ram_based_immediate_storage = ram_based_immediate_storage
self.num_input_channels = num_input_channels
self.num_output_channels = num_output_channels
self.channel_buffer_depth = channel_buffer_depth
self.max_num_input_channels_to_check = max_num_input_channels_to_check
self.num_tags = num_tags
self.has_speculative_predicate_unit = has_speculative_predicate_unit
self.has_effective_queue_status = has_effective_queue_status
self.has_debug_monitor = has_debug_monitor
self.has_performance_counters = has_performance_counters
# --- Alternative Constructor ---
@classmethod
def from_dictionary(cls, dictionary):
"""
Instantiate a CoreParameters wrapper from a dictionary.
:param dictionary: loaded from a configuration file or elsewhere
:return: new CoreParameters instance
"""
# Filter the dictionary with only parameters necessary for the initializer.
key_filter_set = {"architecture",
"device_word_width",
"immediate_width",
"mm_instruction_width",
"num_instructions",
"num_predicates",
"num_registers",
"has_multiplier",
"has_two_word_product_multiplier",
"has_scratchpad",
"num_scratchpad_words",
"latch_based_instruction_memory",
"ram_based_immediate_storage",
"num_input_channels",
"num_output_channels",
"channel_buffer_depth",
"max_num_input_channels_to_check",
"num_tags",
"has_speculative_predicate_unit",
"has_effective_queue_status",
"has_debug_monitor",
"has_performance_counters"}
filtered_core_dictionary = {key: dictionary[key] for key in key_filter_set}
# Unpack the dictionary into the initializer.
return cls(**filtered_core_dictionary)
# --- Check on the Validity of Properties ---
def validate_instruction_format(self):
"""
Raise an error if the architectural specification is incomplete and an attempt is made to access derived
properties.
"""
# Make sure all attributes are set.
valid = True
for key in self.__dict__:
if self.__dict__[key] is None:
valid = False
break
if not valid:
print(self.__dict__, file=sys.stderr)
exception_string = f"The parameter {key} must be nonnull."
raise ParametrizationException(exception_string)
# Get the total number of bits for output checking purposes.
non_immediate_instruction_field_bit_counts = [1, # vi.
self.ptm_width,
self.ici_width,
self.ictb_width,
self.ictv_width,
self.op_width,
self.st_width,
self.si_width,
self.dt_width,
self.di_width,
self.oci_width,
self.oct_width,
self.icd_width,
self.pum_width]
non_immediate_instruction_width = sum(non_immediate_instruction_field_bit_counts)
# Make sure the proposed instruction encoding can actually fit.
used_instruction_code_space = non_immediate_instruction_width + self.immediate_width
if used_instruction_code_space > self.mm_instruction_width:
exception_string = f"The instruction with the given architectural parameters has a width of " \
+ f"{used_instruction_code_space} bits and cannot fit within the defined " \
+ f"memory-mapped instruction width of {self.mm_instruction_width} bits."
raise ParametrizationException(exception_string)
if used_instruction_code_space > self.phy_instruction_width:
exception_string = f"The instruction with the given architectural parameters has a width of " \
+ f"{used_instruction_code_space} bits and cannot fit within the defined physical " \
+ f"instruction width of {self.phy_instruction_width} bits."
raise ParametrizationException(exception_string)
# --- Derived Properties ---
@property
def true_ptm_width(self):
return self.num_predicates
@property
def false_ptm_width(self):
return self.num_predicates
@property
def ptm_width(self):
return self.true_ptm_width + self.false_ptm_width
@property
def single_ici_width(self):
return int(np.ceil(np.log2(self.num_input_channels + 1))) # Extra slot for the implied null value.
@property
def ici_width(self):
return self.max_num_input_channels_to_check * self.single_ici_width
@property
def tag_width(self):
return int(np.ceil(np.log2(self.num_tags)))
@property
def ictb_width(self):
return self.max_num_input_channels_to_check
@property
def ictv_width(self):
return self.max_num_input_channels_to_check * self.tag_width
@property
def op_width(self):
return int(np.ceil(np.log2(len(Op))))
@property
def single_st_width(self):
return int(np.ceil(np.log2(len(SourceType)))) # SourceType already has an explicit null value.
@property
def st_width(self):
return 3 * self.single_st_width
@property
def single_si_width(self):
return int(np.ceil(np.log2(max(self.num_registers, self.num_input_channels))))
@property
def si_width(self):
return 3 * self.single_si_width
@property
def dt_width(self):
return int(np.ceil(np.log2(len(DestinationType)))) # DestinationType already has an explicit null value.
@property
def di_width(self):
return int(np.ceil(np.log2(max(self.num_registers, self.num_output_channels, self.num_predicates))))
@property
def oci_width(self):
return self.num_output_channels
@property
def oct_width(self):
return self.tag_width
@property
def icd_width(self):
return self.num_input_channels
@property
def true_pum_width(self):
return self.true_ptm_width
@property
def false_pum_width(self):
return self.false_ptm_width
@property
def pum_width(self):
return self.true_pum_width + self.false_pum_width
@property
def non_immediate_instruction_width(self):
non_immediate_instruction_widths = [1, # vi.
self.ptm_width,
self.ici_width,
self.ictb_width,
self.ictv_width,
self.op_width,
self.st_width,
self.si_width,
self.dt_width,
self.di_width,
self.oci_width,
self.oct_width,
self.icd_width,
self.pum_width]
return sum(non_immediate_instruction_widths)
@property
def phy_instruction_width(self):
return self.non_immediate_instruction_width + self.immediate_width
@property
def padding_width(self):
return self.mm_instruction_width - self.phy_instruction_width
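if __name__ == "__main__":
    # Minimal usage sketch (all parameter values below are hypothetical, not
    # part of the original module): build a CoreParameters instance from a
    # dictionary and inspect a couple of derived field widths.
    example_parameters = {"architecture": "integer",
                          "device_word_width": 32,
                          "immediate_width": 16,
                          "mm_instruction_width": 128,
                          "num_instructions": 16,
                          "num_predicates": 8,
                          "num_registers": 8,
                          "has_multiplier": True,
                          "has_two_word_product_multiplier": False,
                          "has_scratchpad": False,
                          "num_scratchpad_words": 0,
                          "latch_based_instruction_memory": False,
                          "ram_based_immediate_storage": False,
                          "num_input_channels": 4,
                          "num_output_channels": 4,
                          "channel_buffer_depth": 4,
                          "max_num_input_channels_to_check": 2,
                          "num_tags": 4,
                          "has_speculative_predicate_unit": False,
                          "has_effective_queue_status": False,
                          "has_debug_monitor": False,
                          "has_performance_counters": False}
    example_core = CoreParameters.from_dictionary(example_parameters)
    print(f"ptm_width = {example_core.ptm_width}, ici_width = {example_core.ici_width}")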
|
alexryndin/ambari
|
refs/heads/branch-adh-1.5
|
ambari-server/src/test/python/custom_actions/test_ru_set_all.py
|
1
|
# !/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
# Python Imports
import os
import json
from mock.mock import patch
from mock.mock import MagicMock
# Module imports
import subprocess
from stacks.utils.RMFTestCase import *
from resource_management import Script, ConfigDictionary
from resource_management.libraries.functions.default import default
from resource_management.libraries.functions import conf_select
from resource_management.core.logger import Logger
from ambari_agent.AmbariConfig import AmbariConfig
from ambari_agent.FileCache import FileCache
from ambari_commons.os_check import OSCheck
from resource_management.core import shell
from resource_management.core.environment import Environment
import pprint
def fake_call(command, **kwargs):
"""
Instead of shell.call, call a command whose output equals the command.
:param command: Command that will be echoed.
:return: Returns a tuple of (process output code, output)
"""
return (0, str(command))
class TestRUSetAll(RMFTestCase):
def get_custom_actions_dir(self):
return os.path.join(self.get_src_folder(), "test/resources/custom_actions/")
@patch.object(Logger, "info")
@patch.object(Logger, "error")
def setUp(self, error_mock, info_mock):
Logger.logger = MagicMock()
# Import the class under test. This is done here as opposed to the rest of the imports because the get_os_type()
# method needs to be patched first.
from ru_set_all import UpgradeSetAll
global UpgradeSetAll
from ru_set_all import link_config
global link_config
def tearDown(self):
Logger.logger = None
@patch("os.path.exists")
@patch("resource_management.core.shell.call")
@patch.object(Script, 'get_config')
@patch.object(OSCheck, 'is_redhat_family')
def test_execution(self, family_mock, get_config_mock, call_mock, exists_mock):
# Mock the config objects
json_file_path = os.path.join(self.get_custom_actions_dir(), "ru_execute_tasks_namenode_prepare.json")
self.assertTrue(os.path.isfile(json_file_path))
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = True
get_config_mock.return_value = config_dict
call_mock.side_effect = fake_call # echo the command
exists_mock.return_value = True
# Ensure that the json file was actually read.
stack_name = default("/hostLevelParams/stack_name", None)
stack_version = default("/hostLevelParams/stack_version", None)
service_package_folder = default('/roleParams/service_package_folder', None)
self.assertEqual(stack_name, "HDP")
self.assertEqual(stack_version, '2.2')
self.assertEqual(service_package_folder, "common-services/HDFS/2.1.0.2.0/package")
# Begin the test
ru_execute = UpgradeSetAll()
ru_execute.actionexecute(None)
call_mock.assert_called_with(('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'all', u'2.2.1.0-2260'), sudo=True)
@patch("os.path.exists")
@patch("resource_management.core.shell.call")
@patch.object(Script, 'get_config')
@patch.object(OSCheck, 'is_redhat_family')
@patch("ru_set_all.link_config")
def test_execution_23(self, link_mock, family_mock, get_config_mock, call_mock, exists_mock):
# Mock the config objects
json_file_path = os.path.join(self.get_custom_actions_dir(), "ru_execute_tasks_namenode_prepare.json")
self.assertTrue(os.path.isfile(json_file_path))
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
json_payload['hostLevelParams']['stack_version'] = "2.3"
json_payload['commandParams']['version'] = "2.3.0.0-1234"
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = True
get_config_mock.return_value = config_dict
call_mock.side_effect = fake_call # echo the command
exists_mock.return_value = True
# Ensure that the json file was actually read.
stack_name = default("/hostLevelParams/stack_name", None)
stack_version = default("/hostLevelParams/stack_version", None)
service_package_folder = default('/roleParams/service_package_folder', None)
self.assertEqual(stack_name, "HDP")
self.assertEqual(stack_version, '2.3')
self.assertEqual(service_package_folder, "common-services/HDFS/2.1.0.2.0/package")
# Begin the test
ru_execute = UpgradeSetAll()
ru_execute.actionexecute(None)
self.assertTrue(link_mock.called)
call_mock.assert_called_with(('ambari-python-wrap', '/usr/bin/hdp-select', 'set', 'all', '2.3.0.0-1234'), sudo=True)
@patch("os.path.exists")
@patch("resource_management.core.shell.call")
@patch.object(Script, 'get_config')
@patch.object(OSCheck, 'is_redhat_family')
def test_skippable_hosts(self, family_mock, get_config_mock, call_mock, exists_mock):
"""
Tests that hosts are skippable if they don't have stack components installed
:return:
"""
# Mock the config objects
json_file_path = os.path.join(self.get_custom_actions_dir(),
"ru_execute_tasks_namenode_prepare.json")
self.assertTrue(os.path.isfile(json_file_path))
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = False
get_config_mock.return_value = config_dict
exists_mock.return_value = True
def hdp_select_call(command, **kwargs):
# return no versions
if "versions" in command:
return (0,"")
return (0,command)
call_mock.side_effect = hdp_select_call
# Ensure that the json file was actually read.
stack_name = default("/hostLevelParams/stack_name", None)
stack_version = default("/hostLevelParams/stack_version", None)
service_package_folder = default('/roleParams/service_package_folder', None)
self.assertEqual(stack_name, "HDP")
self.assertEqual(stack_version, '2.2')
self.assertEqual(service_package_folder, "common-services/HDFS/2.1.0.2.0/package")
# Begin the test
ru_execute = UpgradeSetAll()
ru_execute.actionexecute(None)
call_mock.assert_called_with(('ambari-python-wrap', u'/usr/bin/hdp-select', 'versions'), sudo = True)
self.assertEqual(call_mock.call_count, 1)
@patch("os.path.islink")
@patch("os.path.isdir")
@patch("resource_management.core.shell.call")
@patch.object(Script, 'get_config')
@patch.object(OSCheck, 'is_redhat_family')
def test_downgrade_unlink_configs(self, family_mock, get_config_mock, call_mock,
isdir_mock, islink_mock):
"""
Tests downgrading from 2.3 to 2.2 to ensure that conf symlinks are removed and the backup
directories restored.
"""
isdir_mock.return_value = True
# required for the test to run since the Execute calls need this
from resource_management.core.environment import Environment
env = Environment(test_mode=True)
with env:
# Mock the config objects
json_file_path = os.path.join(self.get_custom_actions_dir(), "ru_execute_tasks_namenode_prepare.json")
self.assertTrue(os.path.isfile(json_file_path))
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
# alter JSON for a downgrade from 2.3 to 2.2
json_payload['commandParams']['version'] = "2.2.0.0-1234"
json_payload['commandParams']['downgrade_from_version'] = "2.3.0.0-1234"
json_payload['commandParams']['source_stack'] = "HDP-2.2"
json_payload['commandParams']['target_stack'] = "HDP-2.3"
json_payload['commandParams']['upgrade_direction'] = "downgrade"
json_payload['hostLevelParams']['stack_version'] = "2.2"
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = True
get_config_mock.return_value = config_dict
call_mock.side_effect = fake_call # echo the command
# test the function
ru_execute = UpgradeSetAll()
ru_execute.unlink_all_configs(None)
# verify that os.path.islink was called for each conf
self.assertTrue(islink_mock.called)
for key, value in conf_select.get_package_dirs().iteritems():
for directory_mapping in value:
original_config_directory = directory_mapping['conf_dir']
is_link_called = False
for call in islink_mock.call_args_list:
call_tuple = call[0]
if original_config_directory in call_tuple:
is_link_called = True
if not is_link_called:
self.fail("os.path.islink({0}) was never called".format(original_config_directory))
# alter JSON for a downgrade from 2.3 to 2.3
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
json_payload['commandParams']['version'] = "2.3.0.0-1234"
json_payload['commandParams']['downgrade_from_version'] = "2.3.0.0-5678"
json_payload['commandParams']['source_stack'] = "HDP-2.3"
json_payload['commandParams']['target_stack'] = "HDP-2.3"
json_payload['commandParams']['upgrade_direction'] = "downgrade"
json_payload['hostLevelParams']['stack_version'] = "2.3"
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
# reset config
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = True
get_config_mock.return_value = config_dict
# reset mock
islink_mock.reset_mock()
# test the function
ru_execute = UpgradeSetAll()
ru_execute.unlink_all_configs(None)
# ensure it wasn't called this time
self.assertFalse(islink_mock.called)
with open(json_file_path, "r") as json_file:
json_payload = json.load(json_file)
# alter JSON for a downgrade from 2.2 to 2.2
json_payload['commandParams']['version'] = "2.2.0.0-1234"
json_payload['commandParams']['downgrade_from_version'] = "2.2.0.0-5678"
json_payload['commandParams']['source_stack'] = "HDP-2.2"
json_payload['commandParams']['target_stack'] = "HDP-2.2"
json_payload['commandParams']['upgrade_direction'] = "downgrade"
json_payload['hostLevelParams']['stack_version'] = "2.2"
json_payload["configurations"]["cluster-env"]["stack_tools"] = self.get_stack_tools()
json_payload["configurations"]["cluster-env"]["stack_features"] = self.get_stack_features()
# reset config
config_dict = ConfigDictionary(json_payload)
family_mock.return_value = True
get_config_mock.return_value = config_dict
# reset mock
islink_mock.reset_mock()
# test the function
ru_execute = UpgradeSetAll()
ru_execute.unlink_all_configs(None)
# ensure it wasn't called this time
self.assertFalse(islink_mock.called)
@patch("os.path.isdir")
@patch("os.path.islink")
def test_unlink_configs_missing_backup(self, islink_mock, isdir_mock):
# required for the test to run since the Execute calls need this
from resource_management.core.environment import Environment
env = Environment(test_mode=True)
with env:
# Case: missing backup directory
isdir_mock.return_value = False
ru_execute = UpgradeSetAll()
self.assertEqual(len(env.resource_list), 0)
# Case: missing symlink
isdir_mock.reset_mock()
isdir_mock.return_value = True
islink_mock.return_value = False
ru_execute._unlink_config("/fake/config")
self.assertEqual(len(env.resource_list), 2)
# Case: symlink exists
isdir_mock.reset_mock()
isdir_mock.return_value = True
islink_mock.reset_mock()
islink_mock.return_value = True
ru_execute._unlink_config("/fake/config")
self.assertEqual(pprint.pformat(env.resource_list),
"[Directory['/fake/config'],\n "
"Execute[('mv', '/fake/conf.backup', '/fake/config')],\n "
"Execute[('rm', '/fake/config')],\n "
"Execute[('mv', '/fake/conf.backup', '/fake/config')]]")
@patch("os.path.exists")
@patch("os.path.islink")
@patch("os.path.isdir")
@patch("os.path.isfile")
@patch("os.path.realpath")
@patch("shutil.rmtree")
def test_link_config(self, shutil_rmtree_mock, os_path_realpath_mock, os_path_isfile_mock,
os_path_isdir_mock, os_path_islink_mock,
os_path_exists_mock):
# Test normal flow
os_path_islink_mock.return_value = False
os_path_realpath_mock.return_value = "/some/another/path"
os_path_exists_mock.side_effect = [True, False]
old_config = "/old/config"
link_conf = "/link/config"
with Environment(test_mode=True) as RMFTestCase.env:
link_config(old_config, link_conf)
self.assertTrue(shutil_rmtree_mock.called)
self.assertEquals(shutil_rmtree_mock.call_args_list[0][0][0], old_config)
self.assertResourceCalled('Execute', ('cp', '-R', '-p', '/old/config', '/old/conf.backup'),
logoutput = True,
sudo = True,
)
self.assertResourceCalled('Link', '/old/config',
to = '/link/config',
)
self.assertNoMoreResources()
# Test case when link exists but is wrong
shutil_rmtree_mock.reset_mock()
os_path_islink_mock.return_value = True
with Environment(test_mode=True) as RMFTestCase.env:
link_config(old_config, link_conf)
self.assertFalse(shutil_rmtree_mock.called)
self.assertResourceCalled('Link', '/old/config',
to = '/link/config',
)
self.assertNoMoreResources()
# Test case when link exists and is correct
shutil_rmtree_mock.reset_mock()
os_path_islink_mock.return_value = True
os_path_realpath_mock.return_value = link_conf
with Environment(test_mode=True) as RMFTestCase.env:
link_config(old_config, link_conf)
self.assertFalse(shutil_rmtree_mock.called)
self.assertNoMoreResources()
# Test case when old link does not exist at all
shutil_rmtree_mock.reset_mock()
os_path_islink_mock.return_value = False
os_path_exists_mock.side_effect = [False]
with Environment(test_mode=True) as RMFTestCase.env:
link_config(old_config, link_conf)
self.assertFalse(shutil_rmtree_mock.called)
self.assertNoMoreResources()
# Test case when backup directory already exists
shutil_rmtree_mock.reset_mock()
os_path_islink_mock.return_value = False
os_path_exists_mock.side_effect = [True, True]
with Environment(test_mode=True) as RMFTestCase.env:
link_config(old_config, link_conf)
self.assertTrue(shutil_rmtree_mock.called)
self.assertEquals(shutil_rmtree_mock.call_args_list[0][0][0], old_config)
self.assertResourceCalled('Link', '/old/config',
to = '/link/config',
)
self.assertNoMoreResources()
|
bloomer1/incubator-zeppelin
|
refs/heads/master
|
spark/src/main/resources/python/zeppelin_pyspark.py
|
10
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys, getopt, traceback
from py4j.java_gateway import java_import, JavaGateway, GatewayClient
from py4j.protocol import Py4JJavaError
from pyspark.conf import SparkConf
from pyspark.context import SparkContext
from pyspark.rdd import RDD
from pyspark.files import SparkFiles
from pyspark.storagelevel import StorageLevel
from pyspark.accumulators import Accumulator, AccumulatorParam
from pyspark.broadcast import Broadcast
from pyspark.serializers import MarshalSerializer, PickleSerializer
# for back compatibility
from pyspark.sql import SQLContext, HiveContext, SchemaRDD, Row
class Logger(object):
def __init__(self):
self.out = ""
def write(self, message):
self.out = self.out + message
def get(self):
return self.out
def reset(self):
self.out = ""
class PyZeppelinContext(dict):
def __init__(self, zc):
self.z = zc
def show(self, obj):
from pyspark.sql import DataFrame
if isinstance(obj, DataFrame):
print gateway.jvm.org.apache.zeppelin.spark.ZeppelinContext.showDF(self.z, obj._jdf)
else:
print str(obj)
# By implementing special methods it makes operating on it more Pythonic
def __setitem__(self, key, item):
self.z.put(key, item)
def __getitem__(self, key):
return self.z.get(key)
def __delitem__(self, key):
self.z.remove(key)
def __contains__(self, item):
return self.z.containsKey(item)
def add(self, key, value):
self.__setitem__(key, value)
def put(self, key, value):
self.__setitem__(key, value)
def get(self, key):
return self.__getitem__(key)
output = Logger()
sys.stdout = output
sys.stderr = output
client = GatewayClient(port=int(sys.argv[1]))
sparkVersion = sys.argv[2]
if sparkVersion.startswith("1.4"):
gateway = JavaGateway(client, auto_convert = True)
else:
gateway = JavaGateway(client)
java_import(gateway.jvm, "org.apache.spark.SparkEnv")
java_import(gateway.jvm, "org.apache.spark.SparkConf")
java_import(gateway.jvm, "org.apache.spark.api.java.*")
java_import(gateway.jvm, "org.apache.spark.api.python.*")
java_import(gateway.jvm, "org.apache.spark.mllib.api.python.*")
intp = gateway.entry_point
intp.onPythonScriptInitialized()
jsc = intp.getJavaSparkContext()
if sparkVersion.startswith("1.2"):
java_import(gateway.jvm, "org.apache.spark.sql.SQLContext")
java_import(gateway.jvm, "org.apache.spark.sql.hive.HiveContext")
java_import(gateway.jvm, "org.apache.spark.sql.hive.LocalHiveContext")
java_import(gateway.jvm, "org.apache.spark.sql.hive.TestHiveContext")
elif sparkVersion.startswith("1.3"):
java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
elif sparkVersion.startswith("1.4"):
java_import(gateway.jvm, "org.apache.spark.sql.*")
java_import(gateway.jvm, "org.apache.spark.sql.hive.*")
java_import(gateway.jvm, "scala.Tuple2")
jconf = intp.getSparkConf()
conf = SparkConf(_jvm = gateway.jvm, _jconf = jconf)
sc = SparkContext(jsc=jsc, gateway=gateway, conf=conf)
sqlc = SQLContext(sc, intp.getSQLContext())
sqlContext = sqlc
z = PyZeppelinContext(intp.getZeppelinContext())
while True :
req = intp.getStatements()
try:
stmts = req.statements().split("\n")
jobGroup = req.jobGroup()
final_code = None
for s in stmts:
if s == None:
continue
# skip comment
s_stripped = s.strip()
if len(s_stripped) == 0 or s_stripped.startswith("#"):
continue
if final_code:
final_code += "\n" + s
else:
final_code = s
if final_code:
compiledCode = compile(final_code, "<string>", "exec")
sc.setJobGroup(jobGroup, "Zeppelin")
eval(compiledCode)
intp.setStatementsFinished(output.get(), False)
except Py4JJavaError:
excInnerError = traceback.format_exc() # format_tb() does not return the inner exception
innerErrorStart = excInnerError.find("Py4JJavaError:")
if innerErrorStart > -1:
excInnerError = excInnerError[innerErrorStart:]
intp.setStatementsFinished(excInnerError + str(sys.exc_info()), True)
except:
intp.setStatementsFinished(traceback.format_exc(), True)
output.reset()
|
samirasnoun/django_cms_gallery_image
|
refs/heads/master
|
cms/test_utils/util/menu_extender.py
|
35
|
# -*- coding: utf-8 -*-
from menus.base import NavigationNode
from menus.menu_pool import menu_pool
from cms.menu_bases import CMSAttachMenu
class TestMenu(CMSAttachMenu):
name = "test menu"
def get_nodes(self, request):
nodes = []
n = NavigationNode('sample root page', "/", 1)
n2 = NavigationNode('sample settings page', "/bye/", 2)
n3 = NavigationNode('sample account page', "/hello/", 3)
n4 = NavigationNode('sample my profile page', "/hello/world/", 4, 3)
nodes.append(n)
nodes.append(n2)
nodes.append(n3)
nodes.append(n4)
return nodes
menu_pool.register_menu(TestMenu)
|
rhyolight/nupic
|
refs/heads/master
|
external/linux32/lib/python2.6/site-packages/matplotlib/cm.py
|
70
|
"""
This module contains the instantiations of color mapping classes
"""
import numpy as np
from numpy import ma
import matplotlib as mpl
import matplotlib.colors as colors
import matplotlib.cbook as cbook
from matplotlib._cm import *
def get_cmap(name=None, lut=None):
"""
Get a colormap instance, defaulting to rc values if *name* is None
"""
if name is None: name = mpl.rcParams['image.cmap']
if lut is None: lut = mpl.rcParams['image.lut']
assert(name in datad.keys())
return colors.LinearSegmentedColormap(name, datad[name], lut)
class ScalarMappable:
"""
This is a mixin class to support scalar -> RGBA mapping. Handles
normalization and colormapping
"""
def __init__(self, norm=None, cmap=None):
"""
*norm* is an instance of :class:`colors.Normalize` or one of
its subclasses, used to map luminance to 0-1. *cmap* is a
:mod:`cm` colormap instance, for example :data:`cm.jet`
"""
self.callbacksSM = cbook.CallbackRegistry((
'changed',))
if cmap is None: cmap = get_cmap()
if norm is None: norm = colors.Normalize()
self._A = None
self.norm = norm
self.cmap = cmap
self.colorbar = None
self.update_dict = {'array':False}
def set_colorbar(self, im, ax):
'set the colorbar image and axes associated with mappable'
self.colorbar = im, ax
def to_rgba(self, x, alpha=1.0, bytes=False):
'''Return a normalized rgba array corresponding to *x*. If *x*
is already an rgb array, insert *alpha*; if it is already
rgba, return it unchanged. If *bytes* is True, return rgba as
4 uint8s instead of 4 floats.
'''
try:
if x.ndim == 3:
if x.shape[2] == 3:
if x.dtype == np.uint8:
alpha = np.array(alpha*255, np.uint8)
m, n = x.shape[:2]
xx = np.empty(shape=(m,n,4), dtype = x.dtype)
xx[:,:,:3] = x
xx[:,:,3] = alpha
elif x.shape[2] == 4:
xx = x
else:
raise ValueError("third dimension must be 3 or 4")
if bytes and xx.dtype != np.uint8:
xx = (xx * 255).astype(np.uint8)
return xx
except AttributeError:
pass
x = ma.asarray(x)
x = self.norm(x)
x = self.cmap(x, alpha=alpha, bytes=bytes)
return x
def set_array(self, A):
'Set the image array from numpy array *A*'
self._A = A
self.update_dict['array'] = True
def get_array(self):
'Return the array'
return self._A
def get_cmap(self):
'return the colormap'
return self.cmap
def get_clim(self):
'return the min, max of the color limits for image scaling'
return self.norm.vmin, self.norm.vmax
def set_clim(self, vmin=None, vmax=None):
"""
set the norm limits for image scaling; if *vmin* is a length-2
sequence, interpret it as ``(vmin, vmax)`` which is used to
support setp
ACCEPTS: a length 2 sequence of floats
"""
if (vmin is not None and vmax is None and
cbook.iterable(vmin) and len(vmin)==2):
vmin, vmax = vmin
if vmin is not None: self.norm.vmin = vmin
if vmax is not None: self.norm.vmax = vmax
self.changed()
def set_cmap(self, cmap):
"""
set the colormap for luminance data
ACCEPTS: a colormap
"""
if cmap is None: cmap = get_cmap()
self.cmap = cmap
self.changed()
def set_norm(self, norm):
'set the normalization instance'
if norm is None: norm = colors.Normalize()
self.norm = norm
self.changed()
def autoscale(self):
"""
Autoscale the scalar limits on the norm instance using the
current array
"""
if self._A is None:
raise TypeError('You must first set_array for mappable')
self.norm.autoscale(self._A)
self.changed()
def autoscale_None(self):
"""
Autoscale the scalar limits on the norm instance using the
current array, changing only limits that are None
"""
if self._A is None:
raise TypeError('You must first set_array for mappable')
self.norm.autoscale_None(self._A)
self.changed()
def add_checker(self, checker):
"""
Add an entry to a dictionary of boolean flags
that are set to True when the mappable is changed.
"""
self.update_dict[checker] = False
def check_update(self, checker):
"""
If mappable has changed since the last check,
return True; else return False
"""
if self.update_dict[checker]:
self.update_dict[checker] = False
return True
return False
def changed(self):
"""
Call this whenever the mappable is changed to notify all the
callbackSM listeners to the 'changed' signal
"""
self.callbacksSM.process('changed', self)
for key in self.update_dict:
self.update_dict[key] = True
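# --- Illustrative usage sketch; not part of the original module. The colormap
# name 'jet' and the sample data below are assumptions for the example. ---
if __name__ == '__main__':
    data = np.linspace(0.0, 1.0, 5)
    mappable = ScalarMappable(norm=colors.Normalize(vmin=0.0, vmax=1.0),
                              cmap=get_cmap('jet'))
    mappable.set_array(data)
    # to_rgba maps the normalized values through the colormap, returning an
    # (N, 4) array of RGBA floats in [0, 1].
    print(mappable.to_rgba(data))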
|
jepler/linuxcnc-mirror
|
refs/heads/master
|
tests/mdi-while-queuebuster-waitflag/test-ui.py
|
5
|
#!/usr/bin/env python
import linuxcnc, hal
import sys
# Initialization
c = linuxcnc.command()
s = linuxcnc.stat()
c.state(linuxcnc.STATE_ESTOP_RESET)
c.state(linuxcnc.STATE_ON)
c.mode(linuxcnc.MODE_MDI)
c.mdi('(print,pre 1)')
c.mdi('(print,pre 2)')
c.mdi('M400')
c.mdi('(print,post 1)')
c.mdi('(print,post 2)')
c.mdi('(print,post 3)')
c.mdi('(print,post 4)')
c.mdi('(print,post 5)')
c.mdi('(print,post 6)')
# Shutdown
c.wait_complete()
sys.exit(0)
|
storpipfugl/airflow
|
refs/heads/master
|
airflow/migrations/versions/1507a7289a2f_create_is_encrypted.py
|
37
|
"""create is_encrypted
Revision ID: 1507a7289a2f
Revises: e3a246e0dc1
Create Date: 2015-08-18 18:57:51.927315
"""
# revision identifiers, used by Alembic.
revision = '1507a7289a2f'
down_revision = 'e3a246e0dc1'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
from sqlalchemy.engine.reflection import Inspector
from airflow import settings
connectionhelper = sa.Table(
'connection',
sa.MetaData(),
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('is_encrypted')
)
def upgrade():
# first check if the user already has this done. This should only be
# true for users who are upgrading from a previous version of Airflow
# that predates Alembic integration
inspector = Inspector.from_engine(settings.engine)
# this will only be true if 'connection' already exists in the db,
# but not if alembic created it in a previous migration
if 'connection' in inspector.get_table_names():
col_names = [c['name'] for c in inspector.get_columns('connection')]
if 'is_encrypted' in col_names:
return
op.add_column(
'connection',
sa.Column('is_encrypted', sa.Boolean, unique=False, default=False))
conn = op.get_bind()
conn.execute(
connectionhelper.update().values(is_encrypted=False)
)
def downgrade():
op.drop_column('connection', 'is_encrypted')
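# --- Illustrative sketch; not part of this migration. It factors the
# "column already exists" guard used in upgrade() into a hypothetical reusable
# helper; the table_name/column_name arguments are assumptions. ---
def _column_exists(table_name, column_name):
    inspector = Inspector.from_engine(settings.engine)
    if table_name not in inspector.get_table_names():
        return False
    return column_name in [col['name'] for col in inspector.get_columns(table_name)]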
|
nclsHart/glances
|
refs/heads/master
|
glances/exports/glances_csv.py
|
1
|
# -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2015 Nicolargo <nicolas@nicolargo.com>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""CSV interface class."""
# Import sys libs
import csv
import sys
# Import Glances lib
from glances.core.glances_globals import is_py3
from glances.core.glances_logging import logger
from glances.exports.glances_export import GlancesExport
class Export(GlancesExport):
"""This class manages the CSV export module."""
def __init__(self, config=None, args=None):
"""Init the CSV export IF."""
GlancesExport.__init__(self, config=config, args=args)
# CSV file name
self.csv_filename = args.export_csv
# Set the CSV output file
try:
if is_py3:
self.csv_file = open(self.csv_filename, 'w', newline='')
else:
self.csv_file = open(self.csv_filename, 'wb')
self.writer = csv.writer(self.csv_file)
except IOError as e:
logger.critical("Cannot create the CSV file: {0}".format(e))
sys.exit(2)
logger.info("Stats exported to CSV file: {0}".format(self.csv_filename))
self.export_enable = True
self.first_line = True
def exit(self):
"""Close the CSV file."""
logger.debug("Finalise export interface %s" % self.export_name)
self.csv_file.close()
def update(self, stats):
"""Update stats in the CSV output file."""
csv_header = []
csv_data = []
# Get the stats
all_stats = stats.getAll()
plugins = stats.getAllPlugins()
        # Loop over available plugins
i = 0
for plugin in plugins:
if plugin in self.plugins_to_export():
if type(all_stats[i]) is list:
for item in all_stats[i]:
# First line: header
if self.first_line:
fieldnames = item.keys()
csv_header += map(lambda x: plugin+'_'+item[item['key']]+'_'+x, item)
                        # Other lines: stats
fieldvalues = item.values()
csv_data += fieldvalues
elif type(all_stats[i]) is dict:
# First line: header
if self.first_line:
fieldnames = all_stats[i].keys()
csv_header += map(lambda x: plugin+'_'+x, fieldnames)
                    # Other lines: stats
fieldvalues = all_stats[i].values()
csv_data += fieldvalues
i += 1
# Export to CSV
if self.first_line:
self.writer.writerow(csv_header)
self.first_line = False
self.writer.writerow(csv_data)
self.csv_file.flush()
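# --- Illustrative sketch; not part of the original module. It shows how the
# header naming scheme above flattens a dict stat into "<plugin>_<field>"
# columns; the 'mem' plugin and its fields are assumptions for the example. ---
if __name__ == '__main__':
    plugin_name = 'mem'
    fake_stat = {'total': 8000000000, 'used': 4000000000}
    header = [plugin_name + '_' + field for field in fake_stat.keys()]
    values = list(fake_stat.values())
    print(header)  # e.g. ['mem_total', 'mem_used'] (dict ordering may vary)
    print(values)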
|
snnn/tensorflow
|
refs/heads/master
|
tensorflow/python/estimator/export/export.py
|
6
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Configuration and utilities for receiving inputs at serving time."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import os
import six
from tensorflow.python.estimator import util
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import signature_def_utils
from tensorflow.python.util import compat
from tensorflow.python.util.tf_export import estimator_export
_SINGLE_FEATURE_DEFAULT_NAME = 'feature'
_SINGLE_RECEIVER_DEFAULT_NAME = 'input'
_SINGLE_LABEL_DEFAULT_NAME = 'label'
_SINGLE_TENSOR_DEFAULT_NAMES = {
'feature': _SINGLE_FEATURE_DEFAULT_NAME,
'label': _SINGLE_LABEL_DEFAULT_NAME,
'receiver_tensor': _SINGLE_RECEIVER_DEFAULT_NAME,
'receiver_tensors_alternative': _SINGLE_RECEIVER_DEFAULT_NAME
}
def _wrap_and_check_input_tensors(tensors, field_name):
"""Ensure that tensors is a dict of str to Tensor mappings.
Args:
tensors: dict of str to Tensors, or a single Tensor.
field_name: name of the member field of `ServingInputReceiver`
whose value is being passed to `tensors`.
Returns:
dict of str to Tensors; this is the original dict if one was passed, or
the original tensor wrapped in a dictionary.
Raises:
ValueError: if tensors is None, or has non-string keys,
or non-Tensor values
"""
if tensors is None:
raise ValueError('{}s must be defined.'.format(field_name))
if not isinstance(tensors, dict):
tensors = {_SINGLE_TENSOR_DEFAULT_NAMES[field_name]: tensors}
for name, tensor in tensors.items():
_check_tensor_key(name, error_label=field_name)
_check_tensor(tensor, name, error_label=field_name)
return tensors
def _check_tensor(tensor, name, error_label='feature'):
"""Check that passed `tensor` is a Tensor or SparseTensor."""
if not (isinstance(tensor, ops.Tensor) or
isinstance(tensor, sparse_tensor.SparseTensor)):
fmt_name = ' {}'.format(name) if name else ''
value_error = ValueError('{}{} must be a Tensor or SparseTensor.'.format(
error_label, fmt_name))
# NOTE(ericmc): This if-else block is a specific carve-out for
# LabeledTensor, which has a `.tensor` attribute and which is
# convertible to tf.Tensor via ops.convert_to_tensor.
# Allowing all types convertible to tf.Tensor is considered by soergel@
# to be too permissive.
# TODO(soergel): accept any type convertible to Tensor,
# as in cl/193238295 snapshot #6.
if hasattr(tensor, 'tensor'):
try:
ops.convert_to_tensor(tensor)
except TypeError:
raise value_error
else:
raise value_error
def _check_tensor_key(name, error_label='feature'):
if not isinstance(name, six.string_types):
raise ValueError('{} keys must be strings: {}.'.format(error_label, name))
@estimator_export('estimator.export.ServingInputReceiver')
class ServingInputReceiver(
collections.namedtuple(
'ServingInputReceiver',
['features', 'receiver_tensors', 'receiver_tensors_alternatives'])):
"""A return type for a serving_input_receiver_fn.
The expected return values are:
features: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
`SparseTensor`, specifying the features to be passed to the model. Note:
if `features` passed is not a dict, it will be wrapped in a dict with a
single entry, using 'feature' as the key. Consequently, the model must
accept a feature dict of the form {'feature': tensor}. You may use
`TensorServingInputReceiver` if you want the tensor to be passed as is.
receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
or `SparseTensor`, specifying input nodes where this receiver expects to
be fed by default. Typically, this is a single placeholder expecting
serialized `tf.Example` protos.
receiver_tensors_alternatives: a dict of string to additional
groups of receiver tensors, each of which may be a `Tensor`,
      `SparseTensor`, or dict of string to `Tensor` or `SparseTensor`.
These named receiver tensor alternatives generate additional serving
signatures, which may be used to feed inputs at different points within
the input receiver subgraph. A typical usage is to allow feeding raw
feature `Tensor`s *downstream* of the tf.parse_example() op.
Defaults to None.
"""
def __new__(cls,
features,
receiver_tensors,
receiver_tensors_alternatives=None):
features = _wrap_and_check_input_tensors(features, 'feature')
receiver_tensors = _wrap_and_check_input_tensors(receiver_tensors,
'receiver_tensor')
if receiver_tensors_alternatives is not None:
if not isinstance(receiver_tensors_alternatives, dict):
raise ValueError(
'receiver_tensors_alternatives must be a dict: {}.'.format(
receiver_tensors_alternatives))
for alternative_name, receiver_tensors_alt in (
six.iteritems(receiver_tensors_alternatives)):
# Updating dict during iteration is OK in this case.
receiver_tensors_alternatives[alternative_name] = (
_wrap_and_check_input_tensors(
receiver_tensors_alt, 'receiver_tensors_alternative'))
return super(ServingInputReceiver, cls).__new__(
cls,
features=features,
receiver_tensors=receiver_tensors,
receiver_tensors_alternatives=receiver_tensors_alternatives)
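# --- Illustrative sketch; not part of the original module. It builds a
# ServingInputReceiver by hand for a single assumed float feature 'x' parsed
# from serialized tf.Example protos. ---
def _example_serving_input_receiver():  # hypothetical helper, illustration only
  serialized = array_ops.placeholder(
      dtypes.string, shape=[None], name='input_example_tensor')
  features = parsing_ops.parse_example(
      serialized, {'x': parsing_ops.FixedLenFeature([1], dtypes.float32)})
  return ServingInputReceiver(features, {'examples': serialized})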
@estimator_export('estimator.export.TensorServingInputReceiver')
class TensorServingInputReceiver(
collections.namedtuple(
'TensorServingInputReceiver',
['features', 'receiver_tensors', 'receiver_tensors_alternatives'])):
"""A return type for a serving_input_receiver_fn.
This is for use with models that expect a single `Tensor` or `SparseTensor`
as an input feature, as opposed to a dict of features.
The normal `ServingInputReceiver` always returns a feature dict, even if it
contains only one entry, and so can be used only with models that accept such
a dict. For models that accept only a single raw feature, the
`serving_input_receiver_fn` provided to `Estimator.export_savedmodel()` should
return this `TensorServingInputReceiver` instead. See:
https://github.com/tensorflow/tensorflow/issues/11674
  Note that the receiver_tensors and receiver_tensors_alternatives arguments
will be automatically converted to the dict representation in either case,
because the SavedModel format requires each input `Tensor` to have a name
(provided by the dict key).
The expected return values are:
features: A single `Tensor` or `SparseTensor`, representing the feature
to be passed to the model.
receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
or `SparseTensor`, specifying input nodes where this receiver expects to
be fed by default. Typically, this is a single placeholder expecting
serialized `tf.Example` protos.
receiver_tensors_alternatives: a dict of string to additional
groups of receiver tensors, each of which may be a `Tensor`,
      `SparseTensor`, or dict of string to `Tensor` or `SparseTensor`.
These named receiver tensor alternatives generate additional serving
signatures, which may be used to feed inputs at different points within
the input receiver subgraph. A typical usage is to allow feeding raw
feature `Tensor`s *downstream* of the tf.parse_example() op.
Defaults to None.
"""
def __new__(cls,
features,
receiver_tensors,
receiver_tensors_alternatives=None):
if features is None:
raise ValueError('features must be defined.')
_check_tensor(features, None)
receiver = ServingInputReceiver(
features=features,
receiver_tensors=receiver_tensors,
receiver_tensors_alternatives=receiver_tensors_alternatives)
return super(TensorServingInputReceiver, cls).__new__(
cls,
features=receiver.features[_SINGLE_FEATURE_DEFAULT_NAME],
receiver_tensors=receiver.receiver_tensors,
receiver_tensors_alternatives=receiver.receiver_tensors_alternatives)
class UnsupervisedInputReceiver(ServingInputReceiver):
"""A return type for a training_input_receiver_fn or eval_input_receiver_fn.
This differs from SupervisedInputReceiver in that it does not require a set
of labels.
The expected return values are:
features: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
`SparseTensor`, specifying the features to be passed to the model.
receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
or `SparseTensor`, specifying input nodes where this receiver expects to
be fed by default. Typically, this is a single placeholder expecting
serialized `tf.Example` protos.
"""
def __new__(cls, features, receiver_tensors):
return super(UnsupervisedInputReceiver, cls).__new__(
cls,
features=features,
receiver_tensors=receiver_tensors,
receiver_tensors_alternatives=None)
class SupervisedInputReceiver(
collections.namedtuple('SupervisedInputReceiver',
['features', 'labels', 'receiver_tensors'])):
"""A return type for a training_input_receiver_fn or eval_input_receiver_fn.
This differs from a ServingInputReceiver in that (1) this receiver expects
a set of labels to be passed in with features, and (2) this receiver does
not support receiver_tensors_alternatives, which are primarily used for
serving.
The expected return values are:
features: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
`SparseTensor`, specifying the features to be passed to the model.
labels: A `Tensor`, `SparseTensor`, or dict of string to `Tensor` or
`SparseTensor`, specifying the labels to be passed to the model.
receiver_tensors: A `Tensor`, `SparseTensor`, or dict of string to `Tensor`
or `SparseTensor`, specifying input nodes where this receiver expects to
be fed by default. Typically, this is a single placeholder expecting
serialized `tf.Example` protos.
"""
def __new__(cls, features, labels, receiver_tensors):
# Both features and labels can be dicts or raw tensors.
for input_vals, error_label in ((features, 'feature'), (labels, 'label')):
# _wrap_and_check_input_tensors is called here only to validate the
# tensors. The wrapped dict that is returned is deliberately discarded.
_wrap_and_check_input_tensors(input_vals, error_label)
receiver_tensors = _wrap_and_check_input_tensors(receiver_tensors,
'receiver_tensor')
return super(SupervisedInputReceiver, cls).__new__(
cls,
features=features,
labels=labels,
receiver_tensors=receiver_tensors)
@estimator_export('estimator.export.build_parsing_serving_input_receiver_fn')
def build_parsing_serving_input_receiver_fn(feature_spec,
default_batch_size=None):
"""Build a serving_input_receiver_fn expecting fed tf.Examples.
Creates a serving_input_receiver_fn that expects a serialized tf.Example fed
into a string placeholder. The function parses the tf.Example according to
the provided feature_spec, and returns all parsed Tensors as features.
Args:
feature_spec: a dict of string to `VarLenFeature`/`FixedLenFeature`.
default_batch_size: the number of query examples expected per batch.
Leave unset for variable batch size (recommended).
Returns:
A serving_input_receiver_fn suitable for use in serving.
"""
def serving_input_receiver_fn():
"""An input_fn that expects a serialized tf.Example."""
serialized_tf_example = array_ops.placeholder(
dtype=dtypes.string,
shape=[default_batch_size],
name='input_example_tensor')
receiver_tensors = {'examples': serialized_tf_example}
features = parsing_ops.parse_example(serialized_tf_example, feature_spec)
return ServingInputReceiver(features, receiver_tensors)
return serving_input_receiver_fn
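# --- Illustrative usage sketch; not part of the original module. The
# feature_spec below (an int64 'age' and a variable-length string 'query') is
# an assumption; the returned fn is what an Estimator export would consume. ---
def _example_parsing_receiver_fn():  # hypothetical helper, illustration only
  feature_spec = {
      'age': parsing_ops.FixedLenFeature([1], dtypes.int64),
      'query': parsing_ops.VarLenFeature(dtypes.string),
  }
  return build_parsing_serving_input_receiver_fn(feature_spec)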
def _placeholder_from_tensor(t, default_batch_size=None):
"""Creates a placeholder that matches the dtype and shape of passed tensor.
Args:
t: Tensor or EagerTensor
default_batch_size: the number of query examples expected per batch.
Leave unset for variable batch size (recommended).
Returns:
Placeholder that matches the passed tensor.
"""
batch_shape = tensor_shape.TensorShape([default_batch_size])
shape = batch_shape.concatenate(t.get_shape()[1:])
# Reuse the feature tensor's op name (t.op.name) for the placeholder,
# excluding the index from the tensor's name (t.name):
# t.name = "%s:%d" % (t.op.name, t._value_index)
try:
name = t.op.name
except AttributeError:
# In Eager mode, tensors don't have ops or names, and while they do have
# IDs, those are not maintained across runs. The name here is used
# primarily for debugging, and is not critical to the placeholder.
# So, in order to make this Eager-compatible, continue with an empty
# name if none is available.
name = None
return array_ops.placeholder(dtype=t.dtype, shape=shape, name=name)
def _placeholders_from_receiver_tensors_dict(input_vals,
default_batch_size=None):
return {
name: _placeholder_from_tensor(t, default_batch_size)
for name, t in input_vals.items()
}
@estimator_export('estimator.export.build_raw_serving_input_receiver_fn')
def build_raw_serving_input_receiver_fn(features, default_batch_size=None):
"""Build a serving_input_receiver_fn expecting feature Tensors.
  Creates a serving_input_receiver_fn that expects all features to be fed
directly.
Args:
features: a dict of string to `Tensor`.
default_batch_size: the number of query examples expected per batch.
Leave unset for variable batch size (recommended).
Returns:
A serving_input_receiver_fn.
"""
def serving_input_receiver_fn():
"""A serving_input_receiver_fn that expects features to be fed directly."""
receiver_tensors = _placeholders_from_receiver_tensors_dict(
features, default_batch_size)
return ServingInputReceiver(receiver_tensors, receiver_tensors)
return serving_input_receiver_fn
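# --- Illustrative usage sketch; not part of the original module. The feature
# name 'x' and its shape are assumptions; the builder above creates a matching
# placeholder for it. ---
def _example_raw_receiver_fn():  # hypothetical helper, illustration only
  x = array_ops.placeholder(dtypes.float32, shape=[None, 3], name='x')
  return build_raw_serving_input_receiver_fn({'x': x})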
def build_raw_supervised_input_receiver_fn(features,
labels,
default_batch_size=None):
"""Build a supervised_input_receiver_fn for raw features and labels.
This function wraps tensor placeholders in a supervised_receiver_fn
with the expectation that the features and labels appear precisely as
the model_fn expects them. Features and labels can therefore be dicts of
tensors, or raw tensors.
Args:
features: a dict of string to `Tensor` or `Tensor`.
labels: a dict of string to `Tensor` or `Tensor`.
default_batch_size: the number of query examples expected per batch.
Leave unset for variable batch size (recommended).
Returns:
A supervised_input_receiver_fn.
Raises:
ValueError: if features and labels have overlapping keys.
"""
# Check for overlapping keys before beginning.
try:
feat_keys = features.keys()
except AttributeError:
feat_keys = [_SINGLE_RECEIVER_DEFAULT_NAME]
try:
label_keys = labels.keys()
except AttributeError:
label_keys = [_SINGLE_LABEL_DEFAULT_NAME]
overlap_keys = set(feat_keys) & set(label_keys)
if overlap_keys:
raise ValueError('Features and labels must have distinct keys. '
'Found overlapping keys: {}'.format(overlap_keys))
def supervised_input_receiver_fn():
"""A receiver_fn that expects pass-through features and labels."""
if not isinstance(features, dict):
features_cp = _placeholder_from_tensor(features, default_batch_size)
receiver_features = {_SINGLE_RECEIVER_DEFAULT_NAME: features_cp}
else:
receiver_features = _placeholders_from_receiver_tensors_dict(
features, default_batch_size)
features_cp = receiver_features
if not isinstance(labels, dict):
labels_cp = _placeholder_from_tensor(labels, default_batch_size)
receiver_labels = {_SINGLE_LABEL_DEFAULT_NAME: labels_cp}
else:
receiver_labels = _placeholders_from_receiver_tensors_dict(
labels, default_batch_size)
labels_cp = receiver_labels
receiver_tensors = dict(receiver_features)
receiver_tensors.update(receiver_labels)
return SupervisedInputReceiver(features_cp, labels_cp, receiver_tensors)
return supervised_input_receiver_fn
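# --- Illustrative usage sketch; not part of the original module. Feature and
# label names/shapes are assumptions; note their keys must not overlap. ---
def _example_raw_supervised_receiver_fn():  # hypothetical helper, illustration only
  features = {'pixels': array_ops.placeholder(dtypes.float32, [None, 784])}
  labels = {'digit': array_ops.placeholder(dtypes.int64, [None])}
  return build_raw_supervised_input_receiver_fn(features, labels)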
def build_supervised_input_receiver_fn_from_input_fn(input_fn, **input_fn_args):
"""Get a function that returns a SupervisedInputReceiver matching an input_fn.
Note that this function calls the input_fn in a local graph in order to
extract features and labels. Placeholders are then created from those
features and labels in the default graph.
Args:
input_fn: An Estimator input_fn, which is a function that returns one of:
* A 'tf.data.Dataset' object: Outputs of `Dataset` object must be a
tuple (features, labels) with same constraints as below.
* A tuple (features, labels): Where `features` is a `Tensor` or a
dictionary of string feature name to `Tensor` and `labels` is a
`Tensor` or a dictionary of string label name to `Tensor`. Both
`features` and `labels` are consumed by `model_fn`. They should
satisfy the expectation of `model_fn` from inputs.
**input_fn_args: set of kwargs to be passed to the input_fn. Note that
these will not be checked or validated here, and any errors raised by
the input_fn will be thrown to the top.
Returns:
A function taking no arguments that, when called, returns a
SupervisedInputReceiver. This function can be passed in as part of the
input_receiver_map when exporting SavedModels from Estimator with multiple
modes.
"""
# Wrap the input_fn call in a graph to prevent sullying the default namespace
with ops.Graph().as_default():
result = input_fn(**input_fn_args)
features, labels, _ = util.parse_input_fn_result(result)
# Placeholders are created back in the default graph.
return build_raw_supervised_input_receiver_fn(features, labels)
### Below utilities are specific to SavedModel exports.
def build_all_signature_defs(receiver_tensors,
export_outputs,
receiver_tensors_alternatives=None,
serving_only=True):
"""Build `SignatureDef`s for all export outputs.
Args:
receiver_tensors: a `Tensor`, or a dict of string to `Tensor`, specifying
input nodes where this receiver expects to be fed by default. Typically,
this is a single placeholder expecting serialized `tf.Example` protos.
export_outputs: a dict of ExportOutput instances, each of which has
an as_signature_def instance method that will be called to retrieve
the signature_def for all export output tensors.
receiver_tensors_alternatives: a dict of string to additional
groups of receiver tensors, each of which may be a `Tensor` or a dict of
string to `Tensor`. These named receiver tensor alternatives generate
additional serving signatures, which may be used to feed inputs at
different points within the input receiver subgraph. A typical usage is
to allow feeding raw feature `Tensor`s *downstream* of the
tf.parse_example() op. Defaults to None.
serving_only: boolean; if true, resulting signature defs will only include
valid serving signatures. If false, all requested signatures will be
returned.
Returns:
signature_def representing all passed args.
Raises:
ValueError: if export_outputs is not a dict
"""
if not isinstance(receiver_tensors, dict):
receiver_tensors = {_SINGLE_RECEIVER_DEFAULT_NAME: receiver_tensors}
if export_outputs is None or not isinstance(export_outputs, dict):
    raise ValueError('export_outputs must be a dict and not '
'{}'.format(type(export_outputs)))
signature_def_map = {}
excluded_signatures = {}
for output_key, export_output in export_outputs.items():
signature_name = '{}'.format(output_key or 'None')
try:
signature = export_output.as_signature_def(receiver_tensors)
signature_def_map[signature_name] = signature
except ValueError as e:
excluded_signatures[signature_name] = str(e)
if receiver_tensors_alternatives:
for receiver_name, receiver_tensors_alt in (
six.iteritems(receiver_tensors_alternatives)):
if not isinstance(receiver_tensors_alt, dict):
receiver_tensors_alt = {
_SINGLE_RECEIVER_DEFAULT_NAME: receiver_tensors_alt
}
for output_key, export_output in export_outputs.items():
signature_name = '{}:{}'.format(receiver_name or 'None', output_key or
'None')
try:
signature = export_output.as_signature_def(receiver_tensors_alt)
signature_def_map[signature_name] = signature
except ValueError as e:
excluded_signatures[signature_name] = str(e)
_log_signature_report(signature_def_map, excluded_signatures)
# The above calls to export_output.as_signature_def should return only
# valid signatures; if there is a validity problem, they raise a ValueError,
# in which case we exclude that signature from signature_def_map above.
# The is_valid_signature check ensures that the signatures produced are
# valid for serving, and acts as an additional sanity check for export
# signatures produced for serving. We skip this check for training and eval
# signatures, which are not intended for serving.
if serving_only:
signature_def_map = {
k: v
for k, v in signature_def_map.items()
if signature_def_utils.is_valid_signature(v)
}
return signature_def_map
_FRIENDLY_METHOD_NAMES = {
signature_constants.CLASSIFY_METHOD_NAME: 'Classify',
signature_constants.REGRESS_METHOD_NAME: 'Regress',
signature_constants.PREDICT_METHOD_NAME: 'Predict',
signature_constants.SUPERVISED_TRAIN_METHOD_NAME: 'Train',
signature_constants.SUPERVISED_EVAL_METHOD_NAME: 'Eval',
}
def _log_signature_report(signature_def_map, excluded_signatures):
"""Log a report of which signatures were produced."""
sig_names_by_method_name = collections.defaultdict(list)
# We'll collect whatever method_names are present, but also we want to make
# sure to output a line for each of the three standard methods even if they
# have no signatures.
for method_name in _FRIENDLY_METHOD_NAMES:
sig_names_by_method_name[method_name] = []
for signature_name, sig in signature_def_map.items():
sig_names_by_method_name[sig.method_name].append(signature_name)
# TODO(b/67733540): consider printing the full signatures, not just names
for method_name, sig_names in sig_names_by_method_name.items():
if method_name in _FRIENDLY_METHOD_NAMES:
method_name = _FRIENDLY_METHOD_NAMES[method_name]
logging.info('Signatures INCLUDED in export for {}: {}'.format(
method_name, sig_names if sig_names else 'None'))
if excluded_signatures:
    logging.info('Signatures EXCLUDED from export because they cannot be '
                 'served via TensorFlow Serving APIs:')
for signature_name, message in excluded_signatures.items():
logging.info('\'{}\' : {}'.format(signature_name, message))
if not signature_def_map:
logging.warn('Export includes no signatures!')
elif (signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY not in
signature_def_map):
logging.warn('Export includes no default signature!')
def get_timestamped_export_dir(export_dir_base):
"""Builds a path to a new subdirectory within the base directory.
Each export is written into a new subdirectory named using the
current time. This guarantees monotonically increasing version
numbers even across multiple runs of the pipeline.
The timestamp used is the number of seconds since epoch UTC.
Args:
export_dir_base: A string containing a directory to write the exported
graph and checkpoints.
Returns:
The full path of the new subdirectory (which is not actually created yet).
Raises:
RuntimeError: if repeated attempts fail to obtain a unique timestamped
directory name.
"""
return util.get_timestamped_dir(export_dir_base)
def get_temp_export_dir(timestamped_export_dir):
"""Builds a directory name based on the argument but starting with 'temp-'.
This relies on the fact that TensorFlow Serving ignores subdirectories of
the base directory that can't be parsed as integers.
Args:
timestamped_export_dir: the name of the eventual export directory, e.g.
/foo/bar/<timestamp>
Returns:
A sister directory prefixed with 'temp-', e.g. /foo/bar/temp-<timestamp>.
"""
(dirname, basename) = os.path.split(timestamped_export_dir)
temp_export_dir = os.path.join(
compat.as_bytes(dirname), compat.as_bytes('temp-{}'.format(basename)))
return temp_export_dir
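# --- Illustrative usage sketch; not part of the original module. The base
# directory below is an assumption; it shows how the two helpers above combine
# so Serving never sees a half-written export. ---
def _example_export_dirs():  # hypothetical helper, illustration only
  export_dir = get_timestamped_export_dir('/tmp/my_model/export')
  temp_dir = get_temp_export_dir(export_dir)
  # A SavedModel is typically written under temp_dir first and then renamed
  # to export_dir once complete.
  return export_dir, temp_dir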
|
westinedu/newertrends
|
refs/heads/master
|
zinnia/management/commands/feed2zinnia.py
|
2
|
"""Feed to Zinnia command module"""
import os
import sys
from urllib2 import urlopen
from datetime import datetime
from optparse import make_option
from django.conf import settings
from django.utils import timezone
from django.core.files import File
from django.utils.text import Truncator
from django.utils.html import strip_tags
from django.db.utils import IntegrityError
from django.utils.encoding import smart_str
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.template.defaultfilters import slugify
from django.core.management.base import CommandError
from django.core.management.base import LabelCommand
from django.core.files.temp import NamedTemporaryFile
from zinnia import __version__
from zinnia.models import Entry
from zinnia.models import Category
from zinnia.managers import PUBLISHED
from zinnia.signals import disconnect_zinnia_signals
class Command(LabelCommand):
"""Command object for importing a RSS or Atom
feed into Zinnia."""
    help = 'Import an RSS or Atom feed into Zinnia.'
label = 'feed url'
args = 'url'
option_list = LabelCommand.option_list + (
make_option('--no-auto-excerpt', action='store_false',
dest='auto-excerpt', default=True,
help='Do NOT generate an excerpt if not present.'),
make_option('--no-enclosure', action='store_false',
dest='image-enclosure', default=True,
help='Do NOT save image enclosure if present.'),
make_option('--no-tags', action='store_false',
dest='tags', default=True,
help='Do NOT store categories as tags'),
make_option('--author', dest='author', default='',
help='All imported entries belong to specified author'),
)
SITE = Site.objects.get_current()
def __init__(self):
"""Init the Command and add custom styles"""
super(Command, self).__init__()
self.style.TITLE = self.style.SQL_FIELD
self.style.STEP = self.style.SQL_COLTYPE
self.style.ITEM = self.style.HTTP_INFO
disconnect_zinnia_signals()
def write_out(self, message, verbosity_level=1):
"""Convenient method for outputing"""
if self.verbosity and self.verbosity >= verbosity_level:
sys.stdout.write(smart_str(message))
sys.stdout.flush()
def handle_label(self, url, **options):
try:
import feedparser
except ImportError:
raise CommandError('You need to install the feedparser ' \
'module to run this command.')
self.tags = options.get('tags', True)
self.default_author = options.get('author')
self.verbosity = int(options.get('verbosity', 1))
self.auto_excerpt = options.get('auto-excerpt', True)
self.image_enclosure = options.get('image-enclosure', True)
if self.default_author:
try:
self.default_author = User.objects.get(
username=self.default_author)
except User.DoesNotExist:
raise CommandError('Invalid username for default author')
self.write_out(self.style.TITLE(
'Starting importation of %s to Zinnia %s:\n' % (url, __version__)))
feed = feedparser.parse(url)
self.import_entries(feed.entries)
def import_entries(self, feed_entries):
"""Import entries"""
for feed_entry in feed_entries:
self.write_out('> %s... ' % feed_entry.title)
            if feed_entry.get('published_parsed'):
creation_date = datetime(*feed_entry.published_parsed[:6])
if settings.USE_TZ:
creation_date = timezone.make_aware(
creation_date, timezone.utc)
else:
creation_date = timezone.now()
slug = slugify(feed_entry.title)[:255]
if Entry.objects.filter(creation_date__year=creation_date.year,
creation_date__month=creation_date.month,
creation_date__day=creation_date.day,
slug=slug):
self.write_out(self.style.NOTICE(
'SKIPPED (already imported)\n'))
continue
categories = self.import_categories(feed_entry)
entry_dict = {'title': feed_entry.title[:255],
'content': feed_entry.description,
'excerpt': feed_entry.get('summary'),
'status': PUBLISHED,
'creation_date': creation_date,
'start_publication': creation_date,
'last_update': timezone.now(),
'slug': slug}
if not entry_dict['excerpt'] and self.auto_excerpt:
                entry_dict['excerpt'] = Truncator(strip_tags(
                    feed_entry.description)).words(50)
if self.tags:
entry_dict['tags'] = self.import_tags(categories)
entry = Entry(**entry_dict)
entry.save()
entry.categories.add(*categories)
entry.sites.add(self.SITE)
if self.image_enclosure:
for enclosure in feed_entry.enclosures:
if 'image' in enclosure.get('type') \
and enclosure.get('href'):
img_tmp = NamedTemporaryFile(delete=True)
img_tmp.write(urlopen(enclosure['href']).read())
img_tmp.flush()
entry.image.save(os.path.basename(enclosure['href']),
File(img_tmp))
break
if self.default_author:
entry.authors.add(self.default_author)
elif feed_entry.get('author_detail'):
try:
user = User.objects.create_user(
slugify(feed_entry.author_detail.get('name')),
feed_entry.author_detail.get('email', ''))
except IntegrityError:
user = User.objects.get(
username=slugify(feed_entry.author_detail.get('name')))
entry.authors.add(user)
self.write_out(self.style.ITEM('OK\n'))
def import_categories(self, feed_entry):
categories = []
for cat in feed_entry.get('tags', ''):
category, created = Category.objects.get_or_create(
slug=slugify(cat.term), defaults={'title': cat.term})
categories.append(category)
return categories
def import_tags(self, categories):
tags = []
for cat in categories:
if len(cat.title.split()) > 1:
tags.append('"%s"' % slugify(cat.title).replace('-', ' '))
else:
tags.append(slugify(cat.title).replace('-', ' '))
return ', '.join(tags)
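# --- Illustrative usage sketch; not part of the original module. The feed URL
# and author username below are assumptions for the example. ---
def _example_invocation():  # hypothetical helper, illustration only
    from django.core.management import call_command
    # Equivalent to: python manage.py feed2zinnia http://example.com/atom.xml --author=admin
    call_command('feed2zinnia', 'http://example.com/atom.xml', author='admin')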
|
kawamon/hue
|
refs/heads/master
|
desktop/core/ext-py/Paste-2.0.1/tests/test_auth/test_auth_digest.py
|
47
|
# (c) 2005 Clark C. Evans
# This module is part of the Python Paste Project and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from paste.auth.digest import *
from paste.wsgilib import raw_interactive
from paste.httpexceptions import *
from paste.httpheaders import AUTHORIZATION, WWW_AUTHENTICATE, REMOTE_USER
import os
import six
def application(environ, start_response):
content = REMOTE_USER(environ)
start_response("200 OK",(('Content-Type', 'text/plain'),
('Content-Length', len(content))))
if six.PY3:
content = content.encode('utf8')
return [content]
realm = "tag:clarkevans.com,2005:testing"
def backwords(environ, realm, username):
""" dummy password hash, where user password is just reverse """
password = list(username)
password.reverse()
password = "".join(password)
return digest_password(realm, username, password)
application = AuthDigestHandler(application,realm,backwords)
application = HTTPExceptionHandler(application)
def check(username, password, path="/"):
""" perform two-stage authentication to verify login """
(status,headers,content,errors) = \
raw_interactive(application,path, accept='text/html')
assert status.startswith("401")
challenge = WWW_AUTHENTICATE(headers)
response = AUTHORIZATION(username=username, password=password,
challenge=challenge, path=path)
assert "Digest" in response and username in response
(status,headers,content,errors) = \
raw_interactive(application,path,
HTTP_AUTHORIZATION=response)
if status.startswith("200"):
return content
if status.startswith("401"):
return None
assert False, "Unexpected Status: %s" % status
def test_digest():
assert b'bing' == check("bing","gnib")
assert check("bing","bad") is None
#
# The following code uses sockets to test the functionality,
# to enable use:
#
#    $ TEST_SOCKET=1 py.test
#
if os.environ.get("TEST_SOCKET",""):
from six.moves.urllib.error import HTTPError
from six.moves.urllib.request import build_opener, HTTPDigestAuthHandler
from paste.debug.testserver import serve
server = serve(application)
def authfetch(username,password,path="/",realm=realm):
server.accept(2)
import socket
socket.setdefaulttimeout(5)
uri = ("http://%s:%s" % server.server_address) + path
auth = HTTPDigestAuthHandler()
auth.add_password(realm,uri,username,password)
opener = build_opener(auth)
result = opener.open(uri)
return result.read()
def test_success():
assert "bing" == authfetch('bing','gnib')
def test_failure():
# urllib tries 5 more times before it gives up
server.accept(5)
try:
authfetch('bing','wrong')
assert False, "this should raise an exception"
except HTTPError as e:
assert e.code == 401
def test_shutdown():
server.stop()
|
ravindrapanda/tensorflow
|
refs/heads/master
|
tensorflow/contrib/learn/python/learn/graph_actions.py
|
76
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""High level operations on graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import sys
import threading
import time
import numpy as np
from six import reraise
from tensorflow.contrib.framework import load_variable
from tensorflow.contrib.framework.python.ops import ops as contrib_ops
from tensorflow.contrib.framework.python.ops import variables as contrib_variables
from tensorflow.contrib.learn.python.learn import monitors as monitors_lib
from tensorflow.core.framework import summary_pb2
from tensorflow.python.client import session as tf_session
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import logging_ops
from tensorflow.python.ops import lookup_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.training import coordinator
from tensorflow.python.training import queue_runner
from tensorflow.python.training import saver as tf_saver
from tensorflow.python.training import session_manager as session_manager_lib
from tensorflow.python.training import summary_io
from tensorflow.python.training import supervisor as tf_supervisor
from tensorflow.python.util.deprecation import deprecated
# Singleton for SummaryWriter per logdir folder.
_SUMMARY_WRITERS = {}
# Lock protecting _SUMMARY_WRITERS
_summary_writer_lock = threading.Lock()
_graph_action_deprecation = deprecated(
'2017-02-15',
'graph_actions.py will be deleted. Use tf.train.* utilities instead. '
'You can use learn/estimators/estimator.py as an example.')
@_graph_action_deprecation
def clear_summary_writers():
"""Clear cached summary writers. Currently only used for unit tests."""
return summary_io.SummaryWriterCache.clear()
def get_summary_writer(logdir):
"""Returns single SummaryWriter per logdir in current run.
Args:
logdir: str, folder to write summaries.
Returns:
Existing `SummaryWriter` object or new one if never wrote to given
directory.
"""
return summary_io.SummaryWriterCache.get(logdir)
def _make_saver(graph, keep_checkpoint_max=5):
vars_to_save = (graph.get_collection(ops.GraphKeys.GLOBAL_VARIABLES) +
graph.get_collection(ops.GraphKeys.SAVEABLE_OBJECTS))
if vars_to_save:
return tf_saver.Saver(vars_to_save,
sharded=True,
max_to_keep=keep_checkpoint_max)
else:
return None
def _restore_from_checkpoint(session, graph, checkpoint_path, saver=None):
logging.info('Loading model from checkpoint: %s.', checkpoint_path)
saver = saver or _make_saver(graph)
if saver:
saver.restore(session, checkpoint_path)
else:
logging.info('No variables found in graph, not creating Saver() object.')
def _run_with_monitors(session, step, tensors, feed_dict, monitors):
"""Runs session for given tensors with monitor callbacks."""
for monitor in monitors:
tensors += monitor.step_begin(step)
tensors = list(set(tensors))
outputs = session.run(tensors, feed_dict=feed_dict)
outputs = dict(zip(
[t.name if isinstance(t, ops.Tensor) else t for t in tensors],
outputs))
should_stop = False
for monitor in monitors:
induce_stop = monitor.step_end(step, outputs)
should_stop = should_stop or induce_stop
return outputs, should_stop
@_graph_action_deprecation
def train(graph,
output_dir,
train_op,
loss_op,
global_step_tensor=None,
init_op=None,
init_feed_dict=None,
init_fn=None,
log_every_steps=10,
supervisor_is_chief=True,
supervisor_master='',
supervisor_save_model_secs=600,
keep_checkpoint_max=5,
supervisor_save_summaries_steps=100,
feed_fn=None,
steps=None,
fail_on_nan_loss=True,
monitors=None,
max_steps=None):
"""Train a model.
Given `graph`, a directory to write outputs to (`output_dir`), and some ops,
run a training loop. The given `train_op` performs one step of training on the
  model. The `loss_op` represents the objective function of the training. The
  given `train_op` is expected to increment the `global_step_tensor`, a scalar integer tensor
counting training steps. This function uses `Supervisor` to initialize the
graph (from a checkpoint if one is available in `output_dir`), write summaries
defined in the graph, and write regular checkpoints as defined by
`supervisor_save_model_secs`.
Training continues until `global_step_tensor` evaluates to `max_steps`, or, if
`fail_on_nan_loss`, until `loss_op` evaluates to `NaN`. In that case the
program is terminated with exit code 1.
Args:
graph: A graph to train. It is expected that this graph is not in use
elsewhere.
output_dir: A directory to write outputs to.
train_op: An op that performs one training step when run.
loss_op: A scalar loss tensor.
global_step_tensor: A tensor representing the global step. If none is given,
one is extracted from the graph using the same logic as in `Supervisor`.
init_op: An op that initializes the graph. If `None`, use `Supervisor`'s
default.
init_feed_dict: A dictionary that maps `Tensor` objects to feed values.
This feed dictionary will be used when `init_op` is evaluated.
init_fn: Optional callable passed to Supervisor to initialize the model.
log_every_steps: Output logs regularly. The logs contain timing data and the
current loss.
supervisor_is_chief: Whether the current process is the chief supervisor in
charge of restoring the model and running standard services.
supervisor_master: The master string to use when preparing the session.
supervisor_save_model_secs: Save a checkpoint every
`supervisor_save_model_secs` seconds when training.
keep_checkpoint_max: The maximum number of recent checkpoint files to
keep. As new files are created, older files are deleted. If None or 0,
all checkpoint files are kept. This is simply passed as the max_to_keep
arg to tf.train.Saver constructor.
supervisor_save_summaries_steps: Save summaries every
      `supervisor_save_summaries_steps` steps when training.
feed_fn: A function that is called every iteration to produce a `feed_dict`
passed to `session.run` calls. Optional.
steps: Trains for this many steps (e.g. current global step + `steps`).
fail_on_nan_loss: If true, raise `NanLossDuringTrainingError` if `loss_op`
evaluates to `NaN`. If false, continue training as if nothing happened.
monitors: List of `BaseMonitor` subclass instances. Used for callbacks
inside the training loop.
max_steps: Number of total steps for which to train model. If `None`,
      train forever. Two calls to fit(steps=100) mean 200 training iterations
      in total, whereas two calls to fit(max_steps=100) mean the second call
      performs no iterations, since the first call already did all 100 steps.
Returns:
The final loss value.
Raises:
ValueError: If `output_dir`, `train_op`, `loss_op`, or `global_step_tensor`
is not provided. See `tf.contrib.framework.get_global_step` for how we
look up the latter if not provided explicitly.
NanLossDuringTrainingError: If `fail_on_nan_loss` is `True`, and loss ever
evaluates to `NaN`.
ValueError: If both `steps` and `max_steps` are not `None`.
"""
while True:
try:
return _train_internal(graph,
output_dir,
train_op,
loss_op,
global_step_tensor,
init_op,
init_feed_dict,
init_fn,
log_every_steps,
supervisor_is_chief,
supervisor_master,
supervisor_save_model_secs,
keep_checkpoint_max,
supervisor_save_summaries_steps,
feed_fn,
steps,
fail_on_nan_loss,
monitors,
max_steps)
except errors.AbortedError:
# Happens when PS restarts, keep training.
logging.warning('Training got Aborted error. Keep training.')
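# --- Illustrative usage sketch; not part of the original module. It builds a
# tiny graph (a decaying weight, a scalar loss Tensor and a global step) and
# hands it to train(); the output directory is an assumption. ---
def _example_train_usage():  # hypothetical helper, illustration only
  g = ops.Graph()
  with g.as_default():
    global_step = contrib_variables.get_or_create_global_step()
    weight = variables.Variable(5.0, name='weight')
    loss_op = weight * weight  # a scalar Tensor to report as the loss
    train_op = control_flow_ops.group(
        weight.assign_sub(0.1 * weight),
        global_step.assign_add(1))
  return train(g, output_dir='/tmp/graph_actions_example',
               train_op=train_op, loss_op=loss_op,
               global_step_tensor=global_step, steps=5)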
def _train_internal(graph,
output_dir,
train_op,
loss_op,
global_step_tensor,
init_op,
init_feed_dict,
init_fn,
log_every_steps,
supervisor_is_chief,
supervisor_master,
supervisor_save_model_secs,
keep_checkpoint_max,
supervisor_save_summaries_steps,
feed_fn,
steps,
fail_on_nan_loss,
monitors,
max_steps):
"""See train."""
if (steps is not None) and (max_steps is not None):
raise ValueError('Can not provide both steps and max_steps.')
if not output_dir:
raise ValueError('Output directory should be non-empty %s.' % output_dir)
if train_op is None:
raise ValueError('Missing train_op.')
if loss_op is None:
raise ValueError('Missing loss_op.')
with graph.as_default():
global_step_tensor = contrib_variables.assert_or_get_global_step(
graph, global_step_tensor)
if global_step_tensor is None:
raise ValueError('No "global_step" was provided or found in the graph.')
# Get current step.
try:
start_step = load_variable(output_dir, global_step_tensor.name)
except (errors.NotFoundError, ValueError):
start_step = 0
summary_writer = (get_summary_writer(output_dir)
if supervisor_is_chief else None)
# Add default chief monitors if none were provided.
if not monitors:
monitors = monitors_lib.get_default_monitors(
loss_op=loss_op,
summary_op=logging_ops.get_summary_op(),
save_summary_steps=supervisor_save_summaries_steps,
summary_writer=summary_writer) if supervisor_is_chief else []
# TODO(ipolosukhin): Replace all functionality of Supervisor
# with Chief-Exclusive Monitors.
if not supervisor_is_chief:
# Prune list of monitor to the ones runnable on all workers.
monitors = [monitor for monitor in monitors if monitor.run_on_all_workers]
if max_steps is None:
max_steps = (start_step + steps) if steps else None
# Start monitors, can create graph parts.
for monitor in monitors:
monitor.begin(max_steps=max_steps)
supervisor = tf_supervisor.Supervisor(
graph,
init_op=init_op or tf_supervisor.Supervisor.USE_DEFAULT,
init_feed_dict=init_feed_dict,
is_chief=supervisor_is_chief,
logdir=output_dir,
saver=_make_saver(graph, keep_checkpoint_max),
global_step=global_step_tensor,
summary_op=None,
summary_writer=summary_writer,
save_model_secs=supervisor_save_model_secs,
init_fn=init_fn)
session = supervisor.PrepareSession(master=supervisor_master,
start_standard_services=True)
supervisor.StartQueueRunners(session)
with session:
get_current_step = lambda: session.run(global_step_tensor)
start_step = get_current_step()
last_step = start_step
last_log_step = start_step
loss_value = None
logging.info('Training steps [%d,%s)', last_step, 'inf'
if max_steps is None else str(max_steps))
excinfo = None
try:
while not supervisor.ShouldStop() and (
(max_steps is None) or (last_step < max_steps)):
start_time = time.time()
feed_dict = feed_fn() if feed_fn is not None else None
outputs, should_stop = _run_with_monitors(
session, last_step + 1, [train_op, loss_op], feed_dict, monitors)
loss_value = outputs[loss_op.name]
if np.isnan(loss_value):
failure_message = 'Model diverged with loss = NaN.'
if fail_on_nan_loss:
logging.error(failure_message)
raise monitors_lib.NanLossDuringTrainingError()
else:
logging.warning(failure_message)
if should_stop:
break
this_step = get_current_step()
if this_step <= last_step:
logging.error(
'Global step was not incremented by train op at step %s'
': new step %d', last_step, this_step)
last_step = this_step
is_last_step = (max_steps is not None) and (last_step >= max_steps)
if is_last_step or (last_step - last_log_step >= log_every_steps):
logging.info(
'training step %d, loss = %.5f (%.3f sec/batch).',
last_step, loss_value, float(time.time() - start_time))
last_log_step = last_step
except errors.OutOfRangeError as e:
logging.warn('Got exception during tf.learn training loop possibly '
'due to exhausted input queue %s.', e)
except StopIteration:
        logging.info('Exhausted input iterator.')
except BaseException as e: # pylint: disable=broad-except
# Hold on to any other exceptions while we try recording a final
# checkpoint and summary.
excinfo = sys.exc_info()
finally:
try:
# Call supervisor.Stop() from within a try block because it re-raises
# exceptions thrown by the supervised threads.
supervisor.Stop(close_summary_writer=False)
# Save one last checkpoint and summaries
# TODO(wicke): This should be handled by Supervisor
# In case we encountered an exception in the try block before we updated
# last_step, update it here (again).
last_step = get_current_step()
if supervisor_is_chief:
ckpt_path = supervisor.save_path
logging.info('Saving checkpoint for step %d to checkpoint: %s.',
last_step, ckpt_path)
supervisor.saver.save(session, ckpt_path, global_step=last_step)
# Finish monitors.
for monitor in monitors:
monitor.end()
# catch OutOfRangeError which is thrown when queue is out of data (and for
# other reasons as well).
except errors.OutOfRangeError as e:
logging.warn('OutOfRangeError in tf.learn final checkpoint possibly '
'due to exhausted input queue. Note: summary_op is not '
'expected to trigger dequeues. %s.', e)
except BaseException as e: # pylint: disable=broad-except
# If we don't already have an exception to re-raise, raise this one.
if not excinfo:
raise
# Otherwise, log this one and raise the other in the finally block.
logging.error('Got exception during tf.learn final checkpoint %s.', e)
finally:
if excinfo:
reraise(*excinfo)
return loss_value
def _get_first_op_from_collection(collection_name):
elements = ops.get_collection(collection_name)
if elements:
return elements[0]
return None
def _get_saver():
"""Lazy init and return saver."""
saver = _get_first_op_from_collection(ops.GraphKeys.SAVERS)
if saver is None and variables.global_variables():
saver = tf_saver.Saver()
ops.add_to_collection(ops.GraphKeys.SAVERS, saver)
return saver
def _get_ready_op():
ready_op = _get_first_op_from_collection(ops.GraphKeys.READY_OP)
if ready_op is None:
ready_op = variables.report_uninitialized_variables()
ops.add_to_collection(ops.GraphKeys.READY_OP, ready_op)
return ready_op
def _get_local_init_op():
"""Returns the local init ops to initialize tables and local variables."""
local_init_op = _get_first_op_from_collection(
ops.GraphKeys.LOCAL_INIT_OP)
if local_init_op is None:
op_list = [
variables.local_variables_initializer(),
lookup_ops.tables_initializer()
]
if op_list:
local_init_op = control_flow_ops.group(*op_list)
ops.add_to_collection(ops.GraphKeys.LOCAL_INIT_OP, local_init_op)
return local_init_op
def _eval_results_to_str(eval_results):
return ', '.join('%s = %s' % (k, v) for k, v in sorted(eval_results.items()))
def _write_summary_results(output_dir, eval_results, current_global_step):
"""Writes eval results into summary file in given dir."""
logging.info('Saving evaluation summary for step %d: %s', current_global_step,
_eval_results_to_str(eval_results))
summary_writer = get_summary_writer(output_dir)
summary = summary_pb2.Summary()
for key in eval_results:
if eval_results[key] is None:
continue
value = summary.value.add()
value.tag = key
if (isinstance(eval_results[key], np.float32) or
isinstance(eval_results[key], float)):
value.simple_value = float(eval_results[key])
else:
logging.warn('Skipping summary for %s, must be a float or np.float32.',
key)
summary_writer.add_summary(summary, current_global_step)
summary_writer.flush()
@_graph_action_deprecation
def evaluate(graph,
output_dir,
checkpoint_path,
eval_dict,
update_op=None,
global_step_tensor=None,
supervisor_master='',
log_every_steps=10,
feed_fn=None,
max_steps=None):
"""Evaluate a model loaded from a checkpoint.
Given `graph`, a directory to write summaries to (`output_dir`), a checkpoint
to restore variables from, and a `dict` of `Tensor`s to evaluate, run an eval
loop for `max_steps` steps, or until an exception (generally, an
end-of-input signal from a reader operation) is raised from running
`eval_dict`.
In each step of evaluation, all tensors in the `eval_dict` are evaluated, and
every `log_every_steps` steps, they are logged. At the very end of evaluation,
a summary is evaluated (finding the summary ops using `Supervisor`'s logic)
and written to `output_dir`.
Args:
graph: A `Graph` to train. It is expected that this graph is not in use
elsewhere.
output_dir: A string containing the directory to write a summary to.
checkpoint_path: A string containing the path to a checkpoint to restore.
Can be `None` if the graph doesn't require loading any variables.
eval_dict: A `dict` mapping string names to tensors to evaluate. It is
evaluated in every logging step. The result of the final evaluation is
returned. If `update_op` is None, then it's evaluated in every step. If
`max_steps` is `None`, this should depend on a reader that will raise an
end-of-input exception when the inputs are exhausted.
update_op: A `Tensor` which is run in every step.
global_step_tensor: A `Variable` containing the global step. If `None`,
one is extracted from the graph using the same logic as in `Supervisor`.
Used to place eval summaries on training curves.
supervisor_master: The master string to use when preparing the session.
log_every_steps: Integer. Output logs every `log_every_steps` evaluation
steps. The logs contain the `eval_dict` and timing information.
feed_fn: A function that is called every iteration to produce a `feed_dict`
passed to `session.run` calls. Optional.
max_steps: Integer. Evaluate `eval_dict` this many times.
Returns:
A tuple `(eval_results, global_step)`:
eval_results: A `dict` mapping `string` to numeric values (`int`, `float`)
that are the result of running eval_dict in the last step. `None` if no
eval steps were run.
global_step: The global step this evaluation corresponds to.
Raises:
ValueError: if `output_dir` is empty.
"""
if not output_dir:
raise ValueError('Output directory should be non-empty %s.' % output_dir)
with graph.as_default():
global_step_tensor = contrib_variables.assert_or_get_global_step(
graph, global_step_tensor)
# Create or get summary op, global_step and saver.
saver = _get_saver()
local_init_op = _get_local_init_op()
ready_for_local_init_op = _get_first_op_from_collection(
ops.GraphKeys.READY_FOR_LOCAL_INIT_OP)
ready_op = _get_ready_op()
session_manager = session_manager_lib.SessionManager(
local_init_op=local_init_op,
ready_op=ready_op,
ready_for_local_init_op=ready_for_local_init_op)
session, initialized = session_manager.recover_session(
master=supervisor_master,
saver=saver,
checkpoint_dir=checkpoint_path)
# Start queue runners.
coord = coordinator.Coordinator()
threads = queue_runner.start_queue_runners(session, coord)
with session:
if not initialized:
logging.warning('Failed to initialize from %s.', checkpoint_path)
# TODO(ipolosukhin): This should be failing, but old code relies on that.
session.run(variables.global_variables_initializer())
if checkpoint_path:
_restore_from_checkpoint(session, graph, checkpoint_path, saver)
current_global_step = session.run(global_step_tensor)
eval_results = None
# TODO(amodei): Fix this to run through the eval set exactly once.
step = 0
eval_step = None
feed_dict = None
logging.info('Eval steps [%d,%s) for training step %d.', step,
'inf' if max_steps is None
else str(max_steps), current_global_step)
try:
try:
while (max_steps is None) or (step < max_steps):
step += 1
start_time = time.time()
feed_dict = feed_fn() if feed_fn is not None else None
if update_op is not None:
session.run(update_op, feed_dict=feed_dict)
else:
eval_results = session.run(eval_dict, feed_dict=feed_dict)
eval_step = step
# TODO(wicke): We should assert that the global step hasn't changed.
if step % log_every_steps == 0:
if eval_step is None or step != eval_step:
eval_results = session.run(eval_dict, feed_dict=feed_dict)
eval_step = step
duration = time.time() - start_time
logging.info('Results after %d steps (%.3f sec/batch): %s.',
step, float(duration),
_eval_results_to_str(eval_results))
finally:
if eval_results is None or step != eval_step:
eval_results = session.run(eval_dict, feed_dict=feed_dict)
eval_step = step
# Stop session first, before queue runners.
session.close()
# Stop queue runners.
try:
coord.request_stop()
coord.join(threads, stop_grace_period_secs=120)
except (RuntimeError, errors.CancelledError) as e:
logging.warning('Coordinator didn\'t stop cleanly: %s', e)
# catch OutOfRangeError which is thrown when queue is out of data (and for
# other reasons as well).
except errors.OutOfRangeError as e:
if max_steps is None:
logging.info('Input queue is exhausted.')
else:
logging.warn('Input queue is exhausted: %s.', e)
    # catch StopIteration which is thrown if DataReader is out of data.
except StopIteration as e:
if max_steps is None:
logging.info('Input iterator is exhausted.')
else:
logging.warn('Input iterator is exhausted: %s.', e)
# Save summaries for this evaluation.
_write_summary_results(output_dir, eval_results, current_global_step)
return eval_results, current_global_step
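# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the original module: one possible way
# to drive `evaluate` above on a toy graph. The tensor values, metric name and
# `output_dir` path are hypothetical, and a TF 1.x graph-mode environment in
# which this deprecated contrib helper still exists is assumed.
def _example_evaluate_usage():
  import tensorflow as tf  # assumed TF 1.x

  g = tf.Graph()
  with g.as_default():
    # `evaluate` looks up the global step from the graph when none is passed.
    tf.train.get_or_create_global_step()
    labels = tf.constant([1.0, 0.0, 1.0])
    predictions = tf.constant([0.8, 0.2, 0.6])
    loss = tf.reduce_mean(tf.square(labels - predictions))
  # No checkpoint is restored (checkpoint_path=None); variables are simply
  # initialized, the loop runs one step and a summary lands in output_dir.
  return evaluate(
      graph=g,
      output_dir='/tmp/eval_example',  # hypothetical path
      checkpoint_path=None,
      eval_dict={'loss': loss},
      max_steps=1)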
@_graph_action_deprecation
def run_n(output_dict, feed_dict=None, restore_checkpoint_path=None, n=1):
"""Run `output_dict` tensors `n` times, with the same `feed_dict` each run.
Args:
output_dict: A `dict` mapping string names to tensors to run. Must all be
from the same graph.
feed_dict: `dict` of input values to feed each run.
restore_checkpoint_path: A string containing the path to a checkpoint to
restore.
n: Number of times to repeat.
Returns:
A list of `n` `dict` objects, each containing values read from `output_dict`
tensors.
"""
return run_feeds(
output_dict=output_dict,
feed_dicts=itertools.repeat(feed_dict, n),
restore_checkpoint_path=restore_checkpoint_path)
@_graph_action_deprecation
def run_feeds_iter(output_dict, feed_dicts, restore_checkpoint_path=None):
"""Run `output_dict` tensors with each input in `feed_dicts`.
If `restore_checkpoint_path` is supplied, restore from checkpoint. Otherwise,
init all variables.
Args:
output_dict: A `dict` mapping string names to `Tensor` objects to run.
Tensors must all be from the same graph.
feed_dicts: Iterable of `dict` objects of input values to feed.
restore_checkpoint_path: A string containing the path to a checkpoint to
restore.
Yields:
A sequence of dicts of values read from `output_dict` tensors, one item
yielded for each item in `feed_dicts`. Keys are the same as `output_dict`,
values are the results read from the corresponding `Tensor` in
`output_dict`.
Raises:
ValueError: if `output_dict` or `feed_dicts` is None or empty.
"""
if not output_dict:
raise ValueError('output_dict is invalid: %s.' % output_dict)
if not feed_dicts:
raise ValueError('feed_dicts is invalid: %s.' % feed_dicts)
graph = contrib_ops.get_graph_from_inputs(output_dict.values())
with graph.as_default() as g:
with tf_session.Session('') as session:
session.run(
resources.initialize_resources(resources.shared_resources() +
resources.local_resources()))
if restore_checkpoint_path:
_restore_from_checkpoint(session, g, restore_checkpoint_path)
else:
session.run(variables.global_variables_initializer())
session.run(variables.local_variables_initializer())
session.run(lookup_ops.tables_initializer())
coord = coordinator.Coordinator()
threads = None
try:
threads = queue_runner.start_queue_runners(session, coord=coord)
for f in feed_dicts:
yield session.run(output_dict, f)
finally:
coord.request_stop()
if threads:
coord.join(threads, stop_grace_period_secs=120)
@_graph_action_deprecation
def run_feeds(*args, **kwargs):
"""See run_feeds_iter(). Returns a `list` instead of an iterator."""
return list(run_feeds_iter(*args, **kwargs))
@_graph_action_deprecation
def infer(restore_checkpoint_path, output_dict, feed_dict=None):
"""Restore graph from `restore_checkpoint_path` and run `output_dict` tensors.
If `restore_checkpoint_path` is supplied, restore from checkpoint. Otherwise,
init all variables.
Args:
restore_checkpoint_path: A string containing the path to a checkpoint to
restore.
output_dict: A `dict` mapping string names to `Tensor` objects to run.
Tensors must all be from the same graph.
feed_dict: `dict` object mapping `Tensor` objects to input values to feed.
Returns:
Dict of values read from `output_dict` tensors. Keys are the same as
`output_dict`, values are the results read from the corresponding `Tensor`
in `output_dict`.
Raises:
ValueError: if `output_dict` or `feed_dicts` is None or empty.
"""
return run_feeds(output_dict=output_dict,
feed_dicts=[feed_dict] if feed_dict is not None else [None],
restore_checkpoint_path=restore_checkpoint_path)[0]
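# ---------------------------------------------------------------------------
# Illustrative sketch only, not part of the original module: feeding a
# placeholder through `run_n` above. The tensor names and feed value are
# hypothetical; a TF 1.x graph-mode environment is assumed.
def _example_run_n_usage():
  import tensorflow as tf  # assumed TF 1.x

  g = tf.Graph()
  with g.as_default():
    x = tf.placeholder(tf.float32, shape=[], name='x')
    doubled = tf.multiply(x, 2.0, name='doubled')
  # Runs the output twice with the same feed; no checkpoint is restored, so
  # `run_feeds_iter` just initializes variables before running.
  results = run_n({'doubled': doubled}, feed_dict={x: 3.0}, n=2)
  return results  # expected: [{'doubled': 6.0}, {'doubled': 6.0}]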
|
GalaxyTab4/android_kernel_samsung_millet
|
refs/heads/millet
|
tools/perf/python/twatch.py
|
7370
|
#! /usr/bin/python
# -*- python -*-
# -*- coding: utf-8 -*-
# twatch - Experimental use of the perf python interface
# Copyright (C) 2011 Arnaldo Carvalho de Melo <acme@redhat.com>
#
# This application is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; version 2.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
import perf
def main():
cpus = perf.cpu_map()
threads = perf.thread_map()
evsel = perf.evsel(task = 1, comm = 1, mmap = 0,
wakeup_events = 1, watermark = 1,
sample_id_all = 1,
sample_type = perf.SAMPLE_PERIOD | perf.SAMPLE_TID | perf.SAMPLE_CPU | perf.SAMPLE_TID)
evsel.open(cpus = cpus, threads = threads);
evlist = perf.evlist(cpus, threads)
evlist.add(evsel)
evlist.mmap()
while True:
evlist.poll(timeout = -1)
for cpu in cpus:
event = evlist.read_on_cpu(cpu)
if not event:
continue
print "cpu: %2d, pid: %4d, tid: %4d" % (event.sample_cpu,
event.sample_pid,
event.sample_tid),
print event
if __name__ == '__main__':
main()
|
maestrano/odoo
|
refs/heads/master
|
addons/account_bank_statement_extensions/wizard/confirm_statement_line.py
|
381
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class confirm_statement_line(osv.osv_memory):
_name = 'confirm.statement.line'
_description = 'Confirm selected statement lines'
def confirm_lines(self, cr, uid, ids, context):
line_ids = context['active_ids']
line_obj = self.pool.get('account.bank.statement.line')
line_obj.write(cr, uid, line_ids, {'state': 'confirm'}, context=context)
return {}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
igraph/python-igraph
|
refs/heads/master
|
tests/test_conversion.py
|
1
|
import random
import unittest
from igraph import Graph, Matrix
class DirectedUndirectedTests(unittest.TestCase):
def testToUndirected(self):
graph = Graph([(0, 1), (0, 2), (1, 0)], directed=True)
graph2 = graph.copy()
graph2.to_undirected(mode=False)
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0, 1), (0, 1), (0, 2)])
graph2 = graph.copy()
graph2.to_undirected()
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0, 1), (0, 2)])
graph2 = graph.copy()
graph2.es["weight"] = [1, 2, 3]
graph2.to_undirected(mode="collapse", combine_edges="sum")
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0, 1), (0, 2)])
self.assertTrue(graph2.es["weight"] == [4, 2])
graph = Graph([(0, 1), (1, 0), (0, 1), (1, 0), (2, 1), (1, 2)], directed=True)
graph2 = graph.copy()
graph2.es["weight"] = [1, 2, 3, 4, 5, 6]
graph2.to_undirected(mode="mutual", combine_edges="sum")
self.assertTrue(graph2.vcount() == graph.vcount())
self.assertTrue(graph2.is_directed() == False)
self.assertTrue(sorted(graph2.get_edgelist()) == [(0, 1), (0, 1), (1, 2)])
self.assertTrue(
graph2.es["weight"] == [7, 3, 11] or graph2.es["weight"] == [3, 7, 11]
)
def testToDirectedNoModeArg(self):
graph = Graph([(0, 1), (0, 2), (2, 3), (2, 4)], directed=False)
graph.to_directed()
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 5)
self.assertTrue(
sorted(graph.get_edgelist())
== [(0, 1), (0, 2), (1, 0), (2, 0), (2, 3), (2, 4), (3, 2), (4, 2)]
)
def testToDirectedMutual(self):
graph = Graph([(0, 1), (0, 2), (2, 3), (2, 4)], directed=False)
graph.to_directed("mutual")
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 5)
self.assertTrue(
sorted(graph.get_edgelist())
== [(0, 1), (0, 2), (1, 0), (2, 0), (2, 3), (2, 4), (3, 2), (4, 2)]
)
def testToDirectedAcyclic(self):
graph = Graph([(0, 1), (2, 0), (3, 0), (3, 0), (4, 2)], directed=False)
graph.to_directed("acyclic")
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 5)
self.assertTrue(
sorted(graph.get_edgelist())
== [(0, 1), (0, 2), (0, 3), (0, 3), (2, 4)]
)
def testToDirectedRandom(self):
random.seed(0)
graph = Graph.Ring(200, directed=False)
graph.to_directed("random")
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 200)
edgelist1 = sorted(graph.get_edgelist())
graph = Graph.Ring(200, directed=False)
graph.to_directed("random")
self.assertTrue(graph.is_directed())
self.assertTrue(graph.vcount() == 200)
edgelist2 = sorted(graph.get_edgelist())
self.assertTrue(edgelist1 != edgelist2)
def testToDirectedInvalidMode(self):
graph = Graph([(0, 1), (0, 2), (2, 3), (2, 4)], directed=False)
with self.assertRaises(ValueError):
graph.to_directed("no-such-mode")
class GraphRepresentationTests(unittest.TestCase):
def testGetAdjacency(self):
# Undirected case
g = Graph.Tree(6, 3)
g.es["weight"] = list(range(5))
self.assertTrue(
g.get_adjacency()
== Matrix(
[
[0, 1, 1, 1, 0, 0],
[1, 0, 0, 0, 1, 1],
[1, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
[0, 1, 0, 0, 0, 0],
]
)
)
self.assertTrue(
g.get_adjacency(attribute="weight")
== Matrix(
[
[0, 0, 1, 2, 0, 0],
[0, 0, 0, 0, 3, 4],
[1, 0, 0, 0, 0, 0],
[2, 0, 0, 0, 0, 0],
[0, 3, 0, 0, 0, 0],
[0, 4, 0, 0, 0, 0],
]
)
)
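        # With eids=True the matrix cells hold edge IDs and -1 marks absent
        # edges, which is why the expected matrix below is written with the
        # IDs shifted by +1 and then corrected with "- 1".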
self.assertTrue(
g.get_adjacency(eids=True)
== Matrix(
[
[0, 1, 2, 3, 0, 0],
[1, 0, 0, 0, 4, 5],
[2, 0, 0, 0, 0, 0],
[3, 0, 0, 0, 0, 0],
[0, 4, 0, 0, 0, 0],
[0, 5, 0, 0, 0, 0],
]
)
- 1
)
# Directed case
g = Graph.Tree(6, 3, "tree_out")
g.add_edges([(0, 1), (1, 0)])
self.assertTrue(
g.get_adjacency()
== Matrix(
[
[0, 2, 1, 1, 0, 0],
[1, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
]
)
)
def testGetSparseAdjacency(self):
try:
from scipy import sparse
import numpy as np
except ImportError:
self.skipTest("Scipy and numpy are dependencies of this test.")
# Undirected case
g = Graph.Tree(6, 3)
g.es["weight"] = list(range(5))
self.assertTrue(
np.all((g.get_adjacency_sparse() == np.array(g.get_adjacency().data)))
)
self.assertTrue(
np.all(
(
g.get_adjacency_sparse(attribute="weight")
== np.array(g.get_adjacency(attribute="weight").data)
)
)
)
# Directed case
g = Graph.Tree(6, 3, "tree_out")
g.add_edges([(0, 1), (1, 0)])
self.assertTrue(
np.all(g.get_adjacency_sparse() == np.array(g.get_adjacency().data))
)
def suite():
direction_suite = unittest.makeSuite(DirectedUndirectedTests)
representation_suite = unittest.makeSuite(GraphRepresentationTests)
return unittest.TestSuite([direction_suite, representation_suite])
def test():
runner = unittest.TextTestRunner()
runner.run(suite())
if __name__ == "__main__":
test()
|
ormnv/os_final_project
|
refs/heads/master
|
django/core/management/commands/validate.py
|
239
|
from django.core.management.base import NoArgsCommand
class Command(NoArgsCommand):
help = "Validates all installed models."
requires_model_validation = False
def handle_noargs(self, **options):
self.validate(display_num_errors=True)
|
zoincoin/zoin
|
refs/heads/master
|
share/qt/make_spinner.py
|
4415
|
#!/usr/bin/env python
# W.J. van der Laan, 2011
# Make spinning .mng animation from a .png
# Requires imagemagick 6.7+
from __future__ import division
from os import path
from PIL import Image
from subprocess import Popen
SRC='img/reload_scaled.png'
DST='../../src/qt/res/movies/update_spinner.mng'
TMPDIR='/tmp'
TMPNAME='tmp-%03i.png'
NUMFRAMES=35
FRAMERATE=10.0
CONVERT='convert'
CLOCKWISE=True
DSIZE=(16,16)
im_src = Image.open(SRC)
if CLOCKWISE:
im_src = im_src.transpose(Image.FLIP_LEFT_RIGHT)
def frame_to_filename(frame):
return path.join(TMPDIR, TMPNAME % frame)
frame_files = []
for frame in xrange(NUMFRAMES):
rotation = (frame + 0.5) / NUMFRAMES * 360.0
if CLOCKWISE:
rotation = -rotation
im_new = im_src.rotate(rotation, Image.BICUBIC)
im_new.thumbnail(DSIZE, Image.ANTIALIAS)
outfile = frame_to_filename(frame)
im_new.save(outfile, 'png')
frame_files.append(outfile)
p = Popen([CONVERT, "-delay", str(FRAMERATE), "-dispose", "2"] + frame_files + [DST])
p.communicate()
|
IanBoyanZhang/MVP
|
refs/heads/master
|
server/config/twitterstream.py
|
2
|
import oauth2 as oauth
import urllib2 as urllib
from credentials import *
# See assignment1.html instructions or README for how to get these credentials
_debug = 0
oauth_token = oauth.Token(key=access_token_key, secret=access_token_secret)
oauth_consumer = oauth.Consumer(key=api_key, secret=api_secret)
signature_method_hmac_sha1 = oauth.SignatureMethod_HMAC_SHA1()
http_method = "GET"
http_handler = urllib.HTTPHandler(debuglevel=_debug)
https_handler = urllib.HTTPSHandler(debuglevel=_debug)
'''
Construct, sign, and open a twitter request
using the hard-coded credentials above.
'''
def twitterreq(url, method, parameters):
req = oauth.Request.from_consumer_and_token(oauth_consumer,
token=oauth_token,
http_method=http_method,
http_url=url,
parameters=parameters)
req.sign_request(signature_method_hmac_sha1, oauth_consumer, oauth_token)
headers = req.to_header()
if http_method == "POST":
encoded_post_data = req.to_postdata()
else:
encoded_post_data = None
url = req.to_url()
opener = urllib.OpenerDirector()
opener.add_handler(http_handler)
opener.add_handler(https_handler)
response = opener.open(url, encoded_post_data)
return response
# url = "https://stream.twitter.com/1.1/statuses/filter.json?delimited=length&track=twitterapi"
# url = "https://api.twitter.com/1.1/geo/search.json?query=Toronto"
# url = "https://api.twitter.com/1.1/geo/search.json?lat=37.78&long=-122.40"
# url = "https://api.twitter.com/1.1/geo/search.json?granularity=city&lat=37.774546&long=-122.433523"
def fetchsamples():
url = "https://stream.twitter.com/1/statuses/sample.json"
parameters = []
response = twitterreq(url, "GET", parameters)
for line in response:
print line.strip()
if __name__ == '__main__':
fetchsamples()
|
marco-hoyer/cfn-sphere
|
refs/heads/master
|
src/main/python/cfn_sphere/util.py
|
2
|
import json
import logging
import os
import time
from functools import wraps
import yaml
from dateutil import parser
from git import Repo, InvalidGitRepositoryError
from prettytable import PrettyTable
from six.moves.urllib import request as urllib2
from cfn_sphere.exceptions import CfnSphereException, CfnSphereBotoError
def timed(function):
logger = logging.getLogger(__name__)
@wraps(function)
def wrapper(*args, **kwds):
start = time.time()
result = function(*args, **kwds)
elapsed = time.time() - start
logger.debug("Execution of {0} required {1}s".format(function.__name__, round(elapsed, 2)))
return result
return wrapper
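# Illustrative sketch, not part of the original module: applying the ``timed``
# decorator above. The function below is hypothetical.
@timed
def _example_timed_operation():
    """Sleeps briefly so the debug log reports a measurable duration."""
    time.sleep(0.1)
    return "done"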
def get_logger(root=False):
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%d.%m.%Y %H:%M:%S')
if root:
return logging.getLogger('cfn_sphere')
else:
return logging.getLogger('cfn_sphere.{0}'.format(__name__))
def convert_file(file_path):
if file_path.lower().endswith('.json'):
convert = convert_json_to_yaml_string
elif file_path.lower().endswith('.yml'):
convert = convert_yaml_to_json_string
elif file_path.lower().endswith('.yaml'):
convert = convert_yaml_to_json_string
else:
raise Exception('Unknown file extension. Please use .yaml, .yml or .json!')
with open(file_path, 'r') as filestream:
return convert(filestream.read())
def kv_list_to_dict(items):
"""
Converts a list of strings with k=v to dict {k:v}
:param items: list(string)
:return: dict
"""
result = {}
for item in items:
parts = str(item).split("=")
if not len(parts) == 2:
raise CfnSphereException("Could not parse kv pair: {0}, please ensure it is passed as k=v".format(items))
result[parts[0]] = parts[1]
return result
def kv_list_string_to_dict(value):
"""
Converts a string with format k1=v1,k2=v2 to dict {k1:v1, k2:v2}
    :param value: string
:return: dict
"""
items = value.split(",")
return kv_list_to_dict(items)
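# Illustrative sketch, not part of the original module: expected behaviour of
# the two k=v helpers above, with hypothetical inputs.
def _example_kv_parsing():
    assert kv_list_to_dict(["a=1", "b=2"]) == {"a": "1", "b": "2"}
    assert kv_list_string_to_dict("a=1,b=2") == {"a": "1", "b": "2"}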
def get_pretty_parameters_string(stack):
table = PrettyTable(["Parameter", "Value"])
parameters = stack.parameters
no_echo_parameter_keys = stack.template.get_no_echo_parameter_keys()
for key, value in parameters.items():
if key in no_echo_parameter_keys:
table.add_row([key, "***"])
else:
table.add_row([key, value])
return table.get_string(sortby="Parameter")
def get_pretty_stack_outputs(stack_outputs):
table = PrettyTable(["Output", "Value"])
table_has_entries = False
for output in stack_outputs:
table_has_entries = True
table.add_row([output["OutputKey"], output["OutputValue"]])
if table_has_entries:
return table.get_string(sortby="Output")
else:
return None
def strip_string(string):
return string[:100] + "..."
def convert_json_to_yaml_string(data):
if not data:
return ''
return yaml.safe_dump(json.loads(data), default_flow_style=False)
def convert_yaml_to_json_string(data):
if not data:
return '{}'
return json.dumps(yaml.load(data), indent=2)
def convert_dict_to_json_string(data):
return json.dumps(data, indent=2)
def get_cfn_api_server_time():
url = "https://aws.amazon.com"
try:
header_date = urllib2.urlopen(url).info().get('Date')
return parser.parse(header_date)
except Exception as e:
raise CfnSphereException("Could not get AWS server time from {0}. Error: {1}".format(url, e))
def get_latest_version():
try:
package_info = get_pypi_package_description()
return package_info["info"]["version"]
except Exception:
return None
def get_pypi_package_description():
url = "https://pypi.python.org/pypi/cfn-sphere/json"
response = urllib2.urlopen(url, timeout=2)
return json.load(response)
def with_boto_retry(max_retries=3, pause_time_multiplier=5):
"""
    Decorator that retries a wrapped function call if it raises a CfnSphereBotoError
with is_throttling_exception=True
:param max_retries:
:param pause_time_multiplier:
:return: :raise e:
"""
logger = get_logger()
def decorator(function):
@wraps(function)
def wrapper(*args, **kwds):
retries = 0
while True:
try:
return function(*args, **kwds)
except CfnSphereBotoError as e:
if not e.is_throttling_exception or retries >= max_retries:
raise e
sleep_time = pause_time_multiplier * (2 ** retries)
logger.warn(
"{0} call failed with: '{1}' (Will retry in {2}s)".format(function.__name__, e, sleep_time))
time.sleep(sleep_time)
retries += 1
return wrapper
return decorator
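# Illustrative sketch, not part of the original module: a call site guarded by
# ``with_boto_retry`` above. The client object and stack name are hypothetical
# and supplied by the caller; note that only CfnSphereBotoError exceptions
# flagged as throttling actually trigger the retry loop.
@with_boto_retry(max_retries=3, pause_time_multiplier=5)
def _example_describe_stack(cloudformation_client, stack_name="my-stack"):
    return cloudformation_client.describe_stacks(StackName=stack_name)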
def get_git_repository_remote_url(working_dir):
if not working_dir:
return None
try:
repo = Repo(working_dir)
return repo.remotes.origin.url
except InvalidGitRepositoryError:
(head, tail) = os.path.split(working_dir)
if tail:
return get_git_repository_remote_url(head)
else:
return None
def get_resources_dir():
script_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.realpath(os.path.join(script_dir, "../../resources"))
if __name__ == "__main__":
print(get_resources_dir())
|
initbrain/intelwiz
|
refs/heads/master
|
intelwiz/core/flowchart/Terminal.py
|
2
|
# -*- coding: utf-8 -*-
from pyqtgraph.Qt import QtCore, QtGui
import weakref
from pyqtgraph.graphicsItems.GraphicsObject import GraphicsObject
import pyqtgraph.functions as fn
from pyqtgraph.Point import Point
#from PySide import QtCore, QtGui
from .eq import *
class Terminal(object):
def __init__(self, node, name, io, optional=False, multi=False, pos=None, renamable=False, removable=False, multiable=False, bypass=None):
"""
Construct a new terminal.
============== =================================================================================
**Arguments:**
node the node to which this terminal belongs
name string, the name of the terminal
io 'in' or 'out'
optional bool, whether the node may process without connection to this terminal
multi bool, for inputs: whether this terminal may make multiple connections
for outputs: whether this terminal creates a different value for each connection
pos [x, y], the position of the terminal within its node's boundaries
renamable (bool) Whether the terminal can be renamed by the user
removable (bool) Whether the terminal can be removed by the user
multiable (bool) Whether the user may toggle the *multi* option for this terminal
bypass (str) Name of the terminal from which this terminal's value is derived
when the Node is in bypass mode.
============== =================================================================================
"""
self._io = io
#self._isOutput = opts[0] in ['out', 'io']
#self._isInput = opts[0]] in ['in', 'io']
#self._isIO = opts[0]=='io'
self._optional = optional
self._multi = multi
self._node = weakref.ref(node)
self._name = name
self._renamable = renamable
self._removable = removable
self._multiable = multiable
self._connections = {}
self._graphicsItem = TerminalGraphicsItem(self, parent=self._node().graphicsItem())
self._bypass = bypass
if multi:
self._value = {} ## dictionary of terminal:value pairs.
else:
self._value = None
self.valueOk = None
self.recolor()
def value(self, term=None):
"""Return the value this terminal provides for the connected terminal"""
if term is None:
return self._value
if self.isMultiValue():
return self._value.get(term, None)
else:
return self._value
def bypassValue(self):
return self._bypass
def setValue(self, val, process=True):
"""If this is a single-value terminal, val should be a single value.
If this is a multi-value terminal, val should be a dict of terminal:value pairs"""
if not self.isMultiValue():
if eq(val, self._value):
return
self._value = val
else:
if not isinstance(self._value, dict):
self._value = {}
if val is not None:
self._value.update(val)
self.setValueAcceptable(None) ## by default, input values are 'unchecked' until Node.update().
if self.isInput() and process:
self.node().update()
## Let the flowchart handle this.
#if self.isOutput():
#for c in self.connections():
#if c.isInput():
#c.inputChanged(self)
self.recolor()
def setOpts(self, **opts):
self._renamable = opts.get('renamable', self._renamable)
self._removable = opts.get('removable', self._removable)
self._multiable = opts.get('multiable', self._multiable)
if 'multi' in opts:
self.setMultiValue(opts['multi'])
def connected(self, term):
"""Called whenever this terminal has been connected to another. (note--this function is called on both terminals)"""
if self.isInput() and term.isOutput():
self.inputChanged(term)
if self.isOutput() and self.isMultiValue():
self.node().update()
self.node().connected(self, term)
def disconnected(self, term):
"""Called whenever this terminal has been disconnected from another. (note--this function is called on both terminals)"""
if self.isMultiValue() and term in self._value:
del self._value[term]
self.node().update()
#self.recolor()
else:
if self.isInput():
self.setValue(None)
self.node().disconnected(self, term)
#self.node().update()
def inputChanged(self, term, process=True):
"""Called whenever there is a change to the input value to this terminal.
It may often be useful to override this function."""
if self.isMultiValue():
self.setValue({term: term.value(self)}, process=process)
else:
self.setValue(term.value(self), process=process)
def valueIsAcceptable(self):
"""Returns True->acceptable None->unknown False->Unacceptable"""
return self.valueOk
def setValueAcceptable(self, v=True):
self.valueOk = v
self.recolor()
def connections(self):
return self._connections
def node(self):
return self._node()
def isInput(self):
return self._io == 'in'
def isMultiValue(self):
return self._multi
def setMultiValue(self, multi):
"""Set whether this is a multi-value terminal."""
self._multi = multi
if not multi and len(self.inputTerminals()) > 1:
self.disconnectAll()
for term in self.inputTerminals():
self.inputChanged(term)
def isOutput(self):
return self._io == 'out'
def isRenamable(self):
return self._renamable
def isRemovable(self):
return self._removable
def isMultiable(self):
return self._multiable
def name(self):
return self._name
def graphicsItem(self):
return self._graphicsItem
def isConnected(self):
return len(self.connections()) > 0
def connectedTo(self, term):
return term in self.connections()
def hasInput(self):
#conn = self.extendedConnections()
for t in self.connections():
if t.isOutput():
return True
return False
def inputTerminals(self):
"""Return the terminal(s) that give input to this one."""
#terms = self.extendedConnections()
#for t in terms:
#if t.isOutput():
#return t
return [t for t in self.connections() if t.isOutput()]
def dependentNodes(self):
"""Return the list of nodes which receive input from this terminal."""
#conn = self.extendedConnections()
#del conn[self]
return set([t.node() for t in self.connections() if t.isInput()])
def connectTo(self, term, connectionItem=None):
try:
if self.connectedTo(term):
raise Exception('Already connected')
if term is self:
raise Exception('Not connecting terminal to self')
if term.node() is self.node():
raise Exception("Can't connect to terminal on same node.")
for t in [self, term]:
if t.isInput() and not t._multi and len(t.connections()) > 0:
raise Exception("Cannot connect %s <-> %s: Terminal %s is already connected to %s (and does not allow multiple connections)" % (self, term, t, list(t.connections().keys())))
#if self.hasInput() and term.hasInput():
#raise Exception('Target terminal already has input')
#if term in self.node().terminals.values():
#if self.isOutput() or term.isOutput():
#raise Exception('Can not connect an output back to the same node.')
except:
if connectionItem is not None:
connectionItem.close()
raise
if connectionItem is None:
connectionItem = ConnectionItem(self.graphicsItem(), term.graphicsItem())
#self.graphicsItem().scene().addItem(connectionItem)
self.graphicsItem().getViewBox().addItem(connectionItem)
#connectionItem.setParentItem(self.graphicsItem().parent().parent())
self._connections[term] = connectionItem
term._connections[self] = connectionItem
self.recolor()
#if self.isOutput() and term.isInput():
#term.inputChanged(self)
#if term.isInput() and term.isOutput():
#self.inputChanged(term)
self.connected(term)
term.connected(self)
return connectionItem
def disconnectFrom(self, term):
if not self.connectedTo(term):
return
item = self._connections[term]
#print "removing connection", item
#item.scene().removeItem(item)
item.close()
del self._connections[term]
del term._connections[self]
self.recolor()
term.recolor()
self.disconnected(term)
term.disconnected(self)
#if self.isOutput() and term.isInput():
#term.inputChanged(self)
#if term.isInput() and term.isOutput():
#self.inputChanged(term)
def disconnectAll(self):
for t in list(self._connections.keys()):
self.disconnectFrom(t)
def recolor(self, color=None, recurse=True):
if color is None:
if not self.isConnected(): ## disconnected terminals are black
color = QtGui.QColor(0,0,0)
elif self.isInput() and not self.hasInput(): ## input terminal with no connected output terminals
color = QtGui.QColor(200,200,0)
elif self._value is None or eq(self._value, {}): ## terminal is connected but has no data (possibly due to processing error)
color = QtGui.QColor(255,255,255)
elif self.valueIsAcceptable() is None: ## terminal has data, but it is unknown if the data is ok
color = QtGui.QColor(200, 200, 0)
elif self.valueIsAcceptable() is True: ## terminal has good input, all ok
color = QtGui.QColor(0, 200, 0)
else: ## terminal has bad input
color = QtGui.QColor(200, 0, 0)
self.graphicsItem().setBrush(QtGui.QBrush(color))
if recurse:
for t in self.connections():
t.recolor(color, recurse=False)
def rename(self, name):
oldName = self._name
self._name = name
self.node().terminalRenamed(self, oldName)
self.graphicsItem().termRenamed(name)
def __repr__(self):
return "<Terminal %s.%s>" % (str(self.node().name()), str(self.name()))
#def extendedConnections(self, terms=None):
#"""Return list of terminals (including this one) that are directly or indirectly wired to this."""
#if terms is None:
#terms = {}
#terms[self] = None
#for t in self._connections:
#if t in terms:
#continue
#terms.update(t.extendedConnections(terms))
#return terms
def __hash__(self):
return id(self)
def close(self):
self.disconnectAll()
item = self.graphicsItem()
if item.scene() is not None:
item.scene().removeItem(item)
def saveState(self):
return {'io': self._io, 'multi': self._multi, 'optional': self._optional, 'renamable': self._renamable, 'removable': self._removable, 'multiable': self._multiable}
#class TerminalGraphicsItem(QtGui.QGraphicsItem):
class TerminalGraphicsItem(GraphicsObject):
def __init__(self, term, parent=None):
self.term = term
#QtGui.QGraphicsItem.__init__(self, parent)
GraphicsObject.__init__(self, parent)
self.brush = fn.mkBrush(0,0,0)
self.box = QtGui.QGraphicsRectItem(0, 0, 10, 10, self)
self.label = QtGui.QGraphicsTextItem(self.term.name(), self)
self.label.scale(0.7, 0.7)
#self.setAcceptHoverEvents(True)
self.newConnection = None
self.setFiltersChildEvents(True) ## to pick up mouse events on the rectitem
if self.term.isRenamable():
self.label.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
self.label.focusOutEvent = self.labelFocusOut
self.label.keyPressEvent = self.labelKeyPress
self.setZValue(1)
self.menu = None
def labelFocusOut(self, ev):
QtGui.QGraphicsTextItem.focusOutEvent(self.label, ev)
self.labelChanged()
def labelKeyPress(self, ev):
if ev.key() == QtCore.Qt.Key_Enter or ev.key() == QtCore.Qt.Key_Return:
self.labelChanged()
else:
QtGui.QGraphicsTextItem.keyPressEvent(self.label, ev)
def labelChanged(self):
newName = str(self.label.toPlainText())
if newName != self.term.name():
self.term.rename(newName)
def termRenamed(self, name):
self.label.setPlainText(name)
def setBrush(self, brush):
self.brush = brush
self.box.setBrush(brush)
def disconnect(self, target):
self.term.disconnectFrom(target.term)
def boundingRect(self):
br = self.box.mapRectToParent(self.box.boundingRect())
lr = self.label.mapRectToParent(self.label.boundingRect())
return br | lr
def paint(self, p, *args):
pass
def setAnchor(self, x, y):
pos = QtCore.QPointF(x, y)
self.anchorPos = pos
br = self.box.mapRectToParent(self.box.boundingRect())
lr = self.label.mapRectToParent(self.label.boundingRect())
if self.term.isInput():
self.box.setPos(pos.x(), pos.y()-br.height()/2.)
self.label.setPos(pos.x() + br.width(), pos.y() - lr.height()/2.)
else:
self.box.setPos(pos.x()-br.width(), pos.y()-br.height()/2.)
self.label.setPos(pos.x()-br.width()-lr.width(), pos.y()-lr.height()/2.)
self.updateConnections()
def updateConnections(self):
for t, c in self.term.connections().items():
c.updateLine()
def mousePressEvent(self, ev):
#ev.accept()
ev.ignore() ## necessary to allow click/drag events to process correctly
def mouseClickEvent(self, ev):
if ev.button() == QtCore.Qt.LeftButton:
ev.accept()
self.label.setFocus(QtCore.Qt.MouseFocusReason)
elif ev.button() == QtCore.Qt.RightButton:
ev.accept()
self.raiseContextMenu(ev)
def raiseContextMenu(self, ev):
## only raise menu if this terminal is removable
menu = self.getMenu()
menu = self.scene().addParentContextMenus(self, menu, ev)
pos = ev.screenPos()
menu.popup(QtCore.QPoint(pos.x(), pos.y()))
def getMenu(self):
if self.menu is None:
self.menu = QtGui.QMenu()
self.menu.setTitle("Terminal")
remAct = QtGui.QAction("Remove terminal", self.menu)
remAct.triggered.connect(self.removeSelf)
self.menu.addAction(remAct)
self.menu.remAct = remAct
if not self.term.isRemovable():
remAct.setEnabled(False)
multiAct = QtGui.QAction("Multi-value", self.menu)
multiAct.setCheckable(True)
multiAct.setChecked(self.term.isMultiValue())
multiAct.setEnabled(self.term.isMultiable())
multiAct.triggered.connect(self.toggleMulti)
self.menu.addAction(multiAct)
self.menu.multiAct = multiAct
            if not self.term.isMultiable():
                multiAct.setEnabled(False)
return self.menu
def toggleMulti(self):
multi = self.menu.multiAct.isChecked()
self.term.setMultiValue(multi)
## probably never need this
#def getContextMenus(self, ev):
#return [self.getMenu()]
def removeSelf(self):
self.term.node().removeTerminal(self.term)
def mouseDragEvent(self, ev):
if ev.button() != QtCore.Qt.LeftButton:
ev.ignore()
return
ev.accept()
if ev.isStart():
if self.newConnection is None:
self.newConnection = ConnectionItem(self)
#self.scene().addItem(self.newConnection)
self.getViewBox().addItem(self.newConnection)
#self.newConnection.setParentItem(self.parent().parent())
self.newConnection.setTarget(self.mapToView(ev.pos()))
elif ev.isFinish():
if self.newConnection is not None:
items = self.scene().items(ev.scenePos())
gotTarget = False
for i in items:
if isinstance(i, TerminalGraphicsItem):
self.newConnection.setTarget(i)
try:
self.term.connectTo(i.term, self.newConnection)
gotTarget = True
except:
self.scene().removeItem(self.newConnection)
self.newConnection = None
raise
break
if not gotTarget:
#print "remove unused connection"
#self.scene().removeItem(self.newConnection)
self.newConnection.close()
self.newConnection = None
else:
if self.newConnection is not None:
self.newConnection.setTarget(self.mapToView(ev.pos()))
def hoverEvent(self, ev):
if not ev.isExit() and ev.acceptDrags(QtCore.Qt.LeftButton):
ev.acceptClicks(QtCore.Qt.LeftButton) ## we don't use the click, but we also don't want anyone else to use it.
ev.acceptClicks(QtCore.Qt.RightButton)
self.box.setBrush(fn.mkBrush('w'))
else:
self.box.setBrush(self.brush)
self.update()
#def hoverEnterEvent(self, ev):
#self.hover = True
#def hoverLeaveEvent(self, ev):
#self.hover = False
def connectPoint(self):
## return the connect position of this terminal in view coords
return self.mapToView(self.mapFromItem(self.box, self.box.boundingRect().center()))
def nodeMoved(self):
for t, item in self.term.connections().items():
item.updateLine()
#class ConnectionItem(QtGui.QGraphicsItem):
class ConnectionItem(GraphicsObject):
def __init__(self, source, target=None):
#QtGui.QGraphicsItem.__init__(self)
GraphicsObject.__init__(self)
self.setFlags(
self.ItemIsSelectable |
self.ItemIsFocusable
)
self.source = source
self.target = target
self.length = 0
self.hovered = False
#self.line = QtGui.QGraphicsLineItem(self)
self.source.getViewBox().addItem(self)
self.updateLine()
self.setZValue(0)
def close(self):
if self.scene() is not None:
#self.scene().removeItem(self.line)
self.scene().removeItem(self)
def setTarget(self, target):
self.target = target
self.updateLine()
def updateLine(self):
start = Point(self.source.connectPoint())
if isinstance(self.target, TerminalGraphicsItem):
stop = Point(self.target.connectPoint())
elif isinstance(self.target, QtCore.QPointF):
stop = Point(self.target)
else:
return
self.prepareGeometryChange()
self.resetTransform()
ang = (stop-start).angle(Point(0, 1))
if ang is None:
ang = 0
self.rotate(ang)
self.setPos(start)
self.length = (start-stop).length()
self.update()
#self.line.setLine(start.x(), start.y(), stop.x(), stop.y())
def keyPressEvent(self, ev):
if ev.key() == QtCore.Qt.Key_Delete or ev.key() == QtCore.Qt.Key_Backspace:
#if isinstance(self.target, TerminalGraphicsItem):
self.source.disconnect(self.target)
ev.accept()
else:
ev.ignore()
def mousePressEvent(self, ev):
ev.ignore()
def mouseClickEvent(self, ev):
if ev.button() == QtCore.Qt.LeftButton:
ev.accept()
sel = self.isSelected()
self.setSelected(True)
if not sel and self.isSelected():
self.update()
def hoverEvent(self, ev):
if (not ev.isExit()) and ev.acceptClicks(QtCore.Qt.LeftButton):
self.hovered = True
else:
self.hovered = False
self.update()
def boundingRect(self):
#return self.line.boundingRect()
px = self.pixelWidth()
return QtCore.QRectF(-5*px, 0, 10*px, self.length)
#def shape(self):
#return self.line.shape()
def paint(self, p, *args):
if self.isSelected():
p.setPen(fn.mkPen(200, 200, 0, width=3))
else:
if self.hovered:
p.setPen(fn.mkPen(150, 150, 250, width=1))
else:
p.setPen(fn.mkPen(100, 100, 250, width=1))
p.drawLine(0, 0, 0, self.length)
|
eltonsantos/django
|
refs/heads/master
|
tests/unmanaged_models/__init__.py
|
45382
| |
MSusik/invenio
|
refs/heads/master
|
invenio/modules/upgrader/upgrades/invenio_2012_11_15_bibdocfile_model.py
|
3
|
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2012, 2013 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, OperationalError
from six.moves import cPickle
import logging
depends_on = ['invenio_release_1_1_0']
update_needed = True
def info():
return "Change of the underlying data model allowing extended BibDocs and MoreInfo"
def do_upgrade():
""" Implement your upgrades here """
logger = logging.getLogger('invenio_upgrader')
if update_needed:
_backup_tables(logger)
_update_database_structure_pre(logger)
recids = _retrieve_fulltext_recids()
for recid in recids:
if not _fix_recid(recid, logger):
logger.info("ERROR: Failed fixing the record %s" % (str(recid)))
_update_database_structure_post(logger)
else:
logger.info("Update executed but not needed. skipping")
def estimate():
""" Estimate running time of upgrade in seconds (optional). """
res = run_sql("select count(*) from bibdoc")
if res:
return int(float(res[0][0]) / 40)
return 0
def pre_upgrade():
""" Run pre-upgrade checks (optional). """
# Example of raising errors:
res = run_sql("show create table bibdoc")[0][1]
global update_needed
if not "more_info" in res:
update_needed = False
def post_upgrade():
""" Run post-upgrade checks (optional). """
# Example of issuing warnings:
# warnings.warn("A continuable error occurred")
pass
# private methods
def _update_database_structure_pre(logger):
"""This function alters the already existing database by adding additional columns ... the step from before modification"""
logger.info("Adding missing columns to tables")
try:
run_sql("ALTER TABLE bibdoc ADD COLUMN doctype varchar(255) AFTER more_info")
except Exception as e:
logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
try:
run_sql("ALTER TABLE bibdoc CHANGE COLUMN docname docname varchar(250) COLLATE utf8_bin default NULL")
except Exception as e:
logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
try:
run_sql("ALTER TABLE bibrec_bibdoc ADD COLUMN docname varchar(250) COLLATE utf8_bin NOT NULL default 'file' AFTER id_bibdoc, ADD KEY docname(docname)")
except Exception as e:
logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
try:
run_sql("ALTER TABLE bibdoc_bibdoc CHANGE COLUMN id_bibdoc1 id_bibdoc1 mediumint(9) unsigned DEFAULT NULL")
run_sql("ALTER TABLE bibdoc_bibdoc CHANGE COLUMN id_bibdoc2 id_bibdoc2 mediumint(9) unsigned DEFAULT NULL")
run_sql("ALTER TABLE bibdoc_bibdoc ADD COLUMN id mediumint(9) unsigned NOT NULL auto_increment FIRST, ADD COLUMN version1 tinyint(4) unsigned AFTER id_bibdoc1, ADD COLUMN format1 varchar(50) AFTER version1, ADD COLUMN version2 tinyint(4) unsigned AFTER id_bibdoc2, ADD COLUMN format2 varchar(50) AFTER version2, CHANGE COLUMN type rel_type varchar(255) AFTER format2, ADD KEY (id)")
except Exception as e:
logger.info("WARNING: Problem when altering table. Is the database really in the state from before the upgrade ? " + str(e))
run_sql("""CREATE TABLE IF NOT EXISTS bibdocmoreinfo (
id_bibdoc mediumint(9) unsigned DEFAULT NULL,
version tinyint(4) unsigned DEFAULT NULL,
format VARCHAR(50) DEFAULT NULL,
id_rel mediumint(9) unsigned DEFAULT NULL,
namespace VARCHAR(25) DEFAULT NULL,
data_key VARCHAR(25),
data_value MEDIUMBLOB,
KEY (id_bibdoc, version, format, id_rel, namespace, data_key)
) ENGINE=MyISAM;""")
def _update_database_structure_post(logger):
"""This function alters the already existing database by removing columns ... the step after the modification"""
logger.info("Removing unnecessary columns from tables")
run_sql("ALTER TABLE bibdoc DROP COLUMN more_info")
def _backup_tables(logger):
"""This function create a backup of bibrec_bibdoc, bibdoc and bibdoc_bibdoc tables. Returns False in case dropping of previous table is needed."""
logger.info("droping old backup tables")
run_sql('DROP TABLE IF EXISTS bibrec_bibdoc_backup_newdatamodel')
run_sql('DROP TABLE IF EXISTS bibdoc_backup_newdatamodel')
run_sql('DROP TABLE IF EXISTS bibdoc_bibdoc_backup_newdatamodel')
try:
run_sql("""CREATE TABLE bibrec_bibdoc_backup_newdatamodel SELECT * FROM bibrec_bibdoc""")
run_sql("""CREATE TABLE bibdoc_backup_newdatamodel SELECT * FROM bibdoc""")
run_sql("""CREATE TABLE bibdoc_bibdoc_backup_newdatamodel SELECT * FROM bibdoc_bibdoc""")
except OperationalError as e:
logger.info("Problem when backing up tables")
raise
return True
def _retrieve_fulltext_recids():
"""Returns the list of all the recid number linked with at least a fulltext
file."""
res = run_sql('SELECT DISTINCT id_bibrec FROM bibrec_bibdoc')
return [int(x[0]) for x in res]
def _fix_recid(recid, logger):
"""Fix a given recid."""
#logger.info("Upgrading record %s:" % recid)
# 1) moving docname and type to the relation with bibrec
bibrec_docs = run_sql("select id_bibdoc, type from bibrec_bibdoc where id_bibrec=%s", (recid, ))
are_equal = True
for docid_str in bibrec_docs:
docid = str(docid_str[0])
doctype = str(docid_str[1])
#logger.info("Upgrading document %s:" % (docid, ))
res2 = run_sql("select docname, more_info from bibdoc where id=%s", (docid,))
if not res2:
logger.error("Error when migrating document %s attached to the record %s: can not retrieve from the bibdoc table " % (docid, recid))
else:
docname = str(res2[0][0])
run_sql("update bibrec_bibdoc set docname=%%s where id_bibrec=%s and id_bibdoc=%s" % (str(recid), docid), (docname, ))
run_sql("update bibdoc set doctype=%%s where id=%s" % (docid,), (doctype, ))
# 2) moving moreinfo to the new moreinfo structures (default namespace)
if res2[0][1]:
minfo = cPickle.loads(res2[0][1])
# 2a migrating descriptions->version->format
new_value = cPickle.dumps(minfo['descriptions'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "descriptions", new_value))
# 2b migrating comments->version->format
new_value = cPickle.dumps(minfo['comments'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "comments", new_value))
# 2c migrating flags->flagname->version->format
new_value = cPickle.dumps(minfo['flags'])
run_sql("INSERT INTO bibdocmoreinfo (id_bibdoc, namespace, data_key, data_value) VALUES (%s, %s, %s, %s)", (str(docid), "", "flags", new_value))
# 3) Verify the correctness of moreinfo transformations
try:
descriptions = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'descriptions'))[0][0])
for version in minfo['descriptions']:
for docformat in minfo['descriptions'][version]:
v1 = descriptions[version][docformat]
v2 = minfo['descriptions'][version][docformat]
if v1 != v2:
are_equal = False
logger.info("ERROR: Document %s: Expected description %s and got %s" % (str(docid), str(v2), str(v1)))
except Exception as e:
logger.info("ERROR: Document %s: Problem with retrieving descriptions: %s MoreInfo: %s Descriptions: %s" % (str(docid), str(e), str(minfo), str(descriptions)))
try:
comments = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'comments'))[0][0])
for version in minfo['comments']:
for docformat in minfo['comments'][version]:
v1 = comments[version][docformat]
v2 = minfo['comments'][version][docformat]
if v1 != v2:
are_equal = False
logger.info("ERROR: Document %s: Expected comment %s and got %s" % (str(docid), str(v2), str(v1)))
except Exception as e:
logger.info("ERROR: Document %s: Problem with retrieving comments: %s MoreInfo: %s Comments: %s" % (str(docid), str(e), str(minfo), str(comments)))
try:
flags = cPickle.loads(run_sql("SELECT data_value FROM bibdocmoreinfo WHERE id_bibdoc=%s AND namespace=%s AND data_key=%s", (str(docid), '', 'flags'))[0][0])
for flagname in minfo['flags']:
for version in minfo['flags'][flagname]:
for docformat in minfo['flags'][flagname][version]:
if minfo['flags'][flagname][version][docformat]:
are_equal = are_equal and (docformat in flags[flagname][version])
if not (docformat in flags[flagname][version]):
logger.info("ERROR: Document %s: Expected %s" % (str(docid), str(minfo) ))
except Exception as e:
logger.info("ERROR: Document %s: Problem with retrieving flags. %s MoreInfo: %s flags: %s" % (str(docid), str(e), str(minfo), str(flags)))
if not are_equal:
logger.info("Failed to move MoreInfo structures from old database to the new one docid: %s" % (str(docid),))
return are_equal
|
HalcyonChimera/osf.io
|
refs/heads/develop
|
addons/onedrive/apps.py
|
14
|
from addons.base.apps import BaseAddonAppConfig, generic_root_folder
onedrive_root_folder = generic_root_folder('onedrive')
class OneDriveAddonAppConfig(BaseAddonAppConfig):
name = 'addons.onedrive'
label = 'addons_onedrive'
full_name = 'OneDrive'
short_name = 'onedrive'
owners = ['user', 'node']
configs = ['accounts', 'node']
categories = ['storage']
has_hgrid_files = True
@property
def get_hgrid_data(self):
return onedrive_root_folder
FILE_ADDED = 'onedrive_file_added'
FILE_REMOVED = 'onedrive_file_removed'
FILE_UPDATED = 'onedrive_file_updated'
FOLDER_CREATED = 'onedrive_folder_created'
FOLDER_SELECTED = 'onedrive_folder_selected'
NODE_AUTHORIZED = 'onedrive_node_authorized'
NODE_DEAUTHORIZED = 'onedrive_node_deauthorized'
NODE_DEAUTHORIZED_NO_USER = 'onedrive_node_deauthorized_no_user'
actions = (FILE_ADDED, FILE_REMOVED, FILE_UPDATED, FOLDER_CREATED, FOLDER_SELECTED, NODE_AUTHORIZED, NODE_DEAUTHORIZED, NODE_DEAUTHORIZED_NO_USER)
@property
def routes(self):
from . import routes
return [routes.api_routes]
@property
def user_settings(self):
return self.get_model('UserSettings')
@property
def node_settings(self):
return self.get_model('NodeSettings')
|
drnextgis/mapproxy
|
refs/heads/master
|
mapproxy/test/unit/test_config.py
|
6
|
# This file is part of the MapProxy project.
# Copyright (C) 2010 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from mapproxy.config import Options, base_config, load_base_config
from mapproxy.test.helper import TempFiles
def teardown_module():
load_base_config(clear_existing=True)
class TestOptions(object):
def test_update_overwrite(self):
d = Options(foo='bar', baz=4)
d.update(Options(baz=5))
assert d.baz == 5
assert d.foo == 'bar'
def test_update_new(self):
d = Options(foo='bar', baz=4)
d.update(Options(biz=5))
assert d.baz == 4
assert d.biz == 5
assert d.foo == 'bar'
def test_update_recursive(self):
d = Options(
foo='bar',
baz=Options(ham=2, eggs=4))
d.update(Options(baz=Options(eggs=5)))
assert d.foo == 'bar'
assert d.baz.ham == 2
assert d.baz.eggs == 5
def test_compare(self):
assert Options(foo=4) == Options(foo=4)
assert Options(foo=Options(bar=4)) == Options(foo=Options(bar=4))
class TestDefaultsLoading(object):
defaults_yaml = b"""
foo:
bar:
ham: 2
eggs: 4
biz: 'foobar'
wiz: 'foobar'
"""
def test_defaults(self):
with TempFiles() as tmp:
with open(tmp[0], 'wb') as f:
f.write(TestDefaultsLoading.defaults_yaml)
load_base_config(config_file=tmp[0], clear_existing=True)
assert base_config().biz == 'foobar'
assert base_config().wiz == 'foobar'
assert base_config().foo.bar.ham == 2
assert base_config().foo.bar.eggs == 4
assert not hasattr(base_config(), 'wms')
def test_defaults_overwrite(self):
with TempFiles(2) as tmp:
with open(tmp[0], 'wb') as f:
f.write(TestDefaultsLoading.defaults_yaml)
with open(tmp[1], 'wb') as f:
f.write(b"""
baz: [9, 2, 1, 4]
biz: 'barfoo'
foo:
bar:
eggs: 5
""")
load_base_config(config_file=tmp[0], clear_existing=True)
load_base_config(config_file=tmp[1])
assert base_config().biz == 'barfoo'
assert base_config().wiz == 'foobar'
assert base_config().baz == [9, 2, 1, 4]
assert base_config().foo.bar.ham == 2
assert base_config().foo.bar.eggs == 5
assert not hasattr(base_config(), 'wms')
class TestSRSConfig(object):
def setup(self):
import mapproxy.config.config
mapproxy.config.config._config.pop()
def test_user_srs_definitions(self):
user_yaml = b"""
srs:
axis_order_ne: ['EPSG:9999']
"""
with TempFiles() as tmp:
with open(tmp[0], 'wb') as f:
f.write(user_yaml)
load_base_config(config_file=tmp[0])
assert 'EPSG:9999' in base_config().srs.axis_order_ne
assert 'EPSG:9999' not in base_config().srs.axis_order_en
#defaults still there
assert 'EPSG:31468' in base_config().srs.axis_order_ne
assert 'CRS:84' in base_config().srs.axis_order_en
|
liyitest/rr
|
refs/heads/master
|
horizon/exceptions.py
|
17
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Exceptions raised by the Horizon code and the machinery for handling them.
"""
import logging
import os
import sys
import six
from django.core.management import color_style # noqa
from django.http import HttpRequest # noqa
from django.utils import encoding
from django.utils.translation import ugettext_lazy as _
from django.views.debug import CLEANSED_SUBSTITUTE # noqa
from django.views.debug import SafeExceptionReporterFilter # noqa
from horizon.conf import HORIZON_CONFIG # noqa
from horizon import messages
LOG = logging.getLogger(__name__)
class HorizonReporterFilter(SafeExceptionReporterFilter):
"""Error report filter that's always active, even in DEBUG mode."""
def is_active(self, request):
return True
# TODO(gabriel): This bugfix is cribbed from Django's code. When 1.4.1
# is available we can remove this code.
def get_traceback_frame_variables(self, request, tb_frame):
"""Replaces the values of variables marked as sensitive with
stars (*********).
"""
# Loop through the frame's callers to see if the sensitive_variables
# decorator was used.
current_frame = tb_frame.f_back
sensitive_variables = None
while current_frame is not None:
if (current_frame.f_code.co_name == 'sensitive_variables_wrapper'
and 'sensitive_variables_wrapper'
in current_frame.f_locals):
# The sensitive_variables decorator was used, so we take note
# of the sensitive variables' names.
wrapper = current_frame.f_locals['sensitive_variables_wrapper']
sensitive_variables = getattr(wrapper,
'sensitive_variables',
None)
break
current_frame = current_frame.f_back
cleansed = []
if self.is_active(request) and sensitive_variables:
if sensitive_variables == '__ALL__':
# Cleanse all variables
for name, value in tb_frame.f_locals.items():
cleansed.append((name, CLEANSED_SUBSTITUTE))
return cleansed
else:
# Cleanse specified variables
for name, value in tb_frame.f_locals.items():
if name in sensitive_variables:
value = CLEANSED_SUBSTITUTE
elif isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
else:
# Potentially cleanse only the request if it's one of the
# frame variables.
for name, value in tb_frame.f_locals.items():
if isinstance(value, HttpRequest):
# Cleanse the request's POST parameters.
value = self.get_request_repr(value)
cleansed.append((name, value))
return cleansed
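# Illustrative sketch, not part of the original module: how the filter above
# cooperates with Django's ``sensitive_variables`` decorator. The view function
# below is hypothetical; only the decorator import is real Django API.
def _sensitive_variables_example():
    from django.views.decorators.debug import sensitive_variables  # noqa

    @sensitive_variables('password')
    def fake_login(request, username, password):
        # If this frame appears in an error report, HorizonReporterFilter
        # replaces the value of ``password`` with CLEANSED_SUBSTITUTE.
        raise RuntimeError("login failed")

    return fake_login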
class HorizonException(Exception):
"""Base exception class for distinguishing our own exception classes."""
pass
class Http302(HorizonException):
"""Error class which can be raised from within a handler to cause an
early bailout and redirect at the middleware level.
"""
status_code = 302
def __init__(self, location, message=None):
self.location = location
self.message = message
class NotAuthorized(HorizonException):
"""Raised whenever a user attempts to access a resource which they do not
have permission-based access to (such as when failing the
:func:`~horizon.decorators.require_perms` decorator).
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthorized`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 401
class NotAuthenticated(HorizonException):
"""Raised when a user is trying to make requests and they are not logged
in.
The included :class:`~horizon.middleware.HorizonMiddleware` catches
``NotAuthenticated`` and handles it gracefully by displaying an error
message and redirecting the user to a login page.
"""
status_code = 403
class NotFound(HorizonException):
"""Generic error to replace all "Not Found"-type API errors."""
status_code = 404
class Conflict(HorizonException):
"""Generic error to replace all "Conflict"-type API errors."""
status_code = 409
class RecoverableError(HorizonException):
"""Generic error to replace any "Recoverable"-type API errors."""
status_code = 100 # HTTP status code "Continue"
class ServiceCatalogException(HorizonException):
"""Raised when a requested service is not available in the
``ServiceCatalog`` returned by Keystone.
"""
def __init__(self, service_name):
message = 'Invalid service catalog service: %s' % service_name
super(ServiceCatalogException, self).__init__(message)
class AlreadyExists(HorizonException):
"""Exception to be raised when trying to create an API resource which
already exists.
"""
def __init__(self, name, resource_type):
self.attrs = {"name": name, "resource": resource_type}
self.msg = _('A %(resource)s with the name "%(name)s" already exists.')
def __repr__(self):
return self.msg % self.attrs
def __str__(self):
return self.msg % self.attrs
def __unicode__(self):
return self.msg % self.attrs
class ConfigurationError(HorizonException):
"""Exception to be raised when invalid settings have been provided."""
pass
class NotAvailable(HorizonException):
"""Exception to be raised when something is not available."""
pass
class WorkflowError(HorizonException):
"""Exception to be raised when something goes wrong in a workflow."""
pass
class WorkflowValidationError(HorizonException):
"""Exception raised during workflow validation if required data is missing,
or existing data is not valid.
"""
pass
class HandledException(HorizonException):
"""Used internally to track exceptions that have gone through
:func:`horizon.exceptions.handle` more than once.
"""
def __init__(self, wrapped):
self.wrapped = wrapped
UNAUTHORIZED = tuple(HORIZON_CONFIG['exceptions']['unauthorized'])
NOT_FOUND = tuple(HORIZON_CONFIG['exceptions']['not_found'])
RECOVERABLE = (AlreadyExists, Conflict, NotAvailable, ServiceCatalogException)
RECOVERABLE += tuple(HORIZON_CONFIG['exceptions']['recoverable'])
def error_color(msg):
return color_style().ERROR_OUTPUT(msg)
def check_message(keywords, message):
"""Checks an exception for given keywords and raises a new ``ActionError``
with the desired message if the keywords are found. This allows selective
control over API error messages.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
if set(str(exc_value).split(" ")).issuperset(set(keywords)):
exc_value._safe_message = message
raise
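# A minimal, illustrative sketch of how check_message is typically used from
# a view's except block (the API call, keywords, and message are hypothetical):
#
#     try:
#         api_call(request)
#     except Exception:
#         check_message(["quota", "exceeded"], _("Quota exceeded."))
#         raise
#
# If every keyword appears in the exception text, the exception is re-raised
# carrying the friendlier message; otherwise the explicit raise propagates it
# unchanged.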
def handle_unauthorized(request, message, redirect, ignore, escalate, handled,
force_silence, force_log,
log_method, log_entry, log_level):
if ignore:
return NotAuthorized
if not force_silence and not handled:
log_method(error_color("Unauthorized: %s" % log_entry))
if not handled:
if message:
message = _("Unauthorized: %s") % message
# We get some pretty useless error messages back from
# some clients, so let's define our own fallback.
fallback = _("Unauthorized. Please try logging in again.")
messages.error(request, message or fallback)
# Escalation means logging the user out and raising NotAuthorized
# so the middleware will redirect them appropriately.
if escalate:
# Prevents creation of circular import. django.contrib.auth
# requires openstack_dashboard.settings to be loaded (by trying to
        # access settings.CACHES in django.core.cache) while
# openstack_dashboard.settings requires django.contrib.auth to be
# loaded while importing openstack_auth.utils
from django.contrib.auth import logout # noqa
logout(request)
raise NotAuthorized
# Otherwise continue and present our "unauthorized" error message.
return NotAuthorized
def handle_notfound(request, message, redirect, ignore, escalate, handled,
force_silence, force_log,
log_method, log_entry, log_level):
if not force_silence and not handled and (not ignore or force_log):
log_method(error_color("Not Found: %s" % log_entry))
if not ignore and not handled:
messages.error(request, message or log_entry)
if redirect:
raise Http302(redirect)
if not escalate:
return NotFound # return to normal code flow
def handle_recoverable(request, message, redirect, ignore, escalate, handled,
force_silence, force_log,
log_method, log_entry, log_level):
if not force_silence and not handled and (not ignore or force_log):
# Default recoverable error to WARN log level
log_method = getattr(LOG, log_level or "warning")
log_method(error_color("Recoverable error: %s" % log_entry))
if not ignore and not handled:
messages.error(request, message or log_entry)
if redirect:
raise Http302(redirect)
if not escalate:
return RecoverableError # return to normal code flow
HANDLE_EXC_METHODS = [
{'exc': UNAUTHORIZED, 'handler': handle_unauthorized, 'set_wrap': False},
{'exc': NOT_FOUND, 'handler': handle_notfound, 'set_wrap': True},
{'exc': RECOVERABLE, 'handler': handle_recoverable, 'set_wrap': True},
]
def handle(request, message=None, redirect=None, ignore=False,
escalate=False, log_level=None, force_log=None):
"""Centralized error handling for Horizon.
Because Horizon consumes so many different APIs with completely
different ``Exception`` types, it's necessary to have a centralized
place for handling exceptions which may be raised.
Exceptions are roughly divided into 3 types:
#. ``UNAUTHORIZED``: Errors resulting from authentication or authorization
problems. These result in being logged out and sent to the login screen.
#. ``NOT_FOUND``: Errors resulting from objects which could not be
located via the API. These generally result in a user-facing error
message, but are otherwise returned to the normal code flow. Optionally
a redirect value may be passed to the error handler so users are
returned to a different view than the one requested in addition to the
error message.
    #. ``RECOVERABLE``: Generic API errors which generate a user-facing message
but drop directly back to the regular code flow.
All other exceptions bubble the stack as normal unless the ``ignore``
argument is passed in as ``True``, in which case only unrecognized
errors are bubbled.
If the exception is not re-raised, an appropriate wrapper exception
class indicating the type of exception that was encountered will be
returned.
"""
exc_type, exc_value, exc_traceback = sys.exc_info()
log_method = getattr(LOG, log_level or "exception")
force_log = force_log or os.environ.get("HORIZON_TEST_RUN", False)
force_silence = getattr(exc_value, "silence_logging", False)
# Because the same exception may travel through this method more than
# once (if it's re-raised) we may want to treat it differently
# the second time (e.g. no user messages/logging).
handled = issubclass(exc_type, HandledException)
wrap = False
# Restore our original exception information, but re-wrap it at the end
if handled:
exc_type, exc_value, exc_traceback = exc_value.wrapped
wrap = True
log_entry = encoding.force_text(exc_value)
# We trust messages from our own exceptions
if issubclass(exc_type, HorizonException):
message = exc_value
# Check for an override message
elif getattr(exc_value, "_safe_message", None):
message = exc_value._safe_message
# If the message has a placeholder for the exception, fill it in
elif message and "%(exc)s" in message:
message = encoding.force_text(message) % {"exc": log_entry}
if message:
message = encoding.force_text(message)
for exc_handler in HANDLE_EXC_METHODS:
if issubclass(exc_type, exc_handler['exc']):
if exc_handler['set_wrap']:
wrap = True
handler = exc_handler['handler']
ret = handler(request, message, redirect, ignore, escalate,
handled, force_silence, force_log,
log_method, log_entry, log_level)
if ret:
return ret # return to normal code flow
# If we've gotten here, time to wrap and/or raise our exception.
if wrap:
raise HandledException([exc_type, exc_value, exc_traceback])
six.reraise(exc_type, exc_value, exc_traceback)
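# A minimal, illustrative sketch of calling handle() from a view (the API call
# and redirect target below are hypothetical):
#
#     try:
#         volumes = api.cinder.volume_list(request)
#     except Exception:
#         volumes = []
#         handle(request, _("Unable to retrieve volumes."),
#                redirect="/project/volumes/")
#
# Unauthorized errors show a message (and log the user out when escalate=True),
# NOT_FOUND and RECOVERABLE errors show a message and optionally redirect, and
# unrecognized exceptions are re-raised.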
|
Zelgadis87/Sick-Beard
|
refs/heads/master
|
lib/enzyme/core.py
|
180
|
# -*- coding: utf-8 -*-
# enzyme - Video metadata parser
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
# Copyright 2003-2006 Thomas Schueppel <stain@acm.org>
# Copyright 2003-2006 Dirk Meyer <dischi@freevo.org>
#
# This file is part of enzyme.
#
# enzyme is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# enzyme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with enzyme. If not, see <http://www.gnu.org/licenses/>.
import re
import logging
import fourcc
import language
from strutils import str_to_unicode, unicode_to_str
UNPRINTABLE_KEYS = ['thumbnail', 'url', 'codec_private']
MEDIACORE = ['title', 'caption', 'comment', 'size', 'type', 'subtype', 'timestamp',
'keywords', 'country', 'language', 'langcode', 'url', 'artist',
'mime', 'datetime', 'tags', 'hash']
AUDIOCORE = ['channels', 'samplerate', 'length', 'encoder', 'codec', 'format',
'samplebits', 'bitrate', 'fourcc', 'trackno', 'id', 'userdate',
'enabled', 'default', 'codec_private']
MUSICCORE = ['trackof', 'album', 'genre', 'discs', 'thumbnail']
VIDEOCORE = ['length', 'encoder', 'bitrate', 'samplerate', 'codec', 'format',
'samplebits', 'width', 'height', 'fps', 'aspect', 'trackno',
'fourcc', 'id', 'enabled', 'default', 'codec_private']
AVCORE = ['length', 'encoder', 'trackno', 'trackof', 'copyright', 'product',
'genre', 'writer', 'producer', 'studio', 'rating', 'actors', 'thumbnail',
'delay', 'image', 'video', 'audio', 'subtitles', 'chapters', 'software',
'summary', 'synopsis', 'season', 'episode', 'series']
# get logging object
log = logging.getLogger(__name__)
class Media(object):
"""
    Media is the base class for all Media Metadata Containers. It defines
    the basic structures that handle metadata. Media and its derivatives
    contain a common set of metadata attributes that is listed in keys.
    Specific derivatives contain additional keys to the Dublin Core set that
    is defined in Media.
"""
media = None
_keys = MEDIACORE
table_mapping = {}
def __init__(self, hash=None):
if hash is not None:
# create Media based on dict
for key, value in hash.items():
if isinstance(value, list) and value and isinstance(value[0], dict):
value = [Media(x) for x in value]
self._set(key, value)
return
self._keys = self._keys[:]
self.tables = {}
# Tags, unlike tables, are more well-defined dicts whose values are
# either Tag objects, other dicts (for nested tags), or lists of either
# (for multiple instances of the tag, e.g. actor). Where possible,
# parsers should transform tag names to conform to the Official
# Matroska tags defined at http://www.matroska.org/technical/specs/tagging/index.html
# All tag names will be lower-cased.
self.tags = Tags()
for key in set(self._keys) - set(['media', 'tags']):
setattr(self, key, None)
#
    # unicode and string conversion for debugging
#
#TODO: Fix that mess
def __unicode__(self):
result = u''
# print normal attributes
lists = []
for key in self._keys:
value = getattr(self, key, None)
if value == None or key == 'url':
continue
if isinstance(value, list):
if not value:
continue
elif isinstance(value[0], basestring):
# Just a list of strings (keywords?), so don't treat it specially.
value = u', '.join(value)
else:
lists.append((key, value))
continue
elif isinstance(value, dict):
# Tables or tags treated separately.
continue
if key in UNPRINTABLE_KEYS:
value = '<unprintable data, size=%d>' % len(value)
result += u'| %10s: %s\n' % (unicode(key), unicode(value))
# print tags (recursively, to support nested tags).
def print_tags(tags, suffix, show_label):
result = ''
for n, (name, tag) in enumerate(tags.items()):
result += u'| %12s%s%s = ' % (u'tags: ' if n == 0 and show_label else '', suffix, name)
if isinstance(tag, list):
# TODO: doesn't support lists/dicts within lists.
result += u'%s\n' % ', '.join(subtag.value for subtag in tag)
else:
result += u'%s\n' % (tag.value or '')
if isinstance(tag, dict):
result += print_tags(tag, ' ', False)
return result
result += print_tags(self.tags, '', True)
# print lists
for key, l in lists:
for n, item in enumerate(l):
label = '+-- ' + key.rstrip('s').capitalize()
if key not in ['tracks', 'subtitles', 'chapters']:
label += ' Track'
result += u'%s #%d\n' % (label, n + 1)
result += '| ' + re.sub(r'\n(.)', r'\n| \1', unicode(item))
# print tables
#FIXME: WTH?
# if log.level >= 10:
# for name, table in self.tables.items():
# result += '+-- Table %s\n' % str(name)
# for key, value in table.items():
# try:
# value = unicode(value)
# if len(value) > 50:
# value = u'<unprintable data, size=%d>' % len(value)
# except (UnicodeDecodeError, TypeError):
# try:
# value = u'<unprintable data, size=%d>' % len(value)
# except AttributeError:
# value = u'<unprintable data>'
# result += u'| | %s: %s\n' % (unicode(key), value)
return result
def __str__(self):
return unicode(self).encode()
def __repr__(self):
if hasattr(self, 'url'):
return '<%s %s>' % (str(self.__class__)[8:-2], self.url)
else:
return '<%s>' % (str(self.__class__)[8:-2])
#
# internal functions
#
def _appendtable(self, name, hashmap):
"""
        Appends a table of additional metadata to the object.
        If such a table already exists, the given table's items are
        added to the existing one.
"""
if name not in self.tables:
self.tables[name] = hashmap
else:
# Append to the already existing table
for k in hashmap.keys():
self.tables[name][k] = hashmap[k]
def _set(self, key, value):
"""
Set key to value and add the key to the internal keys list if
missing.
"""
if value is None and getattr(self, key, None) is None:
return
if isinstance(value, str):
value = str_to_unicode(value)
setattr(self, key, value)
if not key in self._keys:
self._keys.append(key)
def _set_url(self, url):
"""
Set the URL of the source
"""
self.url = url
def _finalize(self):
"""
        Correct some data based on specific rules
"""
# make sure all strings are unicode
for key in self._keys:
if key in UNPRINTABLE_KEYS:
continue
value = getattr(self, key)
if value is None:
continue
if key == 'image':
if isinstance(value, unicode):
setattr(self, key, unicode_to_str(value))
continue
if isinstance(value, str):
setattr(self, key, str_to_unicode(value))
if isinstance(value, unicode):
setattr(self, key, value.strip().rstrip().replace(u'\0', u''))
if isinstance(value, list) and value and isinstance(value[0], Media):
for submenu in value:
submenu._finalize()
# copy needed tags from tables
for name, table in self.tables.items():
mapping = self.table_mapping.get(name, {})
for tag, attr in mapping.items():
if self.get(attr):
continue
value = table.get(tag, None)
if value is not None:
if not isinstance(value, (str, unicode)):
value = str_to_unicode(str(value))
elif isinstance(value, str):
value = str_to_unicode(value)
value = value.strip().rstrip().replace(u'\0', u'')
setattr(self, attr, value)
if 'fourcc' in self._keys and 'codec' in self._keys and self.codec is not None:
# Codec may be a fourcc, in which case we resolve it to its actual
# name and set the fourcc attribute.
self.fourcc, self.codec = fourcc.resolve(self.codec)
if 'language' in self._keys:
self.langcode, self.language = language.resolve(self.language)
#
# data access
#
def __contains__(self, key):
"""
Test if key exists in the dict
"""
return hasattr(self, key)
def get(self, attr, default=None):
"""
Returns the given attribute. If the attribute is not set by
the parser return 'default'.
"""
return getattr(self, attr, default)
def __getitem__(self, attr):
"""
Get the value of the given attribute
"""
return getattr(self, attr, None)
def __setitem__(self, key, value):
"""
Set the value of 'key' to 'value'
"""
setattr(self, key, value)
def has_key(self, key):
"""
Check if the object has an attribute 'key'
"""
return hasattr(self, key)
def convert(self):
"""
Convert Media to dict.
"""
result = {}
for k in self._keys:
value = getattr(self, k, None)
if isinstance(value, list) and value and isinstance(value[0], Media):
value = [x.convert() for x in value]
result[k] = value
return result
def keys(self):
"""
Return all keys for the attributes set by the parser.
"""
return self._keys
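# A minimal, illustrative sketch of the dict-like access Media provides
# (the attribute values below are made up):
#
#     m = Media()
#     m['title'] = u'An example title'   # same effect as m.title = ...
#     print m.get('title')               # -> u'An example title'
#     print 'title' in m, m.keys()       # membership test and known keys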
class Collection(Media):
"""
    Collection of Digital Media like CD, DVD, Directory, Playlist
"""
_keys = Media._keys + ['id', 'tracks']
def __init__(self):
Media.__init__(self)
self.tracks = []
class Tag(object):
"""
An individual tag, which will be a value stored in a Tags object.
Tag values are strings (for binary data), unicode objects, or datetime
objects for tags that represent dates or times.
"""
def __init__(self, value=None, langcode='und', binary=False):
super(Tag, self).__init__()
self.value = value
self.langcode = langcode
self.binary = binary
def __unicode__(self):
return unicode(self.value)
def __str__(self):
return str(self.value)
def __repr__(self):
if not self.binary:
return '<Tag object: %s>' % repr(self.value)
else:
return '<Binary Tag object: size=%d>' % len(self.value)
@property
def langcode(self):
return self._langcode
@langcode.setter
def langcode(self, code):
self._langcode, self.language = language.resolve(code)
class Tags(dict, Tag):
"""
A dictionary containing Tag objects. Values can be other Tags objects
(for nested tags), lists, or Tag objects.
A Tags object is more or less a dictionary but it also contains a value.
This is necessary in order to represent this kind of tag specification
(e.g. for Matroska)::
<Simple>
<Name>LAW_RATING</Name>
<String>PG</String>
<Simple>
<Name>COUNTRY</Name>
<String>US</String>
</Simple>
</Simple>
    The attribute LAW_RATING has a value (PG), but it also has a child tag
    COUNTRY that specifies the country code the rating belongs to.
"""
def __init__(self, value=None, langcode='und', binary=False):
super(Tags, self).__init__()
self.value = value
self.langcode = langcode
self.binary = False
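# A minimal, illustrative sketch of building the nested LAW_RATING/COUNTRY
# structure described in the Tags docstring above:
#
#     rating = Tags(value=u'PG')                 # the tag's own value
#     rating[u'country'] = Tag(u'US')            # nested child tag
#     tags = Tags()
#     tags[u'law_rating'] = rating
#     print tags[u'law_rating'].value            # -> u'PG'
#     print tags[u'law_rating'][u'country'].value  # -> u'US'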
class AudioStream(Media):
"""
Audio Tracks in a Multiplexed Container.
"""
_keys = Media._keys + AUDIOCORE
class Music(AudioStream):
"""
Digital Music.
"""
_keys = AudioStream._keys + MUSICCORE
def _finalize(self):
"""
        Correct some data based on specific rules
"""
AudioStream._finalize(self)
if self.trackof:
try:
# XXX Why is this needed anyway?
if int(self.trackno) < 10:
self.trackno = u'0%s' % int(self.trackno)
except (AttributeError, ValueError):
pass
class VideoStream(Media):
"""
Video Tracks in a Multiplexed Container.
"""
_keys = Media._keys + VIDEOCORE
class Chapter(Media):
"""
Chapter in a Multiplexed Container.
"""
_keys = ['enabled', 'name', 'pos', 'id']
def __init__(self, name=None, pos=0):
Media.__init__(self)
self.name = name
self.pos = pos
self.enabled = True
class Subtitle(Media):
"""
Subtitle Tracks in a Multiplexed Container.
"""
_keys = ['enabled', 'default', 'langcode', 'language', 'trackno', 'title',
'id', 'codec']
def __init__(self, language=None):
Media.__init__(self)
self.language = language
class AVContainer(Media):
"""
    Container for Audio and Video streams. This is the Container Type for
    all media that contain more than one stream.
"""
_keys = Media._keys + AVCORE
def __init__(self):
Media.__init__(self)
self.audio = []
self.video = []
self.subtitles = []
self.chapters = []
def _finalize(self):
"""
        Correct some data based on specific rules
"""
Media._finalize(self)
if not self.length and len(self.video) and self.video[0].length:
self.length = 0
# Length not specified for container, so use the largest length
# of its tracks as container length.
for track in self.video + self.audio:
if track.length:
self.length = max(self.length, track.length)
|
darktears/chromium-crosswalk
|
refs/heads/master
|
chrome/test/mini_installer/process_verifier.py
|
125
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import chrome_helper
import verifier
class ProcessVerifier(verifier.Verifier):
"""Verifies that the running processes match the expectation dictionaries."""
def _VerifyExpectation(self, expectation_name, expectation,
variable_expander):
"""Overridden from verifier.Verifier.
This method will throw an AssertionError if process state doesn't match the
|expectation|.
Args:
expectation_name: Path to the process being verified. It is expanded using
Expand.
expectation: A dictionary with the following key and value:
'running' a boolean indicating whether the process should be running.
variable_expander: A VariableExpander object.
"""
# Create a list of paths of all running processes.
running_process_paths = [path for (_, path) in
chrome_helper.GetProcessIDAndPathPairs()]
process_path = variable_expander.Expand(expectation_name)
is_running = process_path in running_process_paths
assert expectation['running'] == is_running, \
('Process %s is running' % process_path) if is_running else \
('Process %s is not running' % process_path)
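# A minimal, illustrative expectation entry for this verifier (the variable
# name and path below are hypothetical):
#
#     "$PROGRAM_FILES\\Chrome\\Application\\chrome.exe": {"running": True}
#
# The key (expectation_name) is expanded by the VariableExpander and then
# compared against the paths of the currently running processes.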
|
mhoffma/micropython
|
refs/heads/master
|
tests/basics/bytes_gen.py
|
116
|
# construct a bytes object from a generator
def gen():
for i in range(4):
yield i
print(bytes(gen()))
|
chuangWu/linux
|
refs/heads/master
|
scripts/gdb/linux/dmesg.py
|
630
|
#
# gdb helper commands and functions for Linux kernel debugging
#
# kernel log buffer dump
#
# Copyright (c) Siemens AG, 2011, 2012
#
# Authors:
# Jan Kiszka <jan.kiszka@siemens.com>
#
# This work is licensed under the terms of the GNU GPL version 2.
#
import gdb
from linux import utils
class LxDmesg(gdb.Command):
"""Print Linux kernel log buffer."""
def __init__(self):
super(LxDmesg, self).__init__("lx-dmesg", gdb.COMMAND_DATA)
def invoke(self, arg, from_tty):
log_buf_addr = int(str(gdb.parse_and_eval("log_buf")).split()[0], 16)
log_first_idx = int(gdb.parse_and_eval("log_first_idx"))
log_next_idx = int(gdb.parse_and_eval("log_next_idx"))
log_buf_len = int(gdb.parse_and_eval("log_buf_len"))
inf = gdb.inferiors()[0]
start = log_buf_addr + log_first_idx
if log_first_idx < log_next_idx:
log_buf_2nd_half = -1
length = log_next_idx - log_first_idx
log_buf = inf.read_memory(start, length)
else:
log_buf_2nd_half = log_buf_len - log_first_idx
log_buf = inf.read_memory(start, log_buf_2nd_half) + \
inf.read_memory(log_buf_addr, log_next_idx)
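        # Each record in the ring buffer starts with a 16-byte header: a u64
        # timestamp in nanoseconds (bytes 0-7), the total record length (u16
        # at bytes 8-9) and the message text length (u16 at bytes 10-11);
        # the text itself begins 16 bytes into the record.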
pos = 0
while pos < log_buf.__len__():
length = utils.read_u16(log_buf[pos + 8:pos + 10])
if length == 0:
if log_buf_2nd_half == -1:
gdb.write("Corrupted log buffer!\n")
break
pos = log_buf_2nd_half
continue
text_len = utils.read_u16(log_buf[pos + 10:pos + 12])
text = log_buf[pos + 16:pos + 16 + text_len]
time_stamp = utils.read_u64(log_buf[pos:pos + 8])
for line in memoryview(text).tobytes().splitlines():
gdb.write("[{time:12.6f}] {line}\n".format(
time=time_stamp / 1000000000.0,
line=line))
pos += length
LxDmesg()
|
kylerichardson/metron
|
refs/heads/master
|
metron-deployment/packaging/ambari/metron-mpack/src/main/resources/common-services/METRON/CURRENT/package/scripts/management_ui_commands.py
|
7
|
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from resource_management.core.logger import Logger
from resource_management.core.resources.system import Execute, File
from resource_management.core.exceptions import ExecutionFailed
from resource_management.libraries.functions.get_user_call_output import get_user_call_output
import metron_service
# Wrap major operations and functionality in this class
class ManagementUICommands:
__params = None
def __init__(self, params):
if params is None:
raise ValueError("params argument is required for initialization")
self.__params = params
def start_management_ui(self):
"""
Starts the Management UI
"""
Logger.info('Starting Management UI')
start_cmd = ('service', 'metron-management-ui', 'start')
Execute(start_cmd, sudo=True)
Logger.info('Done starting Management UI')
def stop_management_ui(self):
"""
Stops the Management UI
"""
Logger.info('Stopping Management UI')
stop_cmd = ('service', 'metron-management-ui', 'stop')
Execute(stop_cmd, sudo=True)
Logger.info('Done stopping Management UI')
def restart_management_ui(self, env):
"""
Restarts the Management UI
:param env: Environment
"""
Logger.info('Restarting the Management UI')
restart_cmd = ('service', 'metron-management-ui', 'restart')
Execute(restart_cmd, sudo=True)
Logger.info('Done restarting the Management UI')
def status_management_ui(self, env):
"""
Performs a status check for the Management UI
:param env: Environment
"""
Logger.info('Status check the Management UI')
metron_service.check_http(
self.__params.metron_management_ui_host,
self.__params.metron_management_ui_port,
self.__params.metron_user)
def service_check(self, env):
"""
Performs a service check for the Management UI
:param env: Environment
"""
Logger.info('Checking connectivity to Management UI')
metron_service.check_http(
self.__params.metron_management_ui_host,
self.__params.metron_management_ui_port,
self.__params.metron_user)
Logger.info("Management UI service check completed successfully")
|
solvcon/solvcon
|
refs/heads/master
|
setup.py
|
2
|
# -*- coding: UTF-8 -*-
#
# Copyright (c) 2008, Yung-Yu Chen <yyc@solvcon.net>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# - Neither the name of the SOLVCON nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""SOLVCON distribution script."""
from __future__ import absolute_import, division, print_function
CLASSIFIERS = """Development Status :: 3 - Alpha
Intended Audience :: Developers
Intended Audience :: Education
Intended Audience :: Science/Research
License :: OSI Approved :: BSD License
Operating System :: POSIX :: Linux
Operating System :: MacOS :: MacOS X
Programming Language :: C
Programming Language :: Python
Topic :: Scientific/Engineering
Topic :: Software Development :: Libraries :: Application Frameworks"""
import sys
import os
import glob
# BEFORE importing distutils, remove MANIFEST. distutils doesn't properly
# update it when the contents of directories change.
if os.path.exists('MANIFEST'):
os.remove('MANIFEST')
from distutils.ccompiler import CCompiler
from numpy.distutils.ccompiler import replace_method
from numpy.distutils.ccompiler import CCompiler_customize as numpy_CCompiler_customize
from numpy.distutils import log
from distutils.extension import Extension
from numpy.distutils.core import setup
from numpy.distutils.command import build_ext as np_build_ext
from Cython.Build import cythonize
from Cython.Distutils import Extension as CyExtension
import solvcon as sc
def CCompiler_customize(self, *args, **kw):
need_cxx = kw.get('need_cxx', 0)
# list unwanted flags (e.g. '-g') here.
unwanted = []
# call the original method.
numpy_CCompiler_customize(self, *args, **kw)
# update arguments.
ccshared = ' '.join(set(self.compiler_so) - set(self.compiler))
compiler = ' '.join(it for it in self.compiler if it not in unwanted)
old_compiler = self.compiler
self.set_executables(
compiler=compiler,
compiler_so=compiler + ' ' + ccshared,
)
modified = self.compiler != old_compiler
if modified and need_cxx and hasattr(self, 'compiler'):
log.warn("#### %s ####### %s removed" % (self.compiler, unwanted))
return
replace_method(CCompiler, 'customize', CCompiler_customize)
def make_cython_extension(
name, c_subdirs, include_dirs=None, libraries=None, extra_compile_args=None
):
pak_dir = os.path.join(*name.split('.')[:-1])
files = [name.replace('.', os.sep) + '.pyx']
for c_subdir in sorted(c_subdirs):
path = os.path.join(pak_dir, c_subdir, '*.c')
files += glob.glob(path)
include_dirs = [] if None is include_dirs else include_dirs
include_dirs.insert(0, 'solvcon')
include_dirs.insert(0, os.path.join(pak_dir))
libraries = [] if None is libraries else libraries
libraries = (['scotchmetis', 'scotch', 'scotcherr', 'scotcherrexit']
+ libraries)
rpathflag = '-Wl,-rpath,%s/lib' % sys.exec_prefix
if extra_compile_args is None: extra_compile_args = []
extra_compile_args = [
'-Werror',
'-Wno-cpp' if sys.platform != 'darwin' else '-Wno-#warnings',
'-Wno-unused-function',
] + extra_compile_args
return CyExtension(
name, files,
include_dirs=include_dirs,
libraries=libraries,
extra_compile_args=extra_compile_args,
extra_link_args=[rpathflag],
)
class my_build_ext(np_build_ext.build_ext):
def _copy_cmake_extension(self, ext):
import shutil
from distutils.errors import DistutilsSetupError
from distutils.dep_util import newer_group
sources = ext.sources
if sources is None or not isinstance(sources, (list, tuple)):
raise DistutilsSetupError(
"in 'ext_modules' option (extension '%s'), "
"'sources' must be present and must be "
"a list of source filenames" % ext.name)
sources = list(sources)
ext_path = self.get_ext_fullpath(ext.name)
depends = sources + ext.depends
if not (self.force or newer_group(depends, ext_path, 'newer')):
log.debug("skipping '%s' cmake extension (up-to-date)", ext.name)
return
else:
log.info("building '%s' cmake extension", ext.name)
extdir, extbase = os.path.split(ext_path)
if not os.path.exists(extdir):
os.makedirs(extdir)
shutil.copyfile(
os.path.join('solvcon', extbase), ext_path)
def build_extension(self, ext):
''' Copies the already-compiled pyd
'''
return np_build_ext.build_ext.build_extension(self, ext)
def main():
data_files = list()
# test data.
lead = os.path.join('share', 'solvcon', 'test')
data_files.extend([
(lead, glob.glob(os.path.join('test', 'data', '*.g'))),
(lead, glob.glob(os.path.join('test', 'data', '*.jou'))),
(lead, glob.glob(os.path.join('test', 'data', '*.nc'))),
(lead, glob.glob(os.path.join('test', 'data', '*.neu'))),
(lead, glob.glob(os.path.join('test', 'data', '*.blk'))),
(lead, glob.glob(os.path.join('test', 'data', '*.vtk'))),
(lead, glob.glob(os.path.join('test', 'data', '*.msh.gz'))),
(lead, glob.glob(os.path.join('test', 'data', '*.geo'))),
(os.path.join(lead, 'sample.dom'),
glob.glob(os.path.join('test', 'data', 'sample.dom', '*')))
])
# examples.
lead = os.path.join('share', 'solvcon')
for edir in glob.glob(os.path.join('examples', '*', '*')):
if os.path.isdir(edir):
data_files.append(
(os.path.join(lead, edir), [os.path.join(edir, 'go')]))
for ext in ('tmpl', 'py', 'h'):
data_files.append((os.path.join(lead, edir),
glob.glob(os.path.join(edir, '*.%s'%ext))))
turn_off_unused_warnings = ['-Wno-unused-variable']
if sys.platform != 'darwin':
turn_off_unused_warnings += ['-Wno-unused-but-set-variable']
# set up extension modules.
lapack_libraries = ['lapack', 'blas']
if os.environ.get('LAPACK_GFORTRAN'):
lapack_libraries.append('gfortran')
ext_modules = [
make_cython_extension(
'solvcon._march_bridge', [],
include_dirs=['libmarch/include']
),
make_cython_extension(
'solvcon.mesh',
['src'],
),
make_cython_extension(
'solvcon.parcel.fake._algorithm',
['src'],
extra_compile_args=turn_off_unused_warnings,
),
make_cython_extension(
'solvcon.parcel.linear._algorithm', ['src'],
libraries=lapack_libraries,
extra_compile_args=turn_off_unused_warnings + [
'-Wno-unknown-pragmas',
],
),
make_cython_extension(
'solvcon.parcel.bulk._algorithm',
['src'],
extra_compile_args=turn_off_unused_warnings + [
'-Wno-unknown-pragmas',
'-Wno-uninitialized',
],
),
make_cython_extension(
'solvcon.parcel.gas._algorithm',
['src'],
extra_compile_args=turn_off_unused_warnings + [
'-Wno-unknown-pragmas',
],
),
make_cython_extension(
'solvcon.parcel.vewave._algorithm', ['src'],
libraries=['lapack', 'blas'],
extra_compile_args=turn_off_unused_warnings + [
'-Wno-unknown-pragmas',
],
),
]
# remove files when cleaning.
sidx = sys.argv.index('setup.py') if 'setup.py' in sys.argv else -1
cidx = sys.argv.index('clean') if 'clean' in sys.argv else -1
if cidx > sidx:
derived = list()
for mod in ext_modules:
pyx = mod.sources[0] # this must be the pyx file.
mainfn, dotfn = os.path.splitext(pyx)
if '.pyx' == dotfn:
derived += ['.'.join((mainfn, ext)) for ext in ('c', 'h')]
derived += ['%s.so' % mainfn] + glob.glob('%s.*.so' % mainfn)
derived = [fn for fn in derived if os.path.exists(fn)]
if derived:
sys.stdout.write('Removing in-place generated files:')
for fn in derived:
os.remove(fn)
sys.stdout.write('\n %s' % fn)
sys.stdout.write('\n')
else:
if "/home/docs/checkouts/readthedocs.org" in os.getcwd():
# Do not build extension modules if I am in readthedocs.org,
# because the dependency cannot be met.
ext_modules = list()
else:
ext_modules = cythonize(ext_modules)
with open('README.rst') as fobj:
long_description = ''.join(fobj.read())
setup(
name='SOLVCON',
maintainer='Yung-Yu Chen',
author='Yung-Yu Chen',
maintainer_email='yyc@solvcon.net',
author_email='yyc@solvcon.net',
description='Solvers of Conservation Laws',
long_description=long_description,
license='BSD',
url='http://solvcon.net/',
download_url='https://github.com/solvcon/solvcon/releases',
classifiers=[tok.strip() for tok in CLASSIFIERS.split('\n')],
platforms=[
'Linux',
],
version=sc.__version__,
scripts=[
'scg',
],
packages=[
'solvcon',
'solvcon.io',
'solvcon.io.tests',
'solvcon.kerpak',
'solvcon.parcel',
'solvcon.parcel.bulk',
'solvcon.parcel.fake',
'solvcon.parcel.gas',
'solvcon.parcel.linear',
'solvcon.parcel.tests',
'solvcon.parcel.vewave',
'solvcon.tests',
],
cmdclass={
'build_ext': my_build_ext,
},
ext_modules=ext_modules,
data_files=data_files,
)
return
if __name__ == '__main__':
main()
|
robbiet480/home-assistant
|
refs/heads/dev
|
homeassistant/components/huawei_lte/const.py
|
5
|
"""Huawei LTE constants."""
DOMAIN = "huawei_lte"
DEFAULT_DEVICE_NAME = "LTE"
DEFAULT_NOTIFY_SERVICE_NAME = DOMAIN
UPDATE_SIGNAL = f"{DOMAIN}_update"
UPDATE_OPTIONS_SIGNAL = f"{DOMAIN}_options_update"
CONNECTION_TIMEOUT = 10
NOTIFY_SUPPRESS_TIMEOUT = 30
SERVICE_CLEAR_TRAFFIC_STATISTICS = "clear_traffic_statistics"
SERVICE_REBOOT = "reboot"
SERVICE_RESUME_INTEGRATION = "resume_integration"
SERVICE_SUSPEND_INTEGRATION = "suspend_integration"
ADMIN_SERVICES = {
SERVICE_CLEAR_TRAFFIC_STATISTICS,
SERVICE_REBOOT,
SERVICE_RESUME_INTEGRATION,
SERVICE_SUSPEND_INTEGRATION,
}
KEY_DEVICE_BASIC_INFORMATION = "device_basic_information"
KEY_DEVICE_INFORMATION = "device_information"
KEY_DEVICE_SIGNAL = "device_signal"
KEY_DIALUP_MOBILE_DATASWITCH = "dialup_mobile_dataswitch"
KEY_MONITORING_MONTH_STATISTICS = "monitoring_month_statistics"
KEY_MONITORING_STATUS = "monitoring_status"
KEY_MONITORING_TRAFFIC_STATISTICS = "monitoring_traffic_statistics"
KEY_NET_CURRENT_PLMN = "net_current_plmn"
KEY_NET_NET_MODE = "net_net_mode"
KEY_SMS_SMS_COUNT = "sms_sms_count"
KEY_WLAN_HOST_LIST = "wlan_host_list"
KEY_WLAN_WIFI_FEATURE_SWITCH = "wlan_wifi_feature_switch"
BINARY_SENSOR_KEYS = {KEY_MONITORING_STATUS, KEY_WLAN_WIFI_FEATURE_SWITCH}
DEVICE_TRACKER_KEYS = {KEY_WLAN_HOST_LIST}
SENSOR_KEYS = {
KEY_DEVICE_INFORMATION,
KEY_DEVICE_SIGNAL,
KEY_MONITORING_MONTH_STATISTICS,
KEY_MONITORING_STATUS,
KEY_MONITORING_TRAFFIC_STATISTICS,
KEY_NET_CURRENT_PLMN,
KEY_NET_NET_MODE,
KEY_SMS_SMS_COUNT,
}
SWITCH_KEYS = {KEY_DIALUP_MOBILE_DATASWITCH}
ALL_KEYS = BINARY_SENSOR_KEYS | DEVICE_TRACKER_KEYS | SENSOR_KEYS | SWITCH_KEYS
|
kuno/ansible
|
refs/heads/devel
|
lib/ansible/runner/connection_plugins/ssh.py
|
1
|
# (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
import os
import subprocess
import shlex
import pipes
import random
import select
import fcntl
import pwd
import ansible.constants as C
from ansible.callbacks import vvv
from ansible import errors
from ansible import utils
class Connection(object):
''' ssh based connections '''
def __init__(self, runner, host, port, user, password, private_key_file, *args, **kwargs):
self.runner = runner
self.host = host
self.port = port
self.user = user
self.password = password
self.private_key_file = private_key_file
def connect(self):
''' connect to the remote host '''
vvv("ESTABLISH CONNECTION FOR USER: %s" % self.user, host=self.host)
self.common_args = []
extra_args = C.ANSIBLE_SSH_ARGS
if extra_args is not None:
self.common_args += shlex.split(extra_args)
else:
self.common_args += ["-o", "ControlMaster=auto",
"-o", "ControlPersist=60s",
"-o", "ControlPath=/tmp/ansible-ssh-%h-%p-%r"]
self.common_args += ["-o", "StrictHostKeyChecking=no"]
if self.port is not None:
self.common_args += ["-o", "Port=%d" % (self.port)]
if self.private_key_file is not None:
self.common_args += ["-o", "IdentityFile="+os.path.expanduser(self.private_key_file)]
elif self.runner.private_key_file is not None:
self.common_args += ["-o", "IdentityFile="+os.path.expanduser(self.runner.private_key_file)]
if self.password:
self.common_args += ["-o", "GSSAPIAuthentication=no",
"-o", "PubkeyAuthentication=no"]
else:
self.common_args += ["-o", "KbdInteractiveAuthentication=no",
"-o", "PasswordAuthentication=no"]
if self.user != pwd.getpwuid(os.geteuid())[0]:
self.common_args += ["-o", "User="+self.user]
self.common_args += ["-o", "ConnectTimeout=%d" % self.runner.timeout]
return self
def _password_cmd(self):
if self.password:
try:
p = subprocess.Popen(["sshpass"], stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
p.communicate()
except OSError:
raise errors.AnsibleError("to use -c ssh with passwords, you must install the sshpass program")
(self.rfd, self.wfd) = os.pipe()
return ["sshpass", "-d%d" % self.rfd]
return []
def _send_password(self):
if self.password:
os.close(self.rfd)
os.write(self.wfd, "%s\n" % self.password)
os.close(self.wfd)
def exec_command(self, cmd, tmp_path, sudo_user,sudoable=False, executable='/bin/sh'):
''' run a command on the remote host '''
ssh_cmd = self._password_cmd()
ssh_cmd += ["ssh", "-tt", "-q"] + self.common_args + [self.host]
if not self.runner.sudo or not sudoable:
if executable:
ssh_cmd.append(executable + ' -c ' + pipes.quote(cmd))
else:
ssh_cmd.append(cmd)
else:
sudocmd, prompt = utils.make_sudo_cmd(sudo_user, executable, cmd)
ssh_cmd.append(sudocmd)
vvv("EXEC %s" % ssh_cmd, host=self.host)
try:
# Make sure stdin is a proper (pseudo) pty to avoid: tcgetattr errors
import pty
master, slave = pty.openpty()
p = subprocess.Popen(ssh_cmd, stdin=slave,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = os.fdopen(master, 'w', 0)
except:
p = subprocess.Popen(ssh_cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stdin = p.stdin
self._send_password()
if self.runner.sudo and sudoable and self.runner.sudo_pass:
fcntl.fcntl(p.stdout, fcntl.F_SETFL,
fcntl.fcntl(p.stdout, fcntl.F_GETFL) | os.O_NONBLOCK)
sudo_output = ''
while not sudo_output.endswith(prompt):
rfd, wfd, efd = select.select([p.stdout], [],
[p.stdout], self.runner.timeout)
if p.stdout in rfd:
chunk = p.stdout.read()
if not chunk:
raise errors.AnsibleError('ssh connection closed waiting for sudo password prompt')
sudo_output += chunk
else:
stdout = p.communicate()
raise errors.AnsibleError('ssh connection error waiting for sudo password prompt')
stdin.write(self.runner.sudo_pass + '\n')
fcntl.fcntl(p.stdout, fcntl.F_SETFL, fcntl.fcntl(p.stdout, fcntl.F_GETFL) & ~os.O_NONBLOCK)
# We can't use p.communicate here because the ControlMaster may have stdout open as well
stdout = ''
stderr = ''
while True:
rfd, wfd, efd = select.select([p.stdout, p.stderr], [], [p.stdout, p.stderr], 1)
if p.stdout in rfd:
dat = os.read(p.stdout.fileno(), 9000)
stdout += dat
if dat == '':
p.wait()
break
elif p.stderr in rfd:
dat = os.read(p.stderr.fileno(), 9000)
stderr += dat
if dat == '':
p.wait()
break
elif p.poll() is not None:
break
stdin.close() # close stdin after we read from stdout (see also issue #848)
if p.returncode != 0 and stderr.find('Bad configuration option: ControlPersist') != -1:
raise errors.AnsibleError('using -c ssh on certain older ssh versions may not support ControlPersist, set ANSIBLE_SSH_ARGS="" (or ansible_ssh_args in the config file) before running again')
return (p.returncode, '', stdout, stderr)
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise errors.AnsibleFileNotFound("file or module does not exist: %s" % in_path)
cmd = self._password_cmd()
if C.DEFAULT_SCP_IF_SSH:
cmd += ["scp"] + self.common_args
cmd += [in_path,self.host + ":" + out_path]
indata = None
else:
cmd += ["sftp"] + self.common_args + [self.host]
indata = "put %s %s\n" % (in_path, out_path)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self._send_password()
stdout, stderr = p.communicate(indata)
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file to %s:\n%s\n%s" % (out_path, stdout, stderr))
def fetch_file(self, in_path, out_path):
''' fetch a file from remote to local '''
vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)
cmd = self._password_cmd()
if C.DEFAULT_SCP_IF_SSH:
cmd += ["scp"] + self.common_args
cmd += [self.host + ":" + in_path, out_path]
indata = None
else:
cmd += ["sftp"] + self.common_args + [self.host]
indata = "get %s %s\n" % (in_path, out_path)
p = subprocess.Popen(cmd, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
self._send_password()
stdout, stderr = p.communicate(indata)
if p.returncode != 0:
raise errors.AnsibleError("failed to transfer file from %s:\n%s\n%s" % (in_path, stdout, stderr))
def close(self):
''' not applicable since we're executing openssh binaries '''
pass
|
lstorchi/pca_fit
|
refs/heads/master
|
utilities/reader_and.py
|
1
|
import numpy
import math
import sys
import re
from scipy import stats
####################################################################
def file_len(fname):
with open(fname) as f:
for i, l in enumerate(f):
pass
return i + 1
####################################################################
filename = "noname.txt"
if (len(sys.argv) == 1):
print >> sys.stderr, "usage: ", sys.argv[0], " filename.txt"
exit(1)
else:
filename = sys.argv[1]
numofline = file_len(filename)
fp = open(filename, "r")
# jump first line
fp.readline()
meanval1 = []
meanval2 = []
meanval3 = []
meanval4 = []
meanval5 = []
rzmeanval1 = []
rzmeanval2 = []
rzmeanval3 = []
rzmeanval4 = []
rzmeanval5 = []
rphimeanval1 = []
rphimeanval2 = []
rphimeanval3 = []
rphimeanval4 = []
rphimeanval5 = []
for i in range(numofline):
l = fp.readline()
if not l:
break
p = re.compile(r'\s+')
line = p.sub(' ', l)
line = line.lstrip()
line = line.rstrip()
plist = line.split(" ")
numof = int(plist[1])
layersids = ""
layerold = ""
xold = 0.0
yold = 0.0
zold = 0.0
rold = 0.0
phiold = 0.0
val1 = 0.0
val2 = 0.0
val3 = 0.0
val4 = 0.0
val5 = 0.0
rzval1 = 0.0
rzval2 = 0.0
rzval3 = 0.0
rzval4 = 0.0
rzval5 = 0.0
rphival1 = 0.0
rphival2 = 0.0
rphival3 = 0.0
rphival4 = 0.0
rphival5 = 0.0
for j in range(numof):
coordline = fp.readline()
coordline = p.sub(' ', coordline)
coordline = coordline.lstrip()
coordline = coordline.rstrip()
coordlinelist = coordline.split(" ")
layersids += coordlinelist[3]
pid = int(coordlinelist[7])
if (pid > 0):
charge = 1.0
else:
charge = -1.0
xi = float(coordlinelist[0])
yi = float(coordlinelist[1])
zi = float(coordlinelist[2])
ri = math.sqrt(math.pow(xi, 2.0) + math.pow (yi, 2.0))
phii = math.acos(xi/ri)
if (j > 0):
dist = math.sqrt((xi-xold)**2 + (yi-yold)**2 + (zi-zold)**2)
distrz = math.sqrt((ri-rold)**2 + (zi-zold)**2)
distrphi = math.sqrt((ri-rold)**2 + (phii-phiold)**2)
if j == 1:
val1 = dist
rzval1 = distrz
rphival1 = distrphi
elif j == 2:
val2 = dist
rzval2 = distrz
rphival2 = distrphi
elif j == 3:
val3 = dist
rzval3 = distrz
rphival3 = distrphi
elif j == 4:
val4 = dist
rzval4 = distrz
rphival4 = distrphi
elif j == 5:
val5 = dist
rzval5 = distrz
rphival5 = distrphi
layerold = coordlinelist[3]
xold = xi
yold = yi
zold = zi
rold = ri
phiold = phii
paramline = fp.readline()
paramline = p.sub(' ', paramline)
paramline = paramline.lstrip()
paramline = paramline.rstrip()
paramlinelist = paramline.split(" ")
pt = float(paramlinelist[0])
phi = float(paramlinelist[1])
eta = float(paramlinelist[3])
z0 = float(paramlinelist[4])
theta = 2.0 * math.atan (math.exp(-eta))
pz = pt * math.cos(theta)
# quick check for layers id
touse = (layersids == "5678910") # and (eta > -0.6) and (eta < -0.55)
if touse:
meanval1.append(val1)
meanval2.append(val2)
meanval3.append(val3)
meanval4.append(val4)
meanval5.append(val5)
        rzmeanval1.append(rzval1)
        rzmeanval2.append(rzval2)
        rzmeanval3.append(rzval3)
        rzmeanval4.append(rzval4)
        rzmeanval5.append(rzval5)
        rphimeanval1.append(rphival1)
        rphimeanval2.append(rphival2)
        rphimeanval3.append(rphival3)
        rphimeanval4.append(rphival4)
        rphimeanval5.append(rphival5)
print "Using ", len(meanval1) , " events "
print " XYZ "
print " 5 6 ", numpy.mean(meanval1), " ", numpy.std(meanval1)
print " 6 7 ", numpy.mean(meanval2), " ", numpy.std(meanval2)
print " 7 8 ", numpy.mean(meanval3), " ", numpy.std(meanval3)
print " 8 9 ", numpy.mean(meanval4), " ", numpy.std(meanval4)
print " 9 10 ", numpy.mean(meanval5), " ", numpy.std(meanval5)
print " RZ "
print " 5 6 ", numpy.mean(rzmeanval1), " ", numpy.std(rzmeanval1)
print " 6 7 ", numpy.mean(rzmeanval2), " ", numpy.std(rzmeanval2)
print " 7 8 ", numpy.mean(rzmeanval3), " ", numpy.std(rzmeanval3)
print " 8 9 ", numpy.mean(rzmeanval4), " ", numpy.std(rzmeanval4)
print " 9 10 ", numpy.mean(rzmeanval5), " ", numpy.std(rzmeanval5)
print " RPHI "
print " 5 6 ", numpy.mean(rphimeanval1), " ", numpy.std(rphimeanval1)
print " 6 7 ", numpy.mean(rphimeanval2), " ", numpy.std(rphimeanval2)
print " 7 8 ", numpy.mean(rphimeanval3), " ", numpy.std(rphimeanval3)
print " 8 9 ", numpy.mean(rphimeanval4), " ", numpy.std(rphimeanval4)
print " 9 10 ", numpy.mean(rphimeanval5), " ", numpy.std(rphimeanval5)
fp.close()
|
asadziach/tensorflow
|
refs/heads/pedestrian_detection_walabot_tf
|
tensorflow/tools/docker/simple_console.py
|
603
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Start a simple interactive console with TensorFlow available."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import code
import sys
def main(_):
"""Run an interactive console."""
code.interact()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
mitchrule/Miscellaneous
|
refs/heads/master
|
Django_Project/django/Lib/site-packages/wheel/test/test_install.py
|
109
|
# Test wheel.
# The file has the following contents:
# hello.pyd
# hello/hello.py
# hello/__init__.py
# test-1.0.data/data/hello.dat
# test-1.0.data/headers/hello.dat
# test-1.0.data/scripts/hello.sh
# test-1.0.dist-info/WHEEL
# test-1.0.dist-info/METADATA
# test-1.0.dist-info/RECORD
# The root is PLATLIB
# So, some in PLATLIB, and one in each of DATA, HEADERS and SCRIPTS.
import wheel.tool
import wheel.pep425tags
from wheel.install import WheelFile
from tempfile import mkdtemp
import shutil
import os
THISDIR = os.path.dirname(__file__)
TESTWHEEL = os.path.join(THISDIR, 'test-1.0-py2.py3-none-win32.whl')
def check(*path):
return os.path.exists(os.path.join(*path))
def test_install():
tempdir = mkdtemp()
def get_supported():
return list(wheel.pep425tags.get_supported()) + [('py3', 'none', 'win32')]
whl = WheelFile(TESTWHEEL, context=get_supported)
assert whl.supports_current_python(get_supported)
try:
locs = {}
for key in ('purelib', 'platlib', 'scripts', 'headers', 'data'):
locs[key] = os.path.join(tempdir, key)
os.mkdir(locs[key])
whl.install(overrides=locs)
assert len(os.listdir(locs['purelib'])) == 0
assert check(locs['platlib'], 'hello.pyd')
assert check(locs['platlib'], 'hello', 'hello.py')
assert check(locs['platlib'], 'hello', '__init__.py')
assert check(locs['data'], 'hello.dat')
assert check(locs['headers'], 'hello.dat')
assert check(locs['scripts'], 'hello.sh')
assert check(locs['platlib'], 'test-1.0.dist-info', 'RECORD')
finally:
shutil.rmtree(tempdir)
def test_install_tool():
"""Slightly improve coverage of wheel.install"""
wheel.tool.install([TESTWHEEL], force=True, dry_run=True)
|
pvagner/orca
|
refs/heads/master
|
test/keystrokes/firefox/html_struct_nav_blockquote.py
|
1
|
#!/usr/bin/python
"""Test of structural navigation by blockquote."""
from macaroon.playback import *
import utils
sequence = MacroSequence()
sequence.append(KeyComboAction("<Control>Home"))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("q"))
sequence.append(utils.AssertPresentationAction(
"1. q to first quote",
["BRAILLE LINE: 'NOBODY expects the Spanish Inquisition! Our chief weapon is surprise. Surprise and fear. Fear and surprise. Our two weapons are fear and surprise. And ruthless efficiency. Our three weapons are fear, surprise, and ruthless efficiency. And an almost fanatical devotion to the Pope. Our four. No. Amongst our weapons. Amongst our weaponry, are such elements as fear, surprise. I'll come in again. NOBODY expects the Spanish Inquisition! Amongst our weaponry are such diverse elements as: fear, surprise, ruthless efficiency, an almost fanatical devotion to the Pope, and nice red uniforms - Oh damn!'",
" VISIBLE: 'NOBODY expects the Spanish Inqui', cursor=1",
"SPEECH OUTPUT: 'NOBODY expects the Spanish Inquisition! Our chief weapon is surprise. Surprise and fear. Fear and surprise. Our two weapons are fear and surprise. And ruthless efficiency. Our three weapons are fear, surprise, and ruthless efficiency. And an almost fanatical devotion to the Pope. Our four. No. Amongst our weapons. Amongst our weaponry, are such elements as fear, surprise. I'll come in again. NOBODY expects the Spanish Inquisition! Amongst our weaponry are such diverse elements as: fear, surprise, ruthless efficiency, an almost fanatical devotion to the Pope, and nice red uniforms - Oh damn!'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("q"))
sequence.append(utils.AssertPresentationAction(
"2. q to second quote",
["BRAILLE LINE: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the ungodly. Two last chances. And you shall be free. Three last chances. You have three last chances, the nature of which I have divulged in my previous utterance.'",
" VISIBLE: 'Now old lady, you have one last ', cursor=1",
"SPEECH OUTPUT: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the ungodly. Two last chances. And you shall be free. Three last chances. You have three last chances, the nature of which I have divulged in my previous utterance.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("q"))
sequence.append(utils.AssertPresentationAction(
"3. q to third quote",
["BRAILLE LINE: 'Hm! She is made of harder stuff! Cardinal Fang! Fetch the COMFY CHAIR!'",
" VISIBLE: 'Hm! She is made of harder stuff!', cursor=1",
"SPEECH OUTPUT: 'Hm! She is made of harder stuff! Cardinal Fang! Fetch the COMFY CHAIR!'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("q"))
sequence.append(utils.AssertPresentationAction(
"4. q wrap to top",
["BRAILLE LINE: 'Wrapping to top.'",
" VISIBLE: 'Wrapping to top.', cursor=0",
"BRAILLE LINE: 'NOBODY expects the Spanish Inquisition! Our chief weapon is surprise. Surprise and fear. Fear and surprise. Our two weapons are fear and surprise. And ruthless efficiency. Our three weapons are fear, surprise, and ruthless efficiency. And an almost fanatical devotion to the Pope. Our four. No. Amongst our weapons. Amongst our weaponry, are such elements as fear, surprise. I'll come in again. NOBODY expects the Spanish Inquisition! Amongst our weaponry are such diverse elements as: fear, surprise, ruthless efficiency, an almost fanatical devotion to the Pope, and nice red uniforms - Oh damn!'",
" VISIBLE: 'NOBODY expects the Spanish Inqui', cursor=1",
"SPEECH OUTPUT: 'Wrapping to top.' voice=system",
"SPEECH OUTPUT: 'NOBODY expects the Spanish Inquisition! Our chief weapon is surprise. Surprise and fear. Fear and surprise. Our two weapons are fear and surprise. And ruthless efficiency. Our three weapons are fear, surprise, and ruthless efficiency. And an almost fanatical devotion to the Pope. Our four. No. Amongst our weapons. Amongst our weaponry, are such elements as fear, surprise. I'll come in again. NOBODY expects the Spanish Inquisition! Amongst our weaponry are such diverse elements as: fear, surprise, ruthless efficiency, an almost fanatical devotion to the Pope, and nice red uniforms - Oh damn!'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>q"))
sequence.append(utils.AssertPresentationAction(
"5. Shift+q wrap to bottom",
["BRAILLE LINE: 'Wrapping to bottom.'",
" VISIBLE: 'Wrapping to bottom.', cursor=0",
"BRAILLE LINE: 'Hm! She is made of harder stuff! Cardinal Fang! Fetch the COMFY CHAIR!'",
" VISIBLE: 'Hm! She is made of harder stuff!', cursor=1",
"SPEECH OUTPUT: 'Wrapping to bottom.' voice=system",
"SPEECH OUTPUT: 'Hm! She is made of harder stuff! Cardinal Fang! Fetch the COMFY CHAIR!'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("<Shift>q"))
sequence.append(utils.AssertPresentationAction(
"6. Shift+q to second quote",
["BRAILLE LINE: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the ungodly. Two last chances. And you shall be free. Three last chances. You have three last chances, the nature of which I have divulged in my previous utterance.'",
" VISIBLE: 'Now old lady, you have one last ', cursor=1",
"SPEECH OUTPUT: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the ungodly. Two last chances. And you shall be free. Three last chances. You have three last chances, the nature of which I have divulged in my previous utterance.'"]))
sequence.append(utils.StartRecordingAction())
sequence.append(KeyComboAction("KP_Enter"))
sequence.append(utils.AssertPresentationAction(
"7. Basic Where Am I",
["BRAILLE LINE: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the '",
" VISIBLE: 'Now old lady, you have one last ', cursor=1",
"SPEECH OUTPUT: 'Now old lady, you have one last chance. Confess the heinous sin of heresy, reject the works of the '"]))
sequence.append(utils.AssertionSummaryAction())
sequence.start()
|
haldun/bookmarks
|
refs/heads/master
|
importer.py
|
1
|
import datetime
import hashlib
import logging
from lxml import etree
from pymongo.objectid import ObjectId
from bson.dbref import DBRef
class Importer(object):
def __init__(self, db, owner, contents):
self.db = db
self.owner = owner
self.contents = contents
def import_bookmarks(self):
collection = self.db.bookmarks
url_digests = dict((b['url_digest'], b['_id'])
for b in collection.find({'user': self.owner._id}, fields=['url_digest']))
root = etree.fromstring(self.contents, etree.HTMLParser())
bookmarks = list()
for link in root.xpath('//a'):
url = link.attrib.get('href')
if not url or not url.startswith('http'):
continue
title = link.text
url_digest = hashlib.md5(url.encode('utf8')).hexdigest()
bookmark = {
"user" : self.owner._id,
'url': url,
'url_digest': url_digest,
'title': title or url,
}
if url_digest in url_digests:
bookmark['_id'] = url_digests[url_digest]
if 'add_date' in link.attrib:
try:
bookmark['modified'] = datetime.datetime.fromtimestamp(float(link.attrib['add_date']))
except:
pass
if 'tags' in link.attrib:
bookmark['tags'] = link.attrib['tags'].split(',')
description_tag = link.getparent().getnext()
if description_tag is not None and description_tag.tag == 'dd':
bookmark['description'] = description_tag.text
bookmarks.append(bookmark)
if bookmarks:
collection.insert(bookmarks)
tasks = []
for bookmark in bookmarks:
tasks.append({
'url': bookmark['url'],
'user': self.owner._id,
'bookmark': bookmark['url_digest'],
'status': False,
})
self.db.tasks.insert(tasks)
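# A minimal usage sketch, assuming `db` is an open pymongo database handle,
# `owner` exposes an `_id` attribute, and `html` holds the raw text of a
# Netscape-style bookmarks export; all three names are illustrative.
#
#     importer = Importer(db, owner, html)
#     importer.import_bookmarks()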
|
gltn/stdm
|
refs/heads/master
|
stdm/third_party/migrate/versioning/schemadiff.py
|
78
|
"""
Schema differencing support.
"""
import logging
import sqlalchemy
from sqlalchemy.types import Float
log = logging.getLogger(__name__)
def getDiffOfModelAgainstDatabase(metadata, engine, excludeTables=None):
"""
Return differences of model against database.
:return: object which will evaluate to :keyword:`True` if there \
are differences else :keyword:`False`.
"""
db_metadata = sqlalchemy.MetaData(engine)
db_metadata.reflect()
# sqlite will include a dynamically generated 'sqlite_sequence' table if
# there are autoincrement sequences in the database; this should not be
# compared.
if engine.dialect.name == 'sqlite':
if 'sqlite_sequence' in db_metadata.tables:
db_metadata.remove(db_metadata.tables['sqlite_sequence'])
return SchemaDiff(metadata, db_metadata,
labelA='model',
labelB='database',
excludeTables=excludeTables)
def getDiffOfModelAgainstModel(metadataA, metadataB, excludeTables=None):
"""
Return differences of model against another model.
:return: object which will evaluate to :keyword:`True` if there \
are differences else :keyword:`False`.
"""
return SchemaDiff(metadataA, metadataB, excludeTables=excludeTables)
class ColDiff(object):
"""
Container for differences in one :class:`~sqlalchemy.schema.Column`
between two :class:`~sqlalchemy.schema.Table` instances, ``A``
and ``B``.
.. attribute:: col_A
The :class:`~sqlalchemy.schema.Column` object for A.
.. attribute:: col_B
The :class:`~sqlalchemy.schema.Column` object for B.
.. attribute:: type_A
The most generic type of the :class:`~sqlalchemy.schema.Column`
object in A.
.. attribute:: type_B
The most generic type of the :class:`~sqlalchemy.schema.Column`
object in B.
"""
diff = False
def __init__(self,col_A,col_B):
self.col_A = col_A
self.col_B = col_B
self.type_A = col_A.type
self.type_B = col_B.type
self.affinity_A = self.type_A._type_affinity
self.affinity_B = self.type_B._type_affinity
if self.affinity_A is not self.affinity_B:
self.diff = True
return
if isinstance(self.type_A,Float) or isinstance(self.type_B,Float):
if not (isinstance(self.type_A,Float) and isinstance(self.type_B,Float)):
self.diff=True
return
for attr in ('precision','scale','length'):
A = getattr(self.type_A,attr,None)
B = getattr(self.type_B,attr,None)
if not (A is None or B is None) and A!=B:
self.diff=True
return
def __nonzero__(self):
return self.diff
__bool__ = __nonzero__
class TableDiff(object):
"""
Container for differences in one :class:`~sqlalchemy.schema.Table`
between two :class:`~sqlalchemy.schema.MetaData` instances, ``A``
and ``B``.
.. attribute:: columns_missing_from_A
A sequence of column names that were found in B but weren't in
A.
.. attribute:: columns_missing_from_B
A sequence of column names that were found in A but weren't in
B.
.. attribute:: columns_different
A dictionary containing information about columns that were
found to be different.
It maps column names to a :class:`ColDiff` objects describing the
differences found.
"""
__slots__ = (
'columns_missing_from_A',
'columns_missing_from_B',
'columns_different',
)
def __nonzero__(self):
return bool(
self.columns_missing_from_A or
self.columns_missing_from_B or
self.columns_different
)
__bool__ = __nonzero__
class SchemaDiff(object):
"""
Compute the difference between two :class:`~sqlalchemy.schema.MetaData`
objects.
The string representation of a :class:`SchemaDiff` will summarise
the changes found between the two
:class:`~sqlalchemy.schema.MetaData` objects.
The length of a :class:`SchemaDiff` will give the number of
changes found, enabling it to be used much like a boolean in
expressions.
:param metadataA:
First :class:`~sqlalchemy.schema.MetaData` to compare.
:param metadataB:
Second :class:`~sqlalchemy.schema.MetaData` to compare.
:param labelA:
The label to use in messages about the first
:class:`~sqlalchemy.schema.MetaData`.
:param labelB:
The label to use in messages about the second
:class:`~sqlalchemy.schema.MetaData`.
:param excludeTables:
A sequence of table names to exclude.
.. attribute:: tables_missing_from_A
A sequence of table names that were found in B but weren't in
A.
.. attribute:: tables_missing_from_B
A sequence of table names that were found in A but weren't in
B.
.. attribute:: tables_different
A dictionary containing information about tables that were found
to be different.
It maps table names to a :class:`TableDiff` objects describing the
differences found.
"""
def __init__(self,
metadataA, metadataB,
labelA='metadataA',
labelB='metadataB',
excludeTables=None):
self.metadataA, self.metadataB = metadataA, metadataB
self.labelA, self.labelB = labelA, labelB
self.label_width = max(len(labelA),len(labelB))
excludeTables = set(excludeTables or [])
A_table_names = set(metadataA.tables.keys())
B_table_names = set(metadataB.tables.keys())
self.tables_missing_from_A = sorted(
B_table_names - A_table_names - excludeTables
)
self.tables_missing_from_B = sorted(
A_table_names - B_table_names - excludeTables
)
self.tables_different = {}
for table_name in A_table_names.intersection(B_table_names):
td = TableDiff()
A_table = metadataA.tables[table_name]
B_table = metadataB.tables[table_name]
A_column_names = set(A_table.columns.keys())
B_column_names = set(B_table.columns.keys())
td.columns_missing_from_A = sorted(
B_column_names - A_column_names
)
td.columns_missing_from_B = sorted(
A_column_names - B_column_names
)
td.columns_different = {}
for col_name in A_column_names.intersection(B_column_names):
cd = ColDiff(
A_table.columns.get(col_name),
B_table.columns.get(col_name)
)
if cd:
td.columns_different[col_name]=cd
# XXX - index and constraint differences should
# be checked for here
if td:
self.tables_different[table_name]=td
def __str__(self):
''' Summarize differences. '''
out = []
column_template =' %%%is: %%r' % self.label_width
for names,label in (
(self.tables_missing_from_A,self.labelA),
(self.tables_missing_from_B,self.labelB),
):
if names:
out.append(
' tables missing from %s: %s' % (
label,', '.join(sorted(names))
)
)
for name,td in sorted(self.tables_different.items()):
out.append(
' table with differences: %s' % name
)
for names,label in (
(td.columns_missing_from_A,self.labelA),
(td.columns_missing_from_B,self.labelB),
):
if names:
out.append(
' %s missing these columns: %s' % (
label,', '.join(sorted(names))
)
)
for name,cd in td.columns_different.items():
out.append(' column with differences: %s' % name)
out.append(column_template % (self.labelA,cd.col_A))
out.append(column_template % (self.labelB,cd.col_B))
if out:
out.insert(0, 'Schema diffs:')
return '\n'.join(out)
else:
return 'No schema diffs'
def __len__(self):
"""
Used in bool evaluation, return of 0 means no diffs.
"""
return (
len(self.tables_missing_from_A) +
len(self.tables_missing_from_B) +
len(self.tables_different)
)
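# A minimal usage sketch of the truthiness/len/str behaviour documented on
# SchemaDiff; `model_meta` and `engine` are illustrative names for the
# application MetaData and a bound SQLAlchemy engine.
#
#     diff = getDiffOfModelAgainstDatabase(model_meta, engine)
#     if diff:                # truthy when any difference was found
#         print(len(diff))    # number of missing or differing tables
#         print(diff)         # human-readable summary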
|
drpaneas/linuxed.gr
|
refs/heads/master
|
lib/python2.7/site-packages/paramiko/ecdsakey.py
|
3
|
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
ECDSA keys
"""
import binascii
from hashlib import sha256
from ecdsa import SigningKey, VerifyingKey, der, curves
from paramiko.common import four_byte, one_byte
from paramiko.message import Message
from paramiko.pkey import PKey
from paramiko.py3compat import byte_chr, u
from paramiko.ssh_exception import SSHException
class ECDSAKey (PKey):
"""
Representation of an ECDSA key which can be used to sign and verify SSH2
data.
"""
def __init__(self, msg=None, data=None, filename=None, password=None,
vals=None, file_obj=None, validate_point=True):
self.verifying_key = None
self.signing_key = None
if file_obj is not None:
self._from_private_key(file_obj, password)
return
if filename is not None:
self._from_private_key_file(filename, password)
return
if (msg is None) and (data is not None):
msg = Message(data)
if vals is not None:
self.signing_key, self.verifying_key = vals
else:
if msg is None:
raise SSHException('Key object may not be empty')
if msg.get_text() != 'ecdsa-sha2-nistp256':
raise SSHException('Invalid key')
curvename = msg.get_text()
if curvename != 'nistp256':
raise SSHException("Can't handle curve of type %s" % curvename)
pointinfo = msg.get_binary()
if pointinfo[0:1] != four_byte:
raise SSHException('Point compression is being used: %s' %
binascii.hexlify(pointinfo))
self.verifying_key = VerifyingKey.from_string(pointinfo[1:],
curve=curves.NIST256p,
validate_point=validate_point)
self.size = 256
def asbytes(self):
key = self.verifying_key
m = Message()
m.add_string('ecdsa-sha2-nistp256')
m.add_string('nistp256')
point_str = four_byte + key.to_string()
m.add_string(point_str)
return m.asbytes()
def __str__(self):
return self.asbytes()
def __hash__(self):
h = hash(self.get_name())
h = h * 37 + hash(self.verifying_key.pubkey.point.x())
h = h * 37 + hash(self.verifying_key.pubkey.point.y())
return hash(h)
def get_name(self):
return 'ecdsa-sha2-nistp256'
def get_bits(self):
return self.size
def can_sign(self):
return self.signing_key is not None
def sign_ssh_data(self, data):
sig = self.signing_key.sign_deterministic(
data, sigencode=self._sigencode, hashfunc=sha256)
m = Message()
m.add_string('ecdsa-sha2-nistp256')
m.add_string(sig)
return m
def verify_ssh_sig(self, data, msg):
if msg.get_text() != 'ecdsa-sha2-nistp256':
return False
sig = msg.get_binary()
# verify the signature by hashing the data with SHA-256 and checking
# the resulting digest against the public key.
hash_obj = sha256(data).digest()
return self.verifying_key.verify_digest(sig, hash_obj,
sigdecode=self._sigdecode)
def write_private_key_file(self, filename, password=None):
key = self.signing_key or self.verifying_key
self._write_private_key_file('EC', filename, key.to_der(), password)
def write_private_key(self, file_obj, password=None):
key = self.signing_key or self.verifying_key
self._write_private_key('EC', file_obj, key.to_der(), password)
def generate(curve=curves.NIST256p, progress_func=None):
"""
Generate a new private ECDSA key. This factory function can be used to
generate a new host key or authentication key.
:param function progress_func:
an optional progress callback (not used for this key type).
:returns: A new private key (`.ECDSAKey`) object
"""
signing_key = SigningKey.generate(curve)
key = ECDSAKey(vals=(signing_key, signing_key.get_verifying_key()))
return key
generate = staticmethod(generate)
### internals...
def _from_private_key_file(self, filename, password):
data = self._read_private_key_file('EC', filename, password)
self._decode_key(data)
def _from_private_key(self, file_obj, password):
data = self._read_private_key('EC', file_obj, password)
self._decode_key(data)
ALLOWED_PADDINGS = [one_byte, byte_chr(2) * 2, byte_chr(3) * 3, byte_chr(4) * 4,
byte_chr(5) * 5, byte_chr(6) * 6, byte_chr(7) * 7]
def _decode_key(self, data):
s, padding = der.remove_sequence(data)
if padding:
if padding not in self.ALLOWED_PADDINGS:
raise ValueError("weird padding: %s" % u(binascii.hexlify(data)))
data = data[:-len(padding)]
key = SigningKey.from_der(data)
self.signing_key = key
self.verifying_key = key.get_verifying_key()
self.size = 256
def _sigencode(self, r, s, order):
msg = Message()
msg.add_mpint(r)
msg.add_mpint(s)
return msg.asbytes()
def _sigdecode(self, sig, order):
msg = Message(sig)
r = msg.get_mpint()
s = msg.get_mpint()
return r, s
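# A minimal sign/verify round trip, assuming the `ecdsa` package this module
# depends on is installed; `payload` is an illustrative name.
#
#     key = ECDSAKey.generate()
#     payload = b'some session data'
#     sig = key.sign_ssh_data(payload)
#     assert key.verify_ssh_sig(payload, Message(sig.asbytes()))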
|
qt911025/pw_module_system
|
refs/heads/pw
|
module_sounds.py
|
2
|
from header_sounds import *
# Many of these sound entries are hard coded into the engine, and should not be removed; to disable them, empty the sound file list.
# Add your own sounds just before the animation sounds group, or before sounds_end.
sounds = [
("click", sf_2d|sf_priority_9|sf_vol_3, ["drum_3.ogg"]),
("tutorial_1", sf_2d|sf_priority_9|sf_vol_7, ["tutorial_1.ogg"]),
("tutorial_2", sf_2d|sf_priority_9|sf_vol_7, ["tutorial_2.ogg"]),
("gong", sf_2d|sf_priority_9|sf_vol_7, ["s_cymbals.ogg"]),
("quest_taken", sf_2d|sf_priority_9|sf_vol_7, []),
("quest_completed", sf_2d|sf_priority_9|sf_vol_8, ["quest_completed.ogg"]),
("quest_succeeded", sf_2d|sf_priority_9|sf_vol_6, ["quest_succeeded.ogg"]),
("quest_concluded", sf_2d|sf_priority_9|sf_vol_7, ["drum_3.ogg"]),
("quest_failed", sf_2d|sf_priority_9|sf_vol_7, ["quest_failed.ogg"]),
("quest_cancelled", sf_2d|sf_priority_9|sf_vol_7, ["quest_cancelled.ogg"]),
("rain", sf_2d|sf_priority_2|sf_vol_4|sf_looping, ["rain_1.ogg"]),
("money_received", sf_priority_5|sf_vol_4, ["coins_dropped_1.ogg"]),
("money_paid", sf_priority_5|sf_vol_4, ["coins_dropped_2.ogg"]),
("sword_clash_1", 0, ["sword_clank_metal_09.ogg","sword_clank_metal_09b.ogg","sword_clank_metal_10.ogg","sword_clank_metal_10b.ogg","sword_clank_metal_12.ogg","sword_clank_metal_12b.ogg","sword_clank_metal_13.ogg","sword_clank_metal_13b.ogg"]),
("sword_clash_2", 0, ["drum_3.ogg"]),
("sword_clash_3", 0, ["drum_3.ogg"]),
("sword_swing", sf_priority_12|sf_vol_8, ["s_swordSwing.wav"]),
("footstep_grass", sf_priority_1|sf_vol_4, ["footstep_1.ogg","footstep_2.ogg","footstep_3.ogg","footstep_4.ogg"]),
("footstep_wood", sf_priority_1|sf_vol_6, ["footstep_wood_1.ogg","footstep_wood_2.ogg","footstep_wood_4.ogg"]),
("footstep_water", sf_priority_3|sf_vol_4, ["water_walk_1.ogg","water_walk_2.ogg","water_walk_3.ogg","water_walk_4.ogg"]),
("footstep_horse", sf_priority_3|sf_vol_8, ["drum_3.ogg"]),
("footstep_horse_1b", sf_priority_3|sf_vol_8, ["s_footstep_horse_4b.wav","s_footstep_horse_4f.wav","s_footstep_horse_5b.wav","s_footstep_horse_5f.wav"]),
("footstep_horse_1f", sf_priority_3|sf_vol_8, ["s_footstep_horse_2b.wav","s_footstep_horse_2f.wav","s_footstep_horse_3b.wav","s_footstep_horse_3f.wav"]),
("footstep_horse_2b", sf_priority_3|sf_vol_8, ["s_footstep_horse_2b.wav"]),
("footstep_horse_2f", sf_priority_3|sf_vol_8, ["s_footstep_horse_2f.wav"]),
("footstep_horse_3b", sf_priority_3|sf_vol_8, ["s_footstep_horse_3b.wav"]),
("footstep_horse_3f", sf_priority_3|sf_vol_8, ["s_footstep_horse_3f.wav"]),
("footstep_horse_4b", sf_priority_3|sf_vol_8, ["s_footstep_horse_4b.wav"]),
("footstep_horse_4f", sf_priority_3|sf_vol_8, ["s_footstep_horse_4f.wav"]),
("footstep_horse_5b", sf_priority_3|sf_vol_8, ["s_footstep_horse_5b.wav"]),
("footstep_horse_5f", sf_priority_3|sf_vol_8, ["s_footstep_horse_5f.wav"]),
("jump_begin", sf_priority_9|sf_vol_7, ["jump_begin.ogg"]),
("jump_end", sf_priority_9|sf_vol_5, ["jump_end.ogg"]),
("jump_begin_water", sf_priority_9|sf_vol_4, ["jump_begin_water.ogg"]),
("jump_end_water", sf_priority_9|sf_vol_4, ["jump_end_water.ogg"]),
("horse_jump_begin", sf_priority_9|sf_vol_4, ["horse_jump_begin.ogg"]),
("horse_jump_end", sf_priority_9|sf_vol_4, ["horse_jump_end.ogg"]),
("horse_jump_begin_water", sf_priority_9|sf_vol_5, ["jump_begin_water.ogg"]),
("horse_jump_end_water", sf_priority_9|sf_vol_5, ["jump_end_water.ogg"]),
("release_bow", sf_priority_10|sf_vol_5, ["release_bow_1.ogg"]),
("release_crossbow", sf_priority_10|sf_vol_7, ["release_crossbow_1.ogg"]),
("throw_javelin", sf_priority_10|sf_vol_5, ["throw_javelin_2.ogg"]),
("throw_axe", sf_priority_10|sf_vol_7, ["throw_axe_1.ogg"]),
("throw_knife", sf_priority_10|sf_vol_5, ["throw_knife_1.ogg"]),
("throw_stone", sf_priority_10|sf_vol_7, ["throw_stone_1.ogg"]),
("reload_crossbow", sf_priority_8|sf_vol_3, ["reload_crossbow_1.ogg"]),
("reload_crossbow_continue", sf_priority_4|sf_vol_6, ["put_back_dagger.ogg"]),
("pull_bow", sf_priority_10|sf_vol_4, ["pull_bow_1.ogg"]),
("pull_arrow", sf_priority_4|sf_vol_5, ["pull_arrow.ogg"]),
("arrow_pass_by", sf_priority_9|sf_vol_10, ["arrow_pass_by_1.ogg","arrow_pass_by_2.ogg","arrow_pass_by_3.ogg","arrow_pass_by_4.ogg"]),
("bolt_pass_by", sf_priority_9|sf_vol_10, ["bolt_pass_by_1.ogg"]),
("javelin_pass_by", sf_priority_9|sf_vol_10, ["javelin_pass_by_1.ogg","javelin_pass_by_2.ogg"]),
("stone_pass_by", sf_priority_9|sf_vol_9, ["stone_pass_by_1.ogg"]),
("axe_pass_by", sf_priority_9|sf_vol_10, ["axe_pass_by_1.ogg"]),
("knife_pass_by", sf_priority_9|sf_vol_10, ["knife_pass_by_1.ogg"]),
("bullet_pass_by", sf_priority_9|sf_vol_10, ["arrow_whoosh_1.ogg"]),
("incoming_arrow_hit_ground", sf_priority_7|sf_vol_7, ["arrow_hit_ground_2.ogg","arrow_hit_ground_3.ogg","incoming_bullet_hit_ground_1.ogg"]),
("incoming_bolt_hit_ground", sf_priority_7|sf_vol_7, ["arrow_hit_ground_2.ogg","arrow_hit_ground_3.ogg","incoming_bullet_hit_ground_1.ogg"]),
("incoming_javelin_hit_ground", sf_priority_7|sf_vol_7, ["incoming_javelin_hit_ground_1.ogg"]),
("incoming_stone_hit_ground", sf_priority_7|sf_vol_7, ["incoming_stone_hit_ground_1.ogg"]),
("incoming_axe_hit_ground", sf_priority_7|sf_vol_7, ["incoming_javelin_hit_ground_1.ogg"]),
("incoming_knife_hit_ground", sf_priority_7|sf_vol_7, ["incoming_stone_hit_ground_1.ogg"]),
("incoming_bullet_hit_ground", sf_priority_7|sf_vol_7, ["incoming_bullet_hit_ground_1.ogg"]),
("outgoing_arrow_hit_ground", sf_priority_6|sf_vol_7, ["outgoing_arrow_hit_ground.ogg"]),
("outgoing_bolt_hit_ground", sf_priority_6|sf_vol_7, ["outgoing_arrow_hit_ground.ogg"]),
("outgoing_javelin_hit_ground", sf_priority_6|sf_vol_10, ["outgoing_arrow_hit_ground.ogg"]),
("outgoing_stone_hit_ground", sf_priority_6|sf_vol_7, ["incoming_stone_hit_ground_1.ogg"]),
("outgoing_axe_hit_ground", sf_priority_6|sf_vol_7, ["incoming_javelin_hit_ground_1.ogg"]),
("outgoing_knife_hit_ground", sf_priority_6|sf_vol_7, ["incoming_stone_hit_ground_1.ogg"]),
("outgoing_bullet_hit_ground", sf_priority_6|sf_vol_7, ["incoming_bullet_hit_ground_1.ogg"]),
("draw_sword", sf_priority_8|sf_vol_4, ["draw_sword.ogg"]),
("put_back_sword", sf_priority_6|sf_vol_4, ["put_back_sword.ogg"]),
("draw_greatsword", sf_priority_9|sf_vol_4, ["draw_greatsword.ogg"]),
("put_back_greatsword", sf_priority_6|sf_vol_4, ["put_back_sword.ogg"]),
("draw_axe", sf_priority_8|sf_vol_4, ["draw_mace.ogg"]),
("put_back_axe", sf_priority_6|sf_vol_4, ["put_back_to_holster.ogg"]),
("draw_greataxe", sf_priority_9|sf_vol_4, ["draw_greataxe.ogg"]),
("put_back_greataxe", sf_priority_6|sf_vol_4, ["put_back_to_leather.ogg"]),
("draw_spear", sf_priority_7|sf_vol_4, ["draw_spear.ogg"]),
("put_back_spear", sf_priority_5|sf_vol_4, ["put_back_to_leather.ogg"]),
("draw_crossbow", sf_priority_7|sf_vol_4, ["draw_crossbow.ogg"]),
("put_back_crossbow", sf_priority_5|sf_vol_4, ["put_back_to_leather.ogg"]),
("draw_revolver", sf_priority_8|sf_vol_4, ["draw_from_holster.ogg"]),
("put_back_revolver", sf_priority_6|sf_vol_4, ["put_back_to_holster.ogg"]),
("draw_dagger", sf_priority_8|sf_vol_4, ["draw_dagger.ogg"]),
("put_back_dagger", sf_priority_6|sf_vol_4, ["put_back_dagger.ogg"]),
("draw_bow", sf_priority_7|sf_vol_4, ["draw_bow.ogg"]),
("put_back_bow", sf_priority_5|sf_vol_4, ["put_back_to_holster.ogg"]),
("draw_shield", sf_priority_4|sf_vol_3, ["draw_shield.ogg"]),
("put_back_shield", sf_priority_4|sf_vol_3, ["put_back_shield.ogg"]),
("draw_other", sf_priority_8|sf_vol_4, ["draw_other.ogg"]),
("put_back_other", sf_priority_6|sf_vol_4, ["draw_other2.ogg"]),
("body_fall_small", sf_priority_7|sf_vol_8, ["body_fall_small_1.ogg","body_fall_small_2.ogg"]),
("body_fall_big", sf_priority_9|sf_vol_9, ["body_fall_1.ogg","body_fall_2.ogg","body_fall_3.ogg"]),
("horse_body_fall_begin", sf_priority_9|sf_vol_10, ["horse_body_fall_begin_1.ogg"]),
("horse_body_fall_end", sf_priority_9|sf_vol_10, ["horse_body_fall_end_1.ogg","body_fall_2.ogg"]),
("hit_wood_wood", sf_priority_11|sf_vol_9, ["hit_wood_wood_1.ogg","hit_wood_wood_2.ogg","hit_wood_wood_3.ogg","hit_wood_wood_4.ogg","hit_wood_metal_4.ogg","hit_wood_metal_5.ogg","hit_wood_metal_6.ogg"]),
("hit_metal_metal", sf_priority_11|sf_vol_10, ["hit_metal_metal_3.ogg","hit_metal_metal_4.ogg","hit_metal_metal_5.ogg","hit_metal_metal_6.ogg","hit_metal_metal_7.ogg","hit_metal_metal_8.ogg","hit_metal_metal_9.ogg","hit_metal_metal_10.ogg","clang_metal_1.ogg","clang_metal_2.ogg"]),
("hit_wood_metal", sf_priority_11|sf_vol_10, ["hit_metal_metal_1.ogg","hit_metal_metal_2.ogg","hit_wood_metal_7.ogg"]),
("shield_hit_wood_wood", sf_priority_11|sf_vol_10, ["shield_hit_wood_wood_1.ogg","shield_hit_wood_wood_2.ogg","shield_hit_wood_wood_3.ogg"]),
("shield_hit_metal_metal", sf_priority_11|sf_vol_10, ["shield_hit_metal_metal_1.ogg","shield_hit_metal_metal_2.ogg","shield_hit_metal_metal_3.ogg","shield_hit_metal_metal_4.ogg"]),
("shield_hit_wood_metal", sf_priority_11|sf_vol_10, ["shield_hit_cut_3.ogg","shield_hit_cut_4.ogg","shield_hit_cut_5.ogg","shield_hit_cut_10.ogg"]),
("shield_hit_metal_wood", sf_priority_11|sf_vol_10, ["shield_hit_metal_wood_1.ogg","shield_hit_metal_wood_2.ogg","shield_hit_metal_wood_3.ogg"]),
("shield_broken", sf_priority_12|sf_vol_10, ["shield_broken.ogg"]),
("man_hit", sf_priority_11|sf_vol_8, ["man_hit_5.ogg","man_hit_6.ogg","man_hit_7.ogg","man_hit_8.ogg","man_hit_9.ogg","man_hit_10.ogg","man_hit_11.ogg","man_hit_12.ogg","man_hit_13.ogg","man_hit_14.ogg","man_hit_15.ogg","man_hit_17.ogg","man_hit_18.ogg","man_hit_19.ogg","man_hit_22.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg","man_hit_59.ogg"]),
("man_die", sf_priority_12|sf_vol_10, ["man_death_1.ogg","man_death_8.ogg","man_death_8b.ogg","man_death_11.ogg","man_death_14.ogg","man_death_16.ogg","man_death_18.ogg","man_death_21.ogg","man_death_22.ogg","man_death_29.ogg","man_death_40.ogg","man_death_44.ogg","man_death_46.ogg","man_death_48.ogg","man_death_64.ogg"]),
("woman_hit", sf_priority_11|sf_vol_8, ["woman_hit_2.ogg","woman_hit_3.ogg","woman_hit_b_2.ogg","woman_hit_b_4.ogg","woman_hit_b_6.ogg","woman_hit_b_7.ogg","woman_hit_b_8.ogg","woman_hit_b_11.ogg","woman_hit_b_14.ogg","woman_hit_b_16.ogg"]),
("woman_die", sf_priority_12|sf_vol_10, ["woman_fall_1.ogg","woman_hit_b_5.ogg"]),
("woman_yell", sf_priority_9|sf_vol_8, ["woman_yell_1.ogg","woman_yell_2.ogg"]),
("hide", 0, ["s_hide.wav"]),
("unhide", 0, ["s_unhide.wav"]),
("neigh", sf_priority_1|sf_vol_1, []),
("gallop", sf_priority_10|sf_vol_3, ["horse_gallop_3.ogg","horse_gallop_4.ogg","horse_gallop_5.ogg"]),
("battle", sf_priority_10|sf_vol_4, ["battle.ogg"]),
("arrow_hit_body", sf_priority_9|sf_vol_10, ["arrow_hit_body_1.ogg","arrow_hit_body_2.ogg","arrow_hit_body_3.ogg"]),
("metal_hit_low_armor_low_damage", sf_priority_8|sf_vol_9, ["sword_hit_lo_armor_lo_dmg_1.ogg","sword_hit_lo_armor_lo_dmg_2.ogg","sword_hit_lo_armor_lo_dmg_3.ogg"]),
("metal_hit_low_armor_high_damage", sf_priority_10|sf_vol_9, ["sword_hit_lo_armor_hi_dmg_1.ogg","sword_hit_lo_armor_hi_dmg_2.ogg","sword_hit_lo_armor_hi_dmg_3.ogg"]),
("metal_hit_high_armor_low_damage", sf_priority_8|sf_vol_9, ["metal_hit_high_armor_low_damage.ogg","metal_hit_high_armor_low_damage_2.ogg","metal_hit_high_armor_low_damage_3.ogg"]),
("metal_hit_high_armor_high_damage", sf_priority_10|sf_vol_9, ["sword_hit_hi_armor_hi_dmg_1.ogg","sword_hit_hi_armor_hi_dmg_2.ogg","sword_hit_hi_armor_hi_dmg_3.ogg"]),
("wooden_hit_low_armor_low_damage", sf_priority_8|sf_vol_9, ["blunt_hit_low_1.ogg","blunt_hit_low_2.ogg","blunt_hit_low_3.ogg"]),
("wooden_hit_low_armor_high_damage", sf_priority_10|sf_vol_9, ["blunt_hit_high_1.ogg","blunt_hit_high_2.ogg","blunt_hit_high_3.ogg"]),
("wooden_hit_high_armor_low_damage", sf_priority_8|sf_vol_9, ["wooden_hit_high_armor_low_damage_1.ogg","wooden_hit_high_armor_low_damage_2.ogg"]),
("wooden_hit_high_armor_high_damage", sf_priority_10|sf_vol_9, ["blunt_hit_high_1.ogg","blunt_hit_high_2.ogg","blunt_hit_high_3.ogg"]),
("mp_arrow_hit_target", sf_2d|sf_priority_10|sf_vol_9, ["mp_arrow_hit_target.ogg"]),
("blunt_hit", sf_priority_9|sf_vol_9, ["punch_1.ogg","punch_4.ogg","punch_4.ogg","punch_5.ogg"]),
("player_hit_by_arrow", sf_priority_10|sf_vol_10, ["player_hit_by_arrow.ogg"]),
("release_crossbow_medium", sf_priority_4|sf_vol_7, ["release_crossbow_1.ogg"]),
("release_crossbow_far", sf_priority_3|sf_vol_7, ["release_crossbow_1.ogg"]),
("bullet_hit_body", sf_priority_6|sf_vol_7, ["player_hit_by_arrow.ogg"]),
("player_hit_by_bullet", sf_priority_10|sf_vol_10, ["player_hit_by_arrow.ogg"]),
("pistol_shot", sf_priority_12|sf_vol_10, ["fl_pistol.wav"]),
("man_grunt", sf_priority_6|sf_vol_4, ["man_excercise_1.ogg","man_excercise_2.ogg","man_excercise_4.ogg"]),
("man_breath_hard", sf_priority_7|sf_vol_8, ["man_ugh_1.ogg","man_ugh_2.ogg","man_ugh_4.ogg","man_ugh_7.ogg","man_ugh_12.ogg","man_ugh_13.ogg","man_ugh_17.ogg"]),
("man_stun", sf_priority_9|sf_vol_8, ["man_stun_1.ogg"]),
("man_grunt_long", sf_priority_7|sf_vol_7, ["man_grunt_1.ogg","man_grunt_2.ogg","man_grunt_3.ogg","man_grunt_5.ogg","man_grunt_13.ogg","man_grunt_14.ogg"]),
("man_yell", sf_priority_6|sf_vol_8, ["man_yell_4.ogg","man_yell_4_2.ogg","man_yell_7.ogg","man_yell_9.ogg","man_yell_11.ogg","man_yell_13.ogg","man_yell_15.ogg","man_yell_16.ogg","man_yell_17.ogg","man_yell_20.ogg","man_shortyell_4.ogg","man_shortyell_5.ogg","man_shortyell_6.ogg","man_shortyell_9.ogg","man_shortyell_11.ogg","man_shortyell_11b.ogg","man_yell_b_18.ogg","man_yell_22.ogg", "man_yell_c_20.ogg"]),
("man_warcry", sf_priority_8|sf_vol_10, ["man_insult_2.ogg","man_insult_3.ogg","man_insult_7.ogg","man_insult_9.ogg","man_insult_13.ogg","man_insult_15.ogg","man_insult_16.ogg"]),
("encounter_looters", sf_priority_8|sf_vol_8, ["encounter_river_pirates_5.ogg","encounter_river_pirates_6.ogg","encounter_river_pirates_9.ogg","encounter_river_pirates_10.ogg","encounter_river_pirates_4.ogg"]),
("encounter_bandits", sf_priority_8|sf_vol_8, ["encounter_bandit_2.ogg","encounter_bandit_9.ogg","encounter_bandit_12.ogg","encounter_bandit_13.ogg","encounter_bandit_15.ogg","encounter_bandit_16.ogg","encounter_bandit_10.ogg",]),
("encounter_farmers", sf_priority_8|sf_vol_8, ["encounter_farmer_2.ogg","encounter_farmer_5.ogg","encounter_farmer_7.ogg","encounter_farmer_9.ogg"]),
("encounter_sea_raiders", sf_priority_8|sf_vol_8, ["encounter_sea_raider_5.ogg","encounter_sea_raider_9.ogg","encounter_sea_raider_9b.ogg","encounter_sea_raider_10.ogg"]),
("encounter_steppe_bandits", sf_priority_8|sf_vol_8, ["encounter_steppe_bandit_3.ogg","encounter_steppe_bandit_3b.ogg","encounter_steppe_bandit_8.ogg","encounter_steppe_bandit_10.ogg","encounter_steppe_bandit_12.ogg"]),
("encounter_nobleman", sf_priority_8|sf_vol_8, ["encounter_nobleman_1.ogg"]),
("encounter_vaegirs_ally", sf_priority_8|sf_vol_8, ["encounter_vaegirs_ally.ogg","encounter_vaegirs_ally_2.ogg"]),
("encounter_vaegirs_neutral", sf_priority_8|sf_vol_8, ["encounter_vaegirs_neutral.ogg","encounter_vaegirs_neutral_2.ogg","encounter_vaegirs_neutral_3.ogg","encounter_vaegirs_neutral_4.ogg"]),
("encounter_vaegirs_enemy", sf_priority_8|sf_vol_8, ["encounter_vaegirs_neutral.ogg","encounter_vaegirs_neutral_2.ogg","encounter_vaegirs_neutral_3.ogg","encounter_vaegirs_neutral_4.ogg"]),
("sneak_town_halt", sf_priority_8|sf_vol_10, ["sneak_halt_1.ogg","sneak_halt_2.ogg"]),
("horse_walk", sf_priority_3|sf_vol_3, ["horse_walk_1.ogg","horse_walk_2.ogg","horse_walk_3.ogg","horse_walk_4.ogg"]),
("horse_trot", sf_priority_3|sf_vol_3, ["horse_trot_1.ogg","horse_trot_2.ogg","horse_trot_3.ogg","horse_trot_4.ogg"]),
("horse_canter", sf_priority_4|sf_vol_4, ["horse_canter_1.ogg","horse_canter_2.ogg","horse_canter_3.ogg","horse_canter_4.ogg"]),
("horse_gallop", sf_priority_5|sf_vol_4, ["horse_gallop_6.ogg","horse_gallop_7.ogg","horse_gallop_8.ogg","horse_gallop_9.ogg"]),
("horse_breath", sf_priority_1|sf_vol_4, ["horse_breath_4.ogg","horse_breath_5.ogg","horse_breath_6.ogg","horse_breath_7.ogg"]),
("horse_snort", sf_priority_1|sf_vol_1, ["horse_snort_1.ogg","horse_snort_2.ogg","horse_snort_3.ogg","horse_snort_4.ogg","horse_snort_5.ogg"]),
("horse_low_whinny", sf_priority_8|sf_vol_12, ["horse_whinny-1.ogg","horse_whinny-2.ogg"]),
("block_fist", sf_priority_9|sf_vol_10, ["block_fist_3.ogg","block_fist_4.ogg"]),
("man_hit_blunt_weak", sf_priority_9|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_hit_blunt_strong", sf_priority_10|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_hit_pierce_weak", sf_priority_9|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_hit_pierce_strong", sf_priority_10|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_hit_cut_weak", sf_priority_9|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_hit_cut_strong", sf_priority_10|sf_vol_10, ["man_hit_13.ogg","man_hit_29.ogg","man_hit_32.ogg","man_hit_47.ogg","man_hit_57.ogg"]),
("man_victory", sf_priority_5|sf_vol_10, ["man_victory_3.ogg","man_victory_4.ogg","man_victory_5.ogg","man_victory_8.ogg","man_victory_15.ogg","man_victory_49.ogg","man_victory_52.ogg","man_victory_54.ogg","man_victory_57.ogg","man_victory_71.ogg"]),
("fire_loop", sf_priority_5|sf_vol_15|sf_looping|sf_start_at_random_pos, ["Fire_Torch_Loop3.ogg"]),
("torch_loop", sf_priority_4|sf_vol_15|sf_looping|sf_start_at_random_pos, ["Fire_Torch_Loop3.ogg"]),
("dummy_hit", sf_priority_6|sf_vol_10, ["shield_hit_cut_3.ogg","shield_hit_cut_5.ogg"]),
("dummy_destroyed", sf_priority_7|sf_vol_10, ["shield_broken.ogg"]),
("gourd_destroyed", sf_priority_7|sf_vol_10, ["shield_broken.ogg"]),
("cow_moo", sf_priority_6|sf_vol_12, ["cow_moo_1.ogg"]),
("cow_slaughter", sf_priority_9|sf_vol_12, ["cow_slaughter.ogg"]),
("distant_dog_bark", sf_priority_3|sf_vol_15|sf_stream_from_hd, ["d_dog1.ogg","d_dog2.ogg","d_dog3.ogg","d_dog7.ogg"]),
("distant_owl", sf_priority_3|sf_vol_15|sf_stream_from_hd, ["d_owl2.ogg","d_owl3.ogg","d_owl4.ogg"]),
("distant_chicken", sf_priority_3|sf_vol_15|sf_stream_from_hd, ["d_chicken1.ogg","d_chicken2.ogg"]),
("distant_carpenter", sf_priority_3|sf_vol_15|sf_stream_from_hd, ["d_carpenter1.ogg","d_saw_short3.ogg"]),
("distant_blacksmith", sf_priority_3|sf_vol_15|sf_stream_from_hd, ["d_blacksmith2.ogg"]),
("arena_ambiance", sf_priority_5|sf_vol_15|sf_looping|sf_stream_from_hd, ["arena_loop11.ogg"]),
("town_ambiance", sf_priority_5|sf_vol_15|sf_looping|sf_stream_from_hd, ["town_loop_3.ogg"]),
("tutorial_fail", sf_2d|sf_priority_10|sf_vol_7,["cue_failure.ogg"]),
("your_flag_taken", sf_2d|sf_priority_10|sf_vol_10, ["your_flag_taken.ogg"]),
("enemy_flag_taken", sf_2d|sf_priority_10|sf_vol_10, ["enemy_flag_taken.ogg"]),
("flag_returned", sf_2d|sf_priority_10|sf_vol_10, ["your_flag_returned.ogg"]),
("team_scored_a_point", sf_2d|sf_priority_10|sf_vol_10, ["you_scored_a_point.ogg"]),
("enemy_scored_a_point", sf_2d|sf_priority_10|sf_vol_10, ["enemy_scored_a_point.ogg"]),
("failure", sf_2d|sf_priority_6|sf_vol_5, ["cue_failure.ogg"]),
("man_yawn", sf_priority_6|sf_vol_10, ["man_yawn_1.ogg"]),
("man_cough", sf_priority_6|sf_vol_10, ["man_cough_1.ogg","man_cough_2.ogg","man_cough_3.ogg"]),
("man_drown", sf_priority_9|sf_vol_10, ["man_stun_1.ogg","man_ugh_7.ogg","man_ugh_13.ogg","man_ugh_17.ogg"]),
("woman_drown", sf_priority_9|sf_vol_10, ["woman_hit_b_2.ogg","woman_hit_2.ogg"]),
("cut_wood", sf_priority_9|sf_vol_10, ["shield_hit_cut_3.ogg","shield_hit_cut_5.ogg"]),
("cut_wood_break", sf_priority_10|sf_vol_10, ["shield_hit_cut_4.ogg"]),
("cut_wood_scratch", sf_priority_6|sf_vol_10, ["wooden_hit_high_armor_low_damage_1.ogg","wooden_hit_high_armor_low_damage_2b.ogg"]),
("mining_hit", sf_priority_9|sf_vol_10, ["hit_wood_metal_7.ogg","hit_metal_metal_1.ogg","hit_metal_metal_2.ogg","hit_metal_metal_4.ogg","hit_metal_metal_5.ogg"]),
("mining_scratch", sf_priority_6|sf_vol_10, ["hit_metal_metal_3.ogg","hit_metal_metal_6.ogg"]),
("repair_wood", sf_priority_9|sf_vol_10, ["hit_wood_wood_2.ogg","hit_wood_wood_3.ogg","hit_wood_wood_4.ogg","hit_wood_metal_4.ogg","hit_wood_metal_5.ogg"]),
("saw_wood", sf_priority_7|sf_vol_10, ["d_saw_short3.ogg"]),
("blacksmith", sf_priority_7|sf_vol_10, ["d_blacksmith2.ogg"]),
("damage_ship", sf_priority_9|sf_vol_10, ["shield_broken.ogg"]),
("lock", sf_priority_10|sf_vol_10, ["hit_wood_metal_6.ogg"]),
("pick_lock_fail", sf_priority_10|sf_vol_10, ["hit_wood_wood_1.ogg"]),
("fire", sf_priority_6|sf_vol_10, ["Fire_Small_Crackle_Slick_op.ogg"]),
("horse_neigh", sf_priority_8|sf_vol_10, ["horse_exterior_whinny_01.ogg","horse_exterior_whinny_02.ogg","horse_exterior_whinny_03.ogg","horse_exterior_whinny_04.ogg","horse_exterior_whinny_05.ogg","horse_whinny.ogg"]),
("pull_flax", sf_priority_6|sf_vol_3, ["draw_other.ogg"]),
("away_vile_beggar", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_nobleman_1.ogg"]),
("my_lord", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_farmer_7.ogg","encounter_farmer_9.ogg"]),
("almost_harvesting_season", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_farmer_2.ogg"]),
("whats_this_then", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_river_pirates_5.ogg"]),
("out_for_a_stroll_are_we", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_river_pirates_6.ogg"]),
("we_ride_to_war", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_vaegirs_neutral_2.ogg","encounter_vaegirs_ally.ogg"]),
("less_talking_more_raiding", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_sea_raider_10.ogg"]),
("you_there_stop", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["sneak_halt_1.ogg","sneak_halt_2.ogg"]),
("tear_you_limb_from_limb", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_river_pirates_9.ogg","encounter_river_pirates_10.ogg"]),
("better_not_be_a_manhunter", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_river_pirates_4.ogg"]),
("drink_from_your_skull", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_sea_raider_5.ogg"]),
("gods_will_decide_your_fate", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_sea_raider_9.ogg"]),
("nice_head_on_shoulders", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_sea_raider_9b.ogg"]),
("hunt_you_down", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_steppe_bandit_8.ogg","encounter_steppe_bandit_10.ogg"]),
("dead_men_tell_no_tales", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_steppe_bandit_3.ogg"]),
("stand_and_deliver", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_12.ogg"]),
("your_money_or_your_life", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_2.ogg","encounter_steppe_bandit_12.ogg"]),
("have_our_pay_or_fun", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_10.ogg"]),
("word_about_purse_belongings", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_13.ogg"]),
("easy_way_or_hard_way", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_15.ogg"]),
("everything_has_a_price", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_16.ogg"]),
("slit_your_throat", sf_priority_10|sf_vol_10|sf_stream_from_hd, ["encounter_bandit_9.ogg"]),
("sounds_end", 0, []),
]
|
jeremiedecock/snippets
|
refs/heads/master
|
python/time/process_time.py
|
1
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 Jérémie DECOCK (http://www.jdhp.org)
# See: https://docs.python.org/3/library/time.html#time.process_time
# http://stackoverflow.com/questions/7370801/measure-time-elapsed-in-python
import time
initial_time = time.process_time()
# do some stuff
time.sleep(1)
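# Note: time.sleep() consumes almost no CPU time, so the process time measured
# below stays close to zero; use time.perf_counter() to measure wall-clock time.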
final_time = time.process_time()
elapsed_time = final_time - initial_time
print(elapsed_time, "sec")
|
chirilo/kuma
|
refs/heads/master
|
vendor/packages/pygments/formatters/rtf.py
|
73
|
# -*- coding: utf-8 -*-
"""
pygments.formatters.rtf
~~~~~~~~~~~~~~~~~~~~~~~
A formatter that generates RTF files.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
from pygments.util import get_int_opt, _surrogatepair
__all__ = ['RtfFormatter']
class RtfFormatter(Formatter):
"""
Format tokens as RTF markup. This formatter automatically outputs full RTF
documents with color information and other useful stuff. Perfect for Copy and
Paste into Microsoft(R) Word(R) documents.
Please note that ``encoding`` and ``outencoding`` options are ignored.
The RTF format is ASCII natively, but handles unicode characters correctly
thanks to escape sequences.
.. versionadded:: 0.6
Additional options accepted:
`style`
The style to use, can be a string or a Style subclass (default:
``'default'``).
`fontface`
The font family used, for example ``Bitstream Vera Sans``. Defaults to
some generic font which is supposed to have fixed width.
`fontsize`
Size of the font used. Size is specified in half points. The
default is 24 half-points, giving a size 12 font.
.. versionadded:: 2.0
"""
name = 'RTF'
aliases = ['rtf']
filenames = ['*.rtf']
def __init__(self, **options):
r"""
Additional options accepted:
``fontface``
Name of the font used. Could for example be ``'Courier New'``
to further specify the default which is ``'\fmodern'``. The RTF
specification claims that ``\fmodern`` are "Fixed-pitch serif
and sans serif fonts". Hope every RTF implementation thinks
the same about modern...
"""
Formatter.__init__(self, **options)
self.fontface = options.get('fontface') or ''
self.fontsize = get_int_opt(options, 'fontsize', 0)
def _escape(self, text):
return text.replace(u'\\', u'\\\\') \
.replace(u'{', u'\\{') \
.replace(u'}', u'\\}')
def _escape_text(self, text):
# empty strings should give a small performance improvement
if not text:
return u''
# escape text
text = self._escape(text)
buf = []
for c in text:
cn = ord(c)
if cn < (2**7):
# ASCII character
buf.append(str(c))
elif (2**7) <= cn < (2**16):
# single unicode escape sequence
buf.append(u'{\\u%d}' % cn)
elif (2**16) <= cn:
# RTF limits unicode to 16 bits.
# Force surrogate pairs
buf.append(u'{\\u%d}{\\u%d}' % _surrogatepair(cn))
return u''.join(buf).replace(u'\n', u'\\par\n')
def format_unencoded(self, tokensource, outfile):
# rtf 1.8 header
outfile.write(u'{\\rtf1\\ansi\\uc0\\deff0'
u'{\\fonttbl{\\f0\\fmodern\\fprq1\\fcharset0%s;}}'
u'{\\colortbl;' % (self.fontface and
u' ' + self._escape(self.fontface) or
u''))
# convert colors and save them in a mapping to access them later.
color_mapping = {}
offset = 1
for _, style in self.style:
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
outfile.write(u'\\red%d\\green%d\\blue%d;' % (
int(color[0:2], 16),
int(color[2:4], 16),
int(color[4:6], 16)
))
offset += 1
outfile.write(u'}\\f0 ')
if self.fontsize:
outfile.write(u'\\fs%d' % (self.fontsize))
# highlight stream
for ttype, value in tokensource:
while not self.style.styles_token(ttype) and ttype.parent:
ttype = ttype.parent
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
buf.append(u'\\cb%d' % color_mapping[style['bgcolor']])
if style['color']:
buf.append(u'\\cf%d' % color_mapping[style['color']])
if style['bold']:
buf.append(u'\\b')
if style['italic']:
buf.append(u'\\i')
if style['underline']:
buf.append(u'\\ul')
if style['border']:
buf.append(u'\\chbrdr\\chcfpat%d' %
color_mapping[style['border']])
start = u''.join(buf)
if start:
outfile.write(u'{%s ' % start)
outfile.write(self._escape_text(value))
if start:
outfile.write(u'}')
outfile.write(u'}')
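# A minimal usage sketch; only standard Pygments entry points are used.
#
#     from pygments import highlight
#     from pygments.lexers import PythonLexer
#     rtf_doc = highlight('print("hi")', PythonLexer(),
#                         RtfFormatter(fontface='Courier New', fontsize=24))
#     # rtf_doc now holds a complete RTF document as a string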
|
qbilius/streams
|
refs/heads/master
|
streams/parallel.py
|
1
|
import sys, subprocess, time, glob, os, random, string, pickle
import inspect, argparse, threading, signal, ast
import tqdm
import dill
# from joblib import Parallel, delayed
import multiprocessing
import numpy as np
import pandas
COMPUTED = os.environ.get('COMPUTED', '')
class Parallel(object):
def __init__(self, func, n_iter, backend=None, timer=False, *args, **kwargs):
self.func = func
self.n_iter = n_iter
self.backend = backend
self.timer = timer
if backend is None:
self.parallel = 'loop'
elif backend == 'sbatch':
self.parallel = 'sbatch'
elif backend == 'sbatch_pickle':
self.parallel = SBatchPickle(*args, **kwargs)
elif backend == 'multiprocessing':
self.parallel = MultiProcessing(*args, **kwargs)
else:
raise ValueError('backend "{}" not recognized'.format(backend))
def __call__(self, *args, **kwargs):
if self.backend is None: # normal loop
return [self.func(iterno, *args, **kwargs) for iterno in tqdm.trange(self.n_iter, disable=not self.timer)]
elif self.backend == 'sbatch':
iternos = os.environ['PARALLEL_IDX']
iternos = iternos.split('_')
iterno = int(iternos[0])
if len(iternos) > 1:
os.environ['PARALLEL_IDX'] = '_'.join(iternos[1:])
return self.func(iterno, *args, **kwargs)
else:
return self.parallel(*args, **kwargs)
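# A minimal usage sketch of the serial path (backend=None); `simulate` and
# `scale` are illustrative names. The wrapped callable receives the iteration
# number first, followed by whatever is passed at call time.
#
#     def simulate(iterno, scale=1.0):
#         return iterno * scale
#
#     results = Parallel(simulate, n_iter=10, timer=True)(scale=2.0)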
def prange(start=0, stop=None, step=1, backend=None, **tqdm_kwargs):
try:
iter(start)
except TypeError:
rng = np.arange(start, stop, step)
else:
rng = list(start)
if backend is None:
if os.uname()[1].startswith('node0'):
backend = 'sbatch'
if backend is None:
return tqdm.tqdm(rng, **tqdm_kwargs)
elif backend == 'sbatch':
iternos = os.environ['PARALLEL_IDX']
iternos = iternos.split('_')
iterno = int(iternos[0])
if len(iternos) > 1:
os.environ['PARALLEL_IDX'] = '_'.join(iternos[1:])
return [rng[iterno]]
# elif backend == 'multiprocessing':
# # results = []
# # for batch_no in tqdm.trange((self.n_iter - 1) // self.n_jobs + 1):
# pool = multiprocessing.Pool(processes=len(rng))
# # array = range(batch_no * self.n_jobs, (batch_no+1) * self.n_jobs)
# if hasattr(pool, 'starmap'):
# out = pool.starmap(self.func, ([i, args, kwargs] for i in rng))
# else:
# func_args = ([self.func, i, args, kwargs] for i in rng)
# out = pool.map(func_star, func_args)
# pool.close()
# pool.join()
# # results.extend(out)
# return out
else:
raise ValueError('backend "{}" not recognized'.format(backend))
class ParallelBase(object):
@property
def pid(self):
if not hasattr(self, '_pid'):
pid = self.id_generator(n=6)
filelist = glob.glob(os.path.join(self._save_path, '*'))
while any([pid in os.path.basename(f) for f in filelist]):
pid = self.id_generator(n=6)
self._pid = pid
return self._pid
def id_generator(self, n=6, chars=string.ascii_lowercase):
return ''.join(random.SystemRandom().choice(chars) for _ in range(n))
class SBatch(object):
def __init__(self, module, func, output_path=None, output_name=None, timer=False, save_path=None, **func_kwargs):
self.module = os.path.abspath(module)
self.func = func
self.func_kwargs = func_kwargs
if output_path is None:
rel_path = os.path.relpath(os.path.splitext(self.module)[0], os.environ['CODE'])
output_path = os.path.join(COMPUTED, rel_path)
if not os.path.isdir(output_path):
os.makedirs(output_path)
if output_name is None:
output_name = self.func + '.pkl'
self.output_file = os.path.join(output_path, output_name)
self.timer = timer
if save_path is None:
self._save_path = '/om/user/qbilius/tmp' # os.getcwd()
else:
self._save_path = save_path
template = self.pid + '_{}'
self.save_path = os.path.join(self._save_path, template)
self._stop = threading.Event()
signal.signal(signal.SIGINT, self._break)
def __call__(self, n_iters):
python_script_path = self.gen_python_script()
try:
iter(n_iters)
except:
self.n_iters = [n_iters]
else:
self.n_iters = n_iters
array = (0, np.prod(self.n_iters))
sbatch_path = self.gen_sbatch(python_script_path, array=array)
out = subprocess.check_output(['sbatch', sbatch_path])
out = out.decode('ascii')
out_str = 'Submitted batch job '
assert out[:len(out_str)] == out_str
self.job_id = out.split('\n')[0][len(out_str):]
self.sbatch_progress()
if not self._stop.is_set():
# check if output file was created; if not, there must have been an error
for i in range(array[0], array[1]):
outf = self.save_path.format('output_{}.pkl'.format(i))
if not os.path.isfile(outf):
with open(self.save_path.format('slurm_{}.out'.format(i))) as f:
msg = f.read()
print()
print(msg)
self._cleanup()
sys.exit()
else:
print('cleaning up...')
self._cleanup()
sys.exit()
results = self._combine_results()
self._cleanup()
pickle.dump(results, open(self.output_file, 'wb'))
return results
@property
def pid(self):
if not hasattr(self, '_pid'):
pid = self.id_generator(n=6)
filelist = glob.glob(os.path.join(self._save_path, '*'))
while any([pid in os.path.basename(f) for f in filelist]):
pid = self.id_generator(n=6)
self._pid = pid
return self._pid
def id_generator(self, n=6, chars=string.ascii_lowercase):
return ''.join(random.SystemRandom().choice(chars) for _ in range(n))
def gen_python_script(self):
mod_name = os.path.splitext(os.path.basename(self.module))[0]
output_name = self.save_path.format('output_{}.pkl')
script = ('import sys, os, imp, pickle',
'import numpy as np',
'sys.path.insert(0, "{}")',
'mod = imp.load_source("{}", "{}")',
'sh = [int(i) for i in os.environ["PARALLEL_SHAPE"].split("_")]',
'task_id = int(os.environ["SLURM_ARRAY_TASK_ID"])',
'n_iters = np.prod(sh)',
'idx = np.nonzero(np.arange(n_iters).reshape(sh)==task_id)',
'idx = "_".join(str(v[0]) for v in idx)',
'os.environ["PARALLEL_IDX"] = idx',
'res = getattr(mod, "{}")({})',
'pickle.dump(res, open("{}".format(task_id), "wb"))'
)
kwargs = []
for k,v in self.func_kwargs.items():
if isinstance(k, str):
inp = '{}="{}"'.format(k, v)
else:
inp = '{}={}'.format(k, v)
kwargs.append(inp)
kwargs = ', '.join(kwargs)
script = '\n'.join(script).format(os.getcwd(), mod_name, self.module, self.func, kwargs, output_name)
script_path = self.save_path.format('script.py')
with open(script_path, 'w') as f:
f.write(script)
return script_path
def gen_sbatch(self, callable_path, array=(0,100)):
slurm_out_file = self.save_path.format('slurm_%a.out')
script = ('#!/bin/bash',
'#SBATCH --array={}-{}',
'#SBATCH --time=7-00:00:00',
'#SBATCH --ntasks=1',
'#SBATCH --cpus-per-task=1',
'#SBATCH --mem=100G',
'#SBATCH --output="{}"',
'export PARALLEL_SHAPE={}',
'python "{}" $SLURM_ARRAY_TASK_ID')
shape = '_'.join([str(i) for i in self.n_iters])
script = '\n'.join(script).format(array[0], array[1] - 1, slurm_out_file,
shape, callable_path)
sbatch_path = self.save_path.format('sbatch.sh')
with open(sbatch_path, 'w') as f:
f.write(script)
return sbatch_path
def sbatch_progress_orig(self, job_id):
while not self._stop.is_set():
try:
jobs = subprocess.check_output('squeue -o %M -j {}'.format(job_id).split())
except:
print('Squeue error. Trying again in 10 sec...')
self._stop.wait(10) # queue busy, ask later
else:
jobs = jobs.decode('ascii')
if not jobs.startswith('TIME'):
print('Unexpected squeue output. Trying again in 10 sec...')
self._stop.wait(10) # queue busy, ask later
elif len(jobs.split('\n')) > 2: # still running
if self.timer:
t = jobs.split('\n')[-2]
print('\rJob {} (id: {}): {}'.format(job_id, self.pid, t), end='')
sys.stdout.flush()
self._stop.wait(10)
else:
break
if self.timer:
print()
def sbatch_progress(self, wait=10):
if self.timer:
os.system('setterm -cursor off')
while not self._stop.is_set():
try:
cmd = 'sacct -j {} -o State -X'.format(self.job_id).split()
jobs = subprocess.check_output(cmd)
except:
print('\rSqueue error. Trying again in {} sec...'.format(wait),
end='', flush=True)
self._stop.wait(wait) # queue busy, ask later
else:
jobs = jobs.decode('ascii')
if not jobs.startswith(' State'):
print('\rUnexpected squeue output. Trying again in {} sec...'.format(wait),
end='', flush=True)
self._stop.wait(wait) # queue busy, ask later
else:
status = jobs.split('\n')[2:-1]
count = {}
for st in status:
st = st.strip('\r\n').lstrip().rstrip()
if st in count:
count[st] += 1
else:
count[st] = 1
status = ', '.join(['{}: {}'.format(k,v) for k,v in count.items()])
if self.timer:
print('\r' + ' ' * 79, end='')
print('\rJob {} (id: {}) -- {}'.format(self.job_id, self.pid, status),
end='', flush=True)
if 'COMPLETED' in count and len(count) == 1:
break
self._stop.wait(wait)
if self.timer:
os.system('setterm -cursor on')
print()
def _break(self, signum, frame):
self._stop.set()
subprocess.check_output(['scancel', self.job_id])
def _cleanup(self):
for fname in ['sbatch.sh', 'script.py']:
try:
os.remove(self.save_path.format(fname))
except:
pass
for fname in ['slurm_{}.out', 'output_{}.pkl']:
for i in range(np.prod(self.n_iters)):
try:
os.remove(self.save_path.format(fname).format(i))
except:
pass
def _combine_results(self):
results = []
for i in range(np.prod(self.n_iters)):
outf = self.save_path.format('output_{}.pkl'.format(i))
res = pickle.load(open(outf, 'rb'))
results.append(res)
try:
results = pandas.concat(results, ignore_index=True)
except:
pass
return results
class SBatchPickle(ParallelBase):
def __init__(self, func, n_jobs=None, n_iter=1, backend='sbatch', timer=False,
save_path=None):
self.func = func
if n_jobs is None:
self.n_jobs = n_iter
else:
self.n_jobs = min(n_jobs, n_iter)
self.n_iter = n_iter
self.backend = backend
self.timer = timer
if save_path is None:
self._save_path = '/om/user/qbilius/tmp' # os.getcwd()
else:
self._save_path = save_path
template = 'parallel_' + self.pid + '_{}'
self.save_path = os.path.join(self._save_path, template)
if self.backend == 'sbatch':
frames = inspect.getouterframes(inspect.currentframe())
self._callers = [inspect.getmodule(frame[0]) for frame in frames[::-1]]
self._callers = [c for c in self._callers if c is not None]
self._callers = self._callers[-1:]
# self._caller = inspect.getmodule(frame)
# self._caller_path = os.path.dirname(os.path.abspath(self._caller.__file__))
# import pdb; pdb.set_trace()
def __call__(self, *args, **kwargs):
if self.n_jobs == 1: # run normally
res = [self.func(i, *args, **kwargs) for i in tqdm.trange(self.n_iter)]
else:
res = self._sbatch_run(*args, **kwargs)
return res
def _func_writer(self, iterno, *args, **kwargs):
res = self.func(iterno, *args, **kwargs)
tempf = self.save_path.format('output_{}.pkl'.format(iterno))
dill.dump(res, open(tempf, 'wb'))
def gen_sbatch_array(self, array=(0,100)):
tempf = self.save_path.format('slurm_%a.out')
callable_path = self.save_path.format('script.py')
script = ('#!/bin/bash\n'
'#SBATCH --array={}-{}\n'
'#SBATCH --time=7-00:00:00\n'
'#SBATCH --ntasks=1\n'
'#SBATCH --output="{}"\n'
'python "{}" $SLURM_ARRAY_TASK_ID'.format(
array[0], array[1]-1, tempf, callable_path)
)
with open(self.save_path.format('sbatch.sh'), 'w') as f:
f.write(script)
def gen_sbatch_python(self, *args, **kwargs):
tempf = self.save_path.format('vars.pkl')
dill.dump([self._callers, self._func_writer, args, kwargs], open(tempf, 'wb'))
script = ('import sys\n'
'def run(iterno):\n'
' import sys, dill\n'
' sys.path.insert(0, "{}")\n'.format(os.getcwd()))
# add all modules
for caller in self._callers:
path = os.path.dirname(os.path.abspath(caller.__file__))
script += ' sys.path.insert(0, "{}")\n'.format(path)
script += ' mods, func, args, kwargs = dill.load(open("{}"))\n'.format(tempf)
for i, caller in enumerate(self._callers):
for global_var in dir(caller):
if not global_var[:2] == '__' and not global_var[-2:] == '__': # no built-in
script += ' {} = mods[{}].{}\n'.format(global_var, i, global_var)
script += (' func(iterno, *args, **kwargs)\n'
'if __name__ == "__main__":\n'
' run(int(sys.argv[1]))')
with open(self.save_path.format('script.py'), 'w') as f:
f.write(script)
def _sbatch_run(self, *args, **kwargs):
self.gen_sbatch_python(*args, **kwargs)
for batch_no in range((self.n_iter - 1) // self.n_jobs + 1):
array = (batch_no * self.n_jobs, (batch_no+1) * self.n_jobs)
self.gen_sbatch_array(array=array)
# run the script and wait for it to complete
out = subprocess.check_output(['sbatch', self.save_path.format('sbatch.sh')])
out_str = 'Submitted batch job '
assert out[:len(out_str)] == out_str
job_id = out.split('\n')[0][len(out_str):]
while True:
try:
jobs = subprocess.check_output('squeue -o %M -j {}'.format(job_id).split())
except:
print('Squeue error. Trying again in 10 sec...')
time.sleep(10) # queue busy, ask later
else:
if not jobs.startswith('TIME'):
print('Unexpected squeue output. Trying again in 10 sec...')
time.sleep(10) # queue busy, ask later
elif len(jobs.split('\n')) > 2: # still running
if self.timer:
t = jobs.split('\n')[-2]
print('\rJob {}: {}'.format(job_id, t), end='')
sys.stdout.flush()
time.sleep(10)
else:
break
# check if output file was created; if not, there must have been an error
for i in range(array[0], array[1]):
outf = self.save_path.format('output_{}.pkl'.format(i))
if not os.path.isfile(outf):
with open(self.save_path.format('slurm_{}.out'.format(i))) as f:
msg = f.read()
print(msg)
raise Exception('Output file {} not found. See error log above.'.format(outf))
results = self._combine_results()
self._cleanup()
return results
def _cleanup(self):
for fname in ['sbatch.sh', 'script.py', 'vars.pkl']:
os.remove(self.save_path.format(fname))
for fname in ['slurm_{}.out', 'output_{}.pkl']:
for i in range(self.n_iter):
os.remove(self.save_path.format(fname).format(i))
def _combine_results(self):
results = []
for i in range(self.n_iter):
outf = self.save_path.format('output_{}.pkl'.format(i))
res = dill.load(open(outf))
results.append(res)
return results
class MultiProcessing(ParallelBase):
def __init__(self, func, n_jobs=None, n_iter=1, backend='sbatch', timer=False,
save_path=None):
self.func = func
if n_jobs is None:
self.n_jobs = n_iter
else:
self.n_jobs = min(n_jobs, n_iter)
self.n_iter = n_iter
self.backend = backend
self.timer = timer
if save_path is None:
self._save_path = '/om/user/qbilius/tmp' # os.getcwd()
else:
self._save_path = save_path
template = 'parallel_' + self.pid + '_{}'
self.save_path = os.path.join(self._save_path, template)
def __call__(self, *args, **kwargs):
if self.n_jobs == 1: # run normally
res = [self.func(i, *args, **kwargs) for i in tqdm.trange(self.n_iter)]
else:
res = self._multiproc_run(*args, **kwargs)
return res
def _multiproc_run(self, *args, **kwargs):
results = []
for batch_no in tqdm.trange((self.n_iter - 1) // self.n_jobs + 1):
pool = multiprocessing.Pool(processes=self.n_jobs)
array = range(batch_no * self.n_jobs, (batch_no+1) * self.n_jobs)
if hasattr(pool, 'starmap'):
out = pool.starmap(self.func, ([i, args, kwargs] for i in array))
else:
func_args = ([self.func, i, args, kwargs] for i in array)
out = pool.map(func_star, func_args)
pool.close()
pool.join()
results.extend(out)
return results
def func_star(args):
func, iterno, args, kwargs = args
return func(iterno, *args, **kwargs)
def run():
parser = argparse.ArgumentParser()
parser.add_argument('module', help='path to the Python script you want to run')
parser.add_argument('func', help='function to call')
parser.add_argument('n_iters', help='number of iterations')
parser.add_argument('-o', '--output_name', default=None, help='combined file name')
parser.add_argument('--output_path', default=None, help='where to save the combined file')
# parser.add_argument('-p', '--output_prefix', default='', help='prefix to the output path')
parser.add_argument('--timer', default=True, action='store_true', help='whether to show a timer')
parser.add_argument('--save_path', default=None, help='temporary place for storing intermediate results')
args, func_args = parser.parse_known_args()
func_kwargs = {k.strip('-'):v for k,v in zip(*[iter(func_args)] * 2)}
for k, v in func_kwargs.items():
try:
func_kwargs[k] = ast.literal_eval(v)
except:
pass
kwargs = {k:v for k,v in args.__dict__.items() if k != 'n_iters'}
kwargs.update(func_kwargs)
    SBatch(**kwargs)(args.n_iters)
if __name__ == '__main__':
run()
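# Hypothetical command line (added; the file and function names are placeholders,
# and it assumes this module is invoked directly): run `my_func` from
# `experiment.py` 100 times and combine the per-iteration pickles into one file.
# Extra `--key value` pairs after the known flags are parsed with
# ast.literal_eval where possible and passed on to SBatch as keyword arguments.
#
#   python parallel.py experiment.py my_func 100 -o combined.pkl --timer --alpha .5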
|
procrastinatio/mapproxy
|
refs/heads/master
|
mapproxy/util/async.py
|
5
|
# This file is part of the MapProxy project.
# Copyright (C) 2011 Omniscale <http://omniscale.de>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
MAX_MAP_ASYNC_THREADS = 20
try:
import Queue
except ImportError:
import queue as Queue
import sys
import threading
try:
import eventlet
import eventlet.greenpool
import eventlet.tpool
import eventlet.patcher
_has_eventlet = True
import eventlet.debug
eventlet.debug.hub_exceptions(False)
except ImportError:
_has_eventlet = False
from mapproxy.config import base_config
from mapproxy.config import local_base_config
from mapproxy.compat import PY2
import logging
log_system = logging.getLogger('mapproxy.system')
class AsyncResult(object):
def __init__(self, result=None, exception=None):
self.result = result
self.exception = exception
def __repr__(self):
return "<AsyncResult result='%s' exception='%s'>" % (
self.result, self.exception)
def _result_iter(results, use_result_objects=False):
for result in results:
if use_result_objects:
exception = None
if (isinstance(result, tuple) and len(result) == 3 and
isinstance(result[1], Exception)):
exception = result
result = None
yield AsyncResult(result, exception)
else:
yield result
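# Hedged illustration (added; not part of the original module): with
# use_result_objects=True, plain values and sys.exc_info() triples are both
# wrapped in AsyncResult, so callers can inspect failures without try/except.
def _example_result_objects():
    try:
        raise ValueError('boom')
    except ValueError:
        failed = sys.exc_info()
    ok, bad = _result_iter([42, failed], use_result_objects=True)
    # ok.result == 42 and ok.exception is None;
    # bad.result is None and bad.exception is the (type, value, traceback) triple
    return ok, bad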
class EventletPool(object):
def __init__(self, size=100):
self.size = size
self.base_config = base_config()
def shutdown(self, force=False):
        # there is no way to stop a GreenPool
pass
def map(self, func, *args, **kw):
return list(self.imap(func, *args, **kw))
def imap(self, func, *args, **kw):
use_result_objects = kw.get('use_result_objects', False)
def call(*args):
with local_base_config(self.base_config):
try:
return func(*args)
except Exception:
if use_result_objects:
return sys.exc_info()
else:
raise
if len(args[0]) == 1:
eventlet.sleep()
            return _result_iter([call(*next(iter(zip(*args))))], use_result_objects)  # zip(...) is not subscriptable on Python 3
pool = eventlet.greenpool.GreenPool(self.size)
return _result_iter(pool.imap(call, *args), use_result_objects)
def starmap(self, func, args, **kw):
use_result_objects = kw.get('use_result_objects', False)
def call(*args):
with local_base_config(self.base_config):
try:
return func(*args)
except Exception:
if use_result_objects:
return sys.exc_info()
else:
raise
if len(args) == 1:
eventlet.sleep()
return _result_iter([call(*args[0])], use_result_objects)
pool = eventlet.greenpool.GreenPool(self.size)
return _result_iter(pool.starmap(call, args), use_result_objects)
def starcall(self, args, **kw):
use_result_objects = kw.get('use_result_objects', False)
def call(func, *args):
with local_base_config(self.base_config):
try:
return func(*args)
except Exception:
if use_result_objects:
return sys.exc_info()
else:
raise
if len(args) == 1:
eventlet.sleep()
return _result_iter([call(args[0][0], *args[0][1:])], use_result_objects)
pool = eventlet.greenpool.GreenPool(self.size)
return _result_iter(pool.starmap(call, args), use_result_objects)
class ThreadWorker(threading.Thread):
def __init__(self, task_queue, result_queue):
threading.Thread.__init__(self)
self.task_queue = task_queue
self.result_queue = result_queue
self.base_config = base_config()
def run(self):
with local_base_config(self.base_config):
while True:
task = self.task_queue.get()
if task is None:
self.task_queue.task_done()
break
exec_id, func, args = task
try:
result = func(*args)
except Exception:
result = sys.exc_info()
self.result_queue.put((exec_id, result))
self.task_queue.task_done()
def _consume_queue(queue):
"""
Get all items from queue.
"""
while not queue.empty():
try:
queue.get(block=False)
queue.task_done()
except Queue.Empty:
pass
class ThreadPool(object):
def __init__(self, size=4):
self.pool_size = size
self.task_queue = Queue.Queue()
self.result_queue = Queue.Queue()
self.pool = None
def map_each(self, func_args, raise_exceptions):
"""
args should be a list of function arg tuples.
map_each calls each function with the given arg.
"""
if self.pool_size < 2:
for func, arg in func_args:
try:
yield func(*arg)
except Exception:
yield sys.exc_info()
            return  # end the generator; raising StopIteration is a RuntimeError under PEP 479
self.pool = self._init_pool()
i = 0
for i, (func, arg) in enumerate(func_args):
self.task_queue.put((i, func, arg))
results = {}
next_result = 0
for value in self._get_results(next_result, results, raise_exceptions):
yield value
next_result += 1
self.task_queue.join()
for value in self._get_results(next_result, results, raise_exceptions):
yield value
next_result += 1
self.shutdown()
def _single_call(self, func, args, use_result_objects):
try:
result = func(*args)
except Exception:
if not use_result_objects:
raise
result = sys.exc_info()
return _result_iter([result], use_result_objects)
def map(self, func, *args, **kw):
return list(self.imap(func, *args, **kw))
def imap(self, func, *args, **kw):
use_result_objects = kw.get('use_result_objects', False)
if len(args[0]) == 1:
return self._single_call(func, next(iter(zip(*args))), use_result_objects)
return _result_iter(self.map_each([(func, arg) for arg in zip(*args)], raise_exceptions=not use_result_objects),
use_result_objects)
def starmap(self, func, args, **kw):
use_result_objects = kw.get('use_result_objects', False)
        if len(args) == 1:  # a single argument tuple; mirrors EventletPool.starmap
return self._single_call(func, args[0], use_result_objects)
return _result_iter(self.map_each([(func, arg) for arg in args], raise_exceptions=not use_result_objects),
use_result_objects)
def starcall(self, args, **kw):
def call(func, *args):
return func(*args)
return self.starmap(call, args, **kw)
def _get_results(self, next_result, results, raise_exceptions):
for i, value in self._fetch_results(raise_exceptions):
if i == next_result:
yield value
next_result += 1
while next_result in results:
yield results.pop(next_result)
next_result += 1
else:
results[i] = value
def _fetch_results(self, raise_exceptions):
while not self.task_queue.empty() or not self.result_queue.empty():
task_result = self.result_queue.get()
if (raise_exceptions and isinstance(task_result[1], tuple) and
len(task_result[1]) == 3 and
isinstance(task_result[1][1], Exception)):
self.shutdown(force=True)
exc_class, exc, tb = task_result[1]
if PY2:
exec('raise exc_class, exc, tb')
else:
raise exc.with_traceback(tb)
yield task_result
def shutdown(self, force=False):
"""
Send shutdown sentinel to all executor threads. If `force` is True,
clean task_queue and result_queue.
"""
if force:
_consume_queue(self.task_queue)
_consume_queue(self.result_queue)
for _ in range(self.pool_size):
self.task_queue.put(None)
def _init_pool(self):
if self.pool_size < 2:
return []
pool = []
for _ in range(self.pool_size):
t = ThreadWorker(self.task_queue, self.result_queue)
t.daemon = True
t.start()
pool.append(t)
return pool
def imap_async_eventlet(func, *args):
pool = EventletPool()
return pool.imap(func, *args)
def imap_async_threaded(func, *args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.imap(func, *args)
def starmap_async_eventlet(func, args):
pool = EventletPool()
return pool.starmap(func, args)
def starmap_async_threaded(func, args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.starmap(func, args)
def starcall_async_eventlet(args):
pool = EventletPool()
return pool.starcall(args)
def starcall_async_threaded(args):
pool = ThreadPool(min(len(args[0]), MAX_MAP_ASYNC_THREADS))
return pool.starcall(args)
def run_non_blocking_eventlet(func, args, kw={}):
return eventlet.tpool.execute(func, *args, **kw)
def run_non_blocking_threaded(func, args, kw={}):
return func(*args, **kw)
def import_module(module):
"""
Import ``module``. Import patched version if eventlet is used.
"""
if uses_eventlet:
return eventlet.import_patched(module)
else:
return __import__(module)
uses_eventlet = False
# socket should be monkey patched when MapProxy runs inside eventlet
if _has_eventlet and eventlet.patcher.is_monkey_patched('socket'):
uses_eventlet = True
log_system.info('using eventlet for asynchronous operations')
imap = imap_async_eventlet
starmap = starmap_async_eventlet
starcall = starcall_async_eventlet
Pool = EventletPool
run_non_blocking = run_non_blocking_eventlet
else:
imap = imap_async_threaded
starmap = starmap_async_threaded
starcall = starcall_async_threaded
Pool = ThreadPool
run_non_blocking = run_non_blocking_threaded
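# Hedged usage sketch (added for illustration; the URLs are placeholders): the
# module-level imap/starmap helpers resolve to the eventlet or threaded backend
# at import time, so I/O-bound work can be fanned out the same way in both cases.
def _example_async_usage():
    def describe(url, timeout):
        return '%s (timeout=%s)' % (url, timeout)
    # imap takes one iterable per positional parameter, like map() with
    # several iterables
    zipped = list(imap(describe, ['http://a.example', 'http://b.example'], [5, 10]))
    # starmap takes pre-bundled argument tuples instead
    bundled = list(starmap(describe, [('http://c.example', 15)]))
    return zipped + bundled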
|
googleads/googleads-dfa-reporting-samples
|
refs/heads/master
|
python/v3_5/configure_dynamic_asset_selection.py
|
3
|
#!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example enables dynamic asset selection for an in-stream video creative.
Requires an existing in-stream video creative, a new video asset, and a
targeting template ID as input. To get an in-stream video creative, run
create_instream_video_creative.py. To get a targeting template, run
create_targeting_template.py.
"""
import argparse
import sys
from apiclient.http import MediaFileUpload
import dfareporting_utils
from oauth2client import client
# Declare command-line flags.
argparser = argparse.ArgumentParser(add_help=False)
argparser.add_argument(
'profile_id', type=int,
help='The ID of the profile to configure dynamic asset selection for')
argparser.add_argument(
'creative_id', type=int,
help='The ID of the in-stream video creative to configure selection for.')
argparser.add_argument(
'template_id', type=int,
help='The ID of the template to use for targeting.')
argparser.add_argument(
'video_name', help='Suggested name to use for the uploaded creative asset.')
argparser.add_argument(
'path_to_video_file', help='Path to the asset file to be uploaded.')
def main(argv):
# Retrieve command line arguments.
flags = dfareporting_utils.get_arguments(argv, __doc__, parents=[argparser])
# Authenticate and construct service.
service = dfareporting_utils.setup(flags)
profile_id = flags.profile_id
creative_id = flags.creative_id
template_id = flags.template_id
path_to_video_file = flags.path_to_video_file
video_name = flags.video_name
try:
# Retrieve the specified creative.
creative = service.creatives().get(profileId=profile_id,
id=creative_id).execute()
if not creative or creative['type'] != 'INSTREAM_VIDEO':
sys.exit('Invalid creative specified.')
if 'creativeAssetSelection' not in creative:
# Locate an existing video asset to use as a default.
default_asset_id = next((asset['id']
for asset in creative['creativeAssets']
if asset['role'] == 'PARENT_VIDEO'), None)
if not default_asset_id:
sys.exit('Default video asset could not be found.')
# Enable dynamic asset selection for the creative.
creative['dynamicAssetSelection'] = True
# Create a new selection using the existing asset as a default.
creative['creativeAssetSelection'] = {
'defaultAssetId': default_asset_id,
'rules': []
}
# Upload the new video asset and add it to the creative.
video_asset = upload_creative_asset(
service, profile_id, creative['advertiserId'], video_name,
path_to_video_file, 'VIDEO')
creative['creativeAssets'].append({
'assetIdentifier': video_asset['assetIdentifier'],
'role': 'PARENT_VIDEO'
})
# Create a rule targeting the new video asset and add it to the creative.
creative['creativeAssetSelection']['rules'].append({
'assetId': video_asset['id'],
'name': 'Test rule for asset %s' % video_asset['id'],
'targetingTemplateId': template_id
})
request = service.creatives().update(profileId=profile_id, body=creative)
# Execute request and print response.
response = request.execute()
print ('Dynamic asset selection enabled for creative with ID %s.'
% response['id'])
except client.AccessTokenRefreshError:
print ('The credentials have been revoked or expired, please re-run the '
'application to re-authorize')
def upload_creative_asset(
service, profile_id, advertiser_id, asset_name, path_to_asset_file,
asset_type):
"""Uploads a creative asset and returns a creative asset metadata object."""
# Construct the creative asset metadata
creative_asset = {
'assetIdentifier': {
'name': asset_name,
'type': asset_type
}
}
media = MediaFileUpload(path_to_asset_file)
if not media.mimetype():
media = MediaFileUpload(path_to_asset_file, 'application/octet-stream')
response = service.creativeAssets().insert(
advertiserId=advertiser_id,
profileId=profile_id,
media_body=media,
body=creative_asset).execute()
return response
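# Hedged reuse sketch (added; the asset name, file path, and asset type are
# placeholder assumptions, not values from this sample): the same helper should
# work for other asset types accepted by the creativeAssets service, e.g. an
# image companion.
def _example_upload_image_asset(service, profile_id, advertiser_id):
    return upload_creative_asset(
        service, profile_id, advertiser_id, 'Companion image',
        'path/to/companion.png', 'HTML_IMAGE')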
if __name__ == '__main__':
main(sys.argv)
|